added preprocessing option
parent 6355736b99
commit 3a50765954
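Below is a minimal, self-contained sketch of the preprocessing this commit adds to the notebook: Snowball stemming plus POS-aware WordNet lemmatization over the `plain_text` dict. It assumes NLTK is installed and the `punkt`, `wordnet`, and `averaged_perceptron_tagger` resources have been downloaded; the small `plain_text` sample here is illustrative only.

from nltk import pos_tag, word_tokenize
from nltk.corpus import wordnet
from nltk.stem import WordNetLemmatizer
from nltk.stem.snowball import SnowballStemmer

def get_wordnet_pos(treebank_tag):
    # Map a Penn Treebank tag to the WordNet POS constant the lemmatizer expects.
    if treebank_tag.startswith('J'):
        return wordnet.ADJ
    if treebank_tag.startswith('V'):
        return wordnet.VERB
    if treebank_tag.startswith('R'):
        return wordnet.ADV
    return wordnet.NOUN  # default, also covers 'N*' tags

# Illustrative stand-in for the notebook's plain_text dict (row id -> tweet text).
plain_text = {2: "woooaaaahhh", 4: "i wanna know too"}

stemmer = SnowballStemmer("english")
lemmatizer = WordNetLemmatizer()

for key, sentence in plain_text.items():
    # Option 1: Snowball stemming of whitespace-separated tokens.
    stemmed = " ".join(stemmer.stem(w) for w in sentence.split(" "))
    # Option 2: POS-aware WordNet lemmatization; the raw Treebank tag is passed
    # so verbs, adjectives and adverbs get the right WordNet POS.
    tagged = pos_tag(word_tokenize(sentence))
    lemmatized = " ".join(lemmatizer.lemmatize(w, pos=get_wordnet_pos(t)) for w, t in tagged)
    plain_text[key] = lemmatized  # or stemmed, depending on the chosen option

print(plain_text)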
@@ -22,7 +22,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 32,
+"execution_count": 2,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -74,11 +74,11 @@
 },
 {
 "cell_type": "code",
-"execution_count": 4,
+"execution_count": 5,
 "metadata": {},
 "outputs": [],
 "source": [
-"data_root_folder = \"./data_en/\" # i created a symlink here"
+"data_root_folder = \"../data/en/\" # i created a symlink here"
 ]
 },
 {
@@ -90,7 +90,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 5,
+"execution_count": 6,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -113,7 +113,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 6,
+"execution_count": 7,
 "metadata": {},
 "outputs": [
 {
@@ -1205,7 +1205,7 @@
 "[68733 rows x 9 columns]"
 ]
 },
-"execution_count": 6,
+"execution_count": 7,
 "metadata": {},
 "output_type": "execute_result"
 }
@@ -1224,7 +1224,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 7,
+"execution_count": 8,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -1243,7 +1243,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 8,
+"execution_count": 9,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -1261,7 +1261,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 9,
+"execution_count": 10,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -1278,7 +1278,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 10,
+"execution_count": 11,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -1298,7 +1298,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 11,
+"execution_count": 12,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -1307,7 +1307,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 12,
+"execution_count": 13,
 "metadata": {},
 "outputs": [
 {
@@ -1316,7 +1316,7 @@
 "68733"
 ]
 },
-"execution_count": 12,
+"execution_count": 13,
 "metadata": {},
 "output_type": "execute_result"
 }
@@ -1327,7 +1327,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 13,
+"execution_count": 14,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -1343,7 +1343,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 14,
+"execution_count": 15,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -1354,7 +1354,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 15,
+"execution_count": 16,
 "metadata": {},
 "outputs": [
 {
@@ -1369,6 +1369,150 @@
 "print(len(labels), len(emojis), len(plain_text))"
 ]
 },
+{
+"cell_type": "markdown",
+"metadata": {},
+"source": [
+"* Apply stemming and lemmatization (if needed)"
+]
+},
+{
+"cell_type": "code",
+"execution_count": 17,
+"metadata": {},
+"outputs": [],
+"source": [
+"from nltk.stem.snowball import SnowballStemmer\n",
+"from nltk.stem import WordNetLemmatizer\n",
+"from nltk import pos_tag\n",
+"from nltk import word_tokenize\n",
+"from nltk.corpus import wordnet"
+]
+},
+{
+"cell_type": "code",
+"execution_count": 23,
+"metadata": {},
+"outputs": [],
+"source": [
+"def get_wordnet_pos(treebank_tag):\n",
+"\n",
+"    if treebank_tag.startswith('J'):\n",
+"        return wordnet.ADJ\n",
+"    elif treebank_tag.startswith('V'):\n",
+"        return wordnet.VERB\n",
+"    elif treebank_tag.startswith('N'):\n",
+"        return wordnet.NOUN\n",
+"    elif treebank_tag.startswith('R'):\n",
+"        return wordnet.ADV\n",
+"    else:\n",
+"        return wordnet.NOUN"
+]
+},
+{
+"cell_type": "code",
+"execution_count": 19,
+"metadata": {},
+"outputs": [],
+"source": [
+"stemmer = SnowballStemmer(\"english\")\n",
+"for key in plain_text.keys():\n",
+"    stemmed_sent = []\n",
+"    for word in plain_text[key].split(\" \"):\n",
+"        word_stemmed = stemmer.stem(word)\n",
+"        stemmed_sent.append(word_stemmed)\n",
+"    stemmed_sent = (\" \").join(stemmed_sent)\n",
+"    plain_text[key] = stemmed_sent"
+]
+},
+{
+"cell_type": "code",
+"execution_count": 26,
+"metadata": {},
+"outputs": [
+{
+"name": "stdout",
+"output_type": "stream",
+"text": [
+"2 woooaaaahhh\n",
+"4 i wan na know too\n",
+"6 i 'm le stress about turn 30 now i think i'v r...\n",
+"9 got me emot there\n",
+"14 cutest son roll no . 31\n",
+"15 by the summer i should have everyth up and run...\n",
+"18 that pictur wa not taken this morning !\n",
+"26 i so can not be bother with the rest of the da...\n",
+"27 2lit4lif\n",
+"35 hate fall asleep befor i put my phone on the c...\n",
+"36 unexpect saw two of my crush today . this day ...\n",
+"40 elvi whi o whi ? our girl wa such a love stori...\n",
+"42 you'r late i ate them all\n",
+"43 me toooo\n",
+"47 the pressur is just too much\n",
+"51 i broke grammar\n",
+"52 have not desir to go to work today\n",
+"53 omg do n't it scari all i know is that i do no...\n",
+"56 achoo mr. fuck nigga you , you done caught cau...\n",
+"58 i can never catch a dang break !\n",
+"59 pa my p on two hour of sleep\n",
+"60 i 'm realli not amus\n",
+"65 i can help you\n",
+"71 whew i slept good af last night\n",
+"74 this would be epic . pizza and play perfect gi...\n",
+"76 hey , it 1st novemb\n",
+"80 u is to press bitch for me to have been speak ...\n",
+"88 lmfao thought it wa just me be bitter\n",
+"89 yupp yuppp . super prettttyyy , my heart cant ...\n",
+"90 bakit halo halong seri binanggit mo be ? none ...\n",
+" ... \n",
+"68675 go back to dark hair tomorrow , mhmm yasss\n",
+"68677 i miss them so much\n",
+"68678 i wan na feel your gut too\n",
+"68683 everi time\n",
+"68687 i neither own nor watch tv . now go watch cnn\n",
+"68688 revolutionari love\n",
+"68694 ear worm is run in the famili after sing an aw...\n",
+"68696 ill never look at you the same . yeah you got ...\n",
+"68699 it our 3 year anniversari today to celebrate ,...\n",
+"68700 person that scare me\n",
+"68701 damn girl . can u look ani hotter than this ? ...\n",
+"68703 this one fall under the weird crazi one .\n",
+"68704 i 'm not allow to have chocol yet , then i uni...\n",
+"68705 manchest unit manag mourinho slam specialists'...\n",
+"68708 look , mammeh and daddeh ! cuuutee..\n",
+"68709 life is so good with you\n",
+"68710 happi halloween !\n",
+"68712 scotti and kristen halloween costum\n",
+"68713 may pre-month celebr si\n",
+"68717 lmaooo i 'm so proud\n",
+"68720 ambot ! ! !\n",
+"68721 1st of the month ! ! happi 1st of novemb *53 d...\n",
+"68722 ... stay in bed\n",
+"68723 this is gold . gold .\n",
+"68724 thank you for the kind compliment\n",
+"68725 enjoy the silenc\n",
+"68728 thank you yomi !\n",
+"68729 lol . just enjoy the star . music kidhar aur b...\n",
+"68730 thought and prayer for ny\n",
+"68732 thank you so muchhav a happi wednesday and a g...\n",
+"Name: text, Length: 33368, dtype: object\n"
+]
+}
+],
+"source": [
+"lemmatizer = WordNetLemmatizer()\n",
+"for key in plain_text.keys():\n",
+"    lemmatized_sent = []\n",
+"    sent_pos = pos_tag(word_tokenize(plain_text[key]))\n",
+"    for word in sent_pos:\n",
+"        wordnet_pos = get_wordnet_pos(word[1].lower())\n",
+"        word_lemmatized = lemmatizer.lemmatize(word[0], pos=wordnet_pos)\n",
+"        lemmatized_sent.append(word_lemmatized)\n",
+"    lemmatized_sent = (\" \").join(lemmatized_sent)\n",
+"    plain_text[key] = lemmatized_sent\n",
+"print(plain_text)"
+]
+},
 {
 "cell_type": "markdown",
 "metadata": {},
@@ -1378,7 +1522,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 16,
+"execution_count": 27,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -1419,7 +1563,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 17,
+"execution_count": 28,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -1428,7 +1572,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 18,
+"execution_count": 29,
 "metadata": {},
 "outputs": [
 {
@@ -1505,7 +1649,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 19,
+"execution_count": 30,
 "metadata": {},
 "outputs": [
 {
@@ -1533,7 +1677,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 20,
+"execution_count": 31,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -1551,7 +1695,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 22,
+"execution_count": 32,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -1569,13 +1713,15 @@
 },
 {
 "cell_type": "code",
-"execution_count": 23,
+"execution_count": 33,
 "metadata": {},
 "outputs": [
 {
 "name": "stderr",
 "output_type": "stream",
 "text": [
+"C:\\Users\\Maren\\Anaconda3\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
+" from ._conv import register_converters as _register_converters\n",
 "Using TensorFlow backend.\n"
 ]
 }
@@ -1591,7 +1737,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 24,
+"execution_count": 34,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -1623,7 +1769,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 25,
+"execution_count": null,
 "metadata": {},
 "outputs": [
 {
@@ -1631,12 +1777,7 @@
 "output_type": "stream",
 "text": [
 "Train on 18861 samples, validate on 4716 samples\n",
-"Epoch 1/3\n",
-"18861/18861 [==============================] - 1288s 68ms/step - loss: 0.0185 - val_loss: 0.0164\n",
-"Epoch 2/3\n",
-"18861/18861 [==============================] - 1282s 68ms/step - loss: 0.0103 - val_loss: 0.0164\n",
-"Epoch 3/3\n",
-"18861/18861 [==============================] - 1333s 71ms/step - loss: 0.0063 - val_loss: 0.0166\n"
+"Epoch 1/3\n"
 ]
 }
 ],
@@ -2739,7 +2880,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.6.5"
+"version": "3.6.4"
 }
 },
 "nbformat": 4,