Merge branch 'master' of ssh://the-cake-is-a-lie.net:20022/jonas/NLP-LAB

Carsten 2018-05-07 18:16:32 +02:00
commit 645428a73e


@@ -40,18 +40,7 @@
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"'\\n return {\\n \\'word\\': word,\\n \\'is_capitalized\\': word[0].upper() == word[0],\\n \\'prefix-1\\': word[0],\\n \\'suffix-1\\': word[-1],\\n \\'prev_word\\': \\'\\' if index == 0 else sentence[index - 1],\\n \\'next_word\\': \\'\\' if index == len(sentence) - 1 else sentence[index + 1],\\n \\'length\\': len(word),\\n \\'index\\' : index,\\n \\'rev_index\\': len(sentence) - index,\\n \\'sentence_length\\': len(sentence)#,\\n \\'relative_third\\': relative_third,\\n \\'is_punctuation_mark\\': is_punctuation_mark,\\n \\',\\': word == \",\",\\n \\'.\\': word == \".\",\\n \\'!\\': word == \"!\",\\n \\'?\\': word == \"?\"\\n }\\n'"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"outputs": [],
"source": [
"def features(sentence, index):\n",
" word = sentence[index]\n",
@@ -80,27 +69,7 @@
" '!': word == \"!\",\n",
" '?': word == \"?\",\n",
" 'vowels' : vowels\n",
" }\n",
"'''\n",
" return {\n",
" 'word': word,\n",
" 'is_capitalized': word[0].upper() == word[0],\n",
" 'prefix-1': word[0],\n",
" 'suffix-1': word[-1],\n",
" 'prev_word': '' if index == 0 else sentence[index - 1],\n",
" 'next_word': '' if index == len(sentence) - 1 else sentence[index + 1],\n",
" 'length': len(word),\n",
" 'index' : index,\n",
" 'rev_index': len(sentence) - index,\n",
" 'sentence_length': len(sentence)#,\n",
" 'relative_third': relative_third,\n",
" 'is_punctuation_mark': is_punctuation_mark,\n",
" ',': word == \",\",\n",
" '.': word == \".\",\n",
" '!': word == \"!\",\n",
" '?': word == \"?\"\n",
" }\n",
"'''"
" }"
]
},
{
@@ -109,7 +78,7 @@
"metadata": {},
"outputs": [],
"source": [
"test_sentence = ['The','cake','is','a','lie','!']\n",
"#test_sentence = ['The','cake','is','a','lie','!']\n",
"#for i in range(len(test_sentence)):\n",
"# pprint.pprint(features(test_sentence, i))"
]
@@ -225,7 +194,7 @@
},
{
"cell_type": "code",
"execution_count": 16,
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
@@ -338,23 +307,23 @@
"P1.1\n",
"start training…\n",
"training done\n",
"Accuracy: 0.7671041469135187\n",
"Accuracy: 0.7755377014821099\n",
"P1.2\n",
"P1.3\n",
"P1.4\n",
"start training…\n",
"training done\n",
"Accuracy: 0.6359655404139998\n",
"Accuracy: 0.63253390325317\n",
"P1.5\n",
"P1.6\n",
"{'P1.1': 0.7671041469135187,\n",
"{'P1.1': 0.7755377014821099,\n",
" 'P1.2': 0.8936074654423873,\n",
" 'P1.3 -- bi_model': 0.1132791057437996,\n",
" 'P1.3 -- def_model': 0.1447677029791906,\n",
" 'P1.3 -- regexp_model': 0.24232746145017217,\n",
" 'P1.3 -- tri_model': 0.06736863116922003,\n",
" 'P1.3 -- uni_model': 0.8608213982733669,\n",
" 'P1.4': 0.6359655404139998,\n",
" 'P1.4': 0.63253390325317,\n",
" 'P1.5': 0.6044583741861567,\n",
" 'P1.6 -- bi_model': 0.1132791057437996,\n",
" 'P1.6 -- def_model': 0.1447677029791906,\n",
@@ -422,7 +391,7 @@
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "c148aec5450948cea1f32c302344dbab",
"model_id": "c6d8e1186c1f44dcb77b146346b1dedb",
"version_major": 2,
"version_minor": 0
},
@@ -501,15 +470,22 @@
"source": [
"ru_tagged = ru_corp.tagged_sents()\n",
"\n",
"ru_tagged[0]\n",
"\n",
"\n",
"X3,y3,tX3,ty3 = create_training_and_test_set(annotated_sentences=ru_tagged, \n",
" relative_cutoff=0.8)\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Model 04, Performance 2.1"
]
},
{
"cell_type": "code",
"execution_count": 18,
"execution_count": 14,
"metadata": {},
"outputs": [
{
@@ -519,15 +495,198 @@
"P2.1\n",
"start training…\n",
"training done\n",
"Accuracy: 0.7043834741655548\n",
"0.7043834741655548\n"
"Accuracy: 0.7079014288483687\n",
"0.7079014288483687\n"
]
}
],
"source": [
"print(\"P2.1\")\n",
"performances['P2.1'] = model_01(X3,y3,tX3,ty3, max_size=1000)\n",
"print(performances['P2.1'])"
"performances2 = {}\n",
"performances2['P2.1'] = model_01(X3,y3,tX3,ty3, max_size=1000)\n",
"print(performances2['P2.1'])"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Model 05, Performance 2.2"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [],
"source": [
"# write russian text out to file:\n",
"f = open(\"ru_text.txt\", 'w')\n",
"for sentence in ru_tagged:\n",
" for word, tag in sentence:\n",
" f.write(word + \" \")\n",
" f.write(\"\\n\")\n",
"f.close()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"* download the python 3 fork of the rdrpos-tagger: https://github.com/jacopofar/RDRPOSTagger-python-3\n",
"* adjust `RDRPOS_TAGGER_PATH` to match with the download location"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"/home/jonas/Dokumente/gitRepos/NLP-LAB/Jonas_Solutions\n"
]
}
],
"source": [
"import sys, os\n",
"\n",
"dir_path = os.getcwd()\n",
"print(dir_path)"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['Node', '__builtins__', '__cached__', '__doc__', '__file__', '__loader__', '__name__', '__package__', '__spec__', 'tabStr']\n",
"('\\nOutput file:', 'ru_text.txt.TAGGED')\n"
]
}
],
"source": [
"RDRPOS_TAGGER_PATH = r\"/home/jonas/src/RDRPOSTagger-python-3/pSCRDRtagger/\"\n",
"\n",
"sys.path.insert(0, RDRPOS_TAGGER_PATH)\n",
"os.chdir(RDRPOS_TAGGER_PATH)\n",
"\n",
"import RDRPOSTagger as model05_tagger \n",
"\n",
"r = model05_tagger.RDRPOSTagger()\n",
"r.constructSCRDRtreeFromRDRfile(\"../Models/UniPOS/UD_Russian-SynTagRus/train.UniPOS.RDR\")\n",
"DICT = model05_tagger.readDictionary(\"../Models/UniPOS/UD_Russian-SynTagRus/train.UniPOS.DICT\")\n",
"\n",
"os.chdir(dir_path)\n",
"\n",
"r.tagRawCorpus(DICT, \"ru_text.txt\")"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [],
"source": [
"tagged_words = []\n",
"f = open(\"ru_text.txt.TAGGED\", 'r')\n",
"for line in f:\n",
" for splits in line.split():\n",
" cmp = splits.rsplit('/',1)\n",
" if len(cmp) != 2:\n",
" print(\"error parsing: \", cmp)\n",
" else:\n",
" w,t = cmp\n",
" tagged_words.append((w,t))\n"
]
},
{
"cell_type": "code",
"execution_count": 19,
"metadata": {},
"outputs": [],
"source": [
"score_2_2 = 0\n",
"i = 0\n",
"for sent in ru_tagged:\n",
" for tagged_w in sent:\n",
" if tagged_w[1] == tagged_words[i][1]:\n",
" score_2_2 += 1\n",
" i += 1\n",
"performances2['P2.2'] = score_2_2 / len(tagged_words)\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Results of performance 2.2"
]
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"{'P2.1': 0.7079014288483687, 'P2.2': 0.8899716702179293}\n"
]
}
],
"source": [
"pprint.pprint(performances2)"
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "aeb29243e58d49b8942122ceec03fab5",
"version_major": 2,
"version_minor": 0
},
"text/html": [
"<p>Failed to display Jupyter Widget of type <code>FigureCanvasNbAgg</code>.</p>\n",
"<p>\n",
" If you're reading this message in the Jupyter Notebook or JupyterLab Notebook, it may mean\n",
" that the widgets JavaScript is still loading. If this message persists, it\n",
" likely means that the widgets JavaScript library is either not installed or\n",
" not enabled. See the <a href=\"https://ipywidgets.readthedocs.io/en/stable/user_install.html\">Jupyter\n",
" Widgets Documentation</a> for setup instructions.\n",
"</p>\n",
"<p>\n",
" If you're reading this message in another frontend (for example, a static\n",
" rendering on GitHub or <a href=\"https://nbviewer.jupyter.org/\">NBViewer</a>),\n",
" it may mean that your frontend doesn't currently support widgets.\n",
"</p>\n"
],
"text/plain": [
"FigureCanvasNbAgg()"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"fig_2, ax_2 = plt.subplots()\n",
"plt.bar(np.arange(len(performances2)), performances2.values())\n",
"plt.xticks(np.arange(len(performances2)), performances2.keys(), rotation=30, ha='right')\n",
"plt.tight_layout()\n",
"plt.show()\n"
]
},
{