evaluation adjustments

Carsten_Solutions/Emoji-Emoticon-Assignment_new.csv
Normal file
@@ -0,0 +1,26 @@
Icon,Icon,Icon,Icon,Icon,Icon,Icon,Icon,Icon,Icon,Icon,Emoji,Meaning
:‑) :),:-] :],:-3 :3,:-> :>,8-) 8),:-} :},:o),:c),:^),=],=),☺️🙂😊😀😁,Smiley or happy face.[4][5][6]
:‑D :D,8‑D 8D,x‑D xD,X‑D XD,=D,=3,B^D,,,,,😃😄😆😍,"Laughing,[4] big grin,[5][6] laugh with glasses,[7] or wide-eyed surprise[8]"
:-)),,,,,,,,,,,,Very happy or double chin[7]
:‑( :(,:‑c :c,:‑< :<,:‑[ :[,:-||,>:[,:{,:@,>:(,,,☹️🙁😠😡😞😟😣😖,"Frown,[4][5][6] sad,[9] angry,[7] pouting"
:'‑( :'(,,,,,,,,,,,😢😭,Crying[9]
:'‑) :'),,,,,,,,,,,😂,Tears of happiness[9]
D‑':,D:<,D:,D8,D;,D=,DX,,,,,😨😧😦😱😫😩,"Horror, disgust, sadness, great dismay[5][6] (right to left)"
:‑O :O,:‑o :o,:-0,8‑0,>:O,,,,,,,😮😯😲,"Surprise,[3] shock,[4][10] yawn[11]"
:-* :*,:×,,,,,,,,,,😗😙😚😘😍,Kiss
;‑) ;),*-) *),;‑] ;],;^),":‑,",;D,,,,,,😉😜😘,"Wink,[4][5][6] smirk[10][11]"
:‑P :P,X‑P XP,x‑p xp,:‑p :p,:‑Þ :Þ,:‑þ :þ,:‑b :b,d:,=p,>:P,,😛😝😜🤑,"Tongue sticking out, cheeky/playful,[4] blowing a raspberry"
:‑/ :/,:‑.,>:\,>:/,:\,=/,=\,:L,=L,:S,,🤔😕😟,"Skeptical, annoyed, undecided, uneasy, hesitant[4]"
:‑| :|,,,,,,,,,,,😐😑,"Straight face[5] no expression, indecision[9]"
:$,,,,,,,,,,,😳😞😖,"Embarrassed,[6] blushing[7]"
:‑X :X,:‑# :#,:‑& :&,,,,,,,,,🤐😶,"Sealed lips or wearing braces,[4] tongue-tied[9]"
O:‑) O:),0:‑3 0:3,0:‑) 0:),0;^),,,,,,,,😇👼,"Angel,[4][5][10] saint,[9] innocent"
>:‑) >:),}:‑) }:),3:‑) 3:),>;),,,,,,,,😈,"Evil,[5] devilish[9]"
|;‑),|‑O,,,,,,,,,,😎😪,"Cool,[9] bored/yawning[10]"
:‑J,,,,,,,,,,,😏😒,Tongue-in-cheek[12]
#‑),,,,,,,,,,,—,Partied all night[9]
%‑) %),,,,,,,,,,,😵😕🤕,"Drunk,[9] confused"
:‑###.. :###..,,,,,,,,,,,🤒😷🤢,Being sick[9]
<:‑|,,,,,,,,,,,—,"Dumb, dunce-like[10]"
"',:-|","',:-l",,,,,,,,,,🤨,"Scepticism, disbelief, or disapproval[13][14]"
<_<,>_>,,,,,,,,,,,Sideways look. Devious or guilty.
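To use this table in code, here is a minimal sketch for loading it into an emoticon-to-emoji lookup. It assumes the 13-column layout above (11 icon columns, then emoji, then meaning); the function name and default path are illustrative, not part of this commit.

    import csv

    def load_assignment(path="Carsten_Solutions/Emoji-Emoticon-Assignment_new.csv"):
        # Each of the 11 icon cells may hold several space-separated
        # variants (e.g. ":-) :)"); all of them map to the emoji string
        # in column 12. Rows without an emoji map to ''.
        lookup = {}
        with open(path, encoding="utf-8") as f:
            reader = csv.reader(f)
            next(reader)  # skip the Icon,...,Emoji,Meaning header
            for row in reader:
                for cell in row[:11]:
                    for variant in cell.split():
                        lookup[variant] = row[11]
        return lookup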
Project/Assignment_emoticon_emoji_backup.txt
Normal file
@@ -0,0 +1 @@
[[':c)', '☺'], [':c)', '☺'], [':-3', '😊'], [':-}', '☺'], [':>', '☺'], ['=)', '☺'], [':‑)', '☺'], [':^)', '😊'], ['8)', '☺'], [':}', '☺'], [':->', '☺'], [':-]', '☺'], [':]', '☺'], ['=]', '☺'], [':o)', '☺'], [':)', '☺'], ['8-)', '☺'], [':3', '☺'], ['xD', '😆'], ['x‑D', '😆'], ['=3', '😃'], ['B^D', '😄'], ['XD', '😆'], ['=D', '😃'], ['8‑D', '😃'], [':‑D', '😃'], ['8D', '😃'], [':D', '😃'], ['X‑D', '😆'], [':‑[', '🙁'], [':@', '😡'], [':‑(', '🙁'], [':(', '🙁'], [':{', '🙁'], [':‑<', '🙁'], ['>:(', '😡'], [':c', '☹'], [':<', '☹'], [':‑c', '☹'], ['>:[', '😡'], [':-||', '🙁'], [':[', '🙁'], [":'‑(", '😢'], [":'(", '😢'], [":')", '😂'], [":'‑)", '😂'], ['DX', '😫'], ['D:', '😧'], ['D:<', '😩'], ['D8', '😦'], ['D=', '😦'], ['D;', '😩'], ["D‑':", '😨'], [':-0', '😮'], [':‑O', '😮'], [':o', '😮'], [':O', '😮'], [':‑o', '😮'], ['>:O', '😲'], ['8‑0', '😮'], [':×', '😘'], [':*', '😘'], [':-*', '😘'], [';]', '😉'], [';)', '😉'], [';‑)', '😉'], ['*-)', '😜'], [';^)', '😜'], [';D', '😜'], [';‑]', '😉'], ['*)', '😜'], [':‑,', '😘'], [':‑Þ', '😛'], [':þ', '😛'], [':‑þ', '😛'], [':‑b', '😛'], ['x‑p', '😝'], ['>:P', '😜'], ['XP', '😝'], [':Þ', '😛'], [':P', '😛'], ['xp', '😝'], [':b', '😛'], ['=p', '😛'], ['d:', '😛'], [':p', '😛'], [':‑p', '😛'], [':‑P', '😛'], ['X‑P', '😝'], [':\\', '🤔'], ['>:/', '🤔'], [':/', '🤔'], ['=\\', '🤔'], [':L', '😕'], [':S', '😕'], [':‑/', '🤔'], ['=L', '😕'], [':‑.', '🤔'], ['=/', '😕'], ['>:\\', '🤔'], [':‑|', '😐'], [':|', '😐'], [':$', '😳'], [':&', '😶'], [':‑X', '🤐'], [':#', '🤐'], [':‑#', '🤐'], [':‑&', '🤐'], [':X', '🤐'], ['0;^)', '😇'], ['0:‑3', '😇'], ['O:‑)', '😇'], ['0:)', '😇'], ['0:3', '😇'], ['0:‑)', '😇'], ['O:)', '😇'], ['3:)', '😈'], ['>:‑)', '😈'], ['>:)', '😈'], ['>;)', '😈'], ['}:)', '😈'], ['}:‑)', '😈'], ['3:‑)', '😈'], ['|‑O', '😪'], ['|;‑)', '😪'], [':‑J', '😏'], ['%‑)', '😵'], ['%)', '😵'], [':###..', '🤢'], [':‑###..', '🤢'], ["',:-|", '\U0001f928'], ["',:-l", '\U0001f928']]
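Since the backup file is a single Python literal, it can be read back without custom parsing; a minimal sketch:

    import ast

    # The backup is one Python-literal list of [emoticon, emoji] pairs,
    # so ast.literal_eval can parse it safely.
    with open("Project/Assignment_emoticon_emoji_backup.txt", encoding="utf-8") as f:
        pairs = ast.literal_eval(f.read())

    emoticon_to_emoji = dict(pairs)  # e.g. emoticon_to_emoji[':D'] == '😃'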
@@ -1,97 +1,97 @@
-Sentence,prediction,topic hit,sentiment hit,both,ranked
+Sentence prediction topic hit sentiment hit both ranked
-Hi how are you?,,,,,
+0 Hi how are you? 😁😂😌😎😅😉🙌🤟
-do you've got time,,,,,
+1 do you've got time 😌😁😎😂🙌🤸🚯🤟
-I go out for party tonight,,,,,
+2 I go out for party tonight 😂😅😁😌🔰🔢🔣🔤
-I'll take the bus or train,,,,,
+3 I'll take the bus or train 😂😅😢😭🚆🚄🤘🤣
-You look gorgeous in this dress,,,,,
+4 You look gorgeous in this dress 😌🙌😀😎👗👥👤🤵
-How hard was the exam,,,,,
+5 How hard was the exam 😂😅😁😌😎😉🤘🤣
-please can you give me some stuff,,,,,
+6 please can you give me some stuff 😂😅😁😉😎😏🤙🤟
-whats your name,,,,,
+7 whats your name 😂😅😁😢😌😎😉📛
-where are you from,,,,,
+8 where are you from 😂😅😁😢😌😉😎🤟
-what is your favourite color,,,,,
+9 what is your favourite color 😌🙌😀😎😁😊😋😉
-Do you like to play soccer this evening,,,,,
+10 Do you like to play soccer this evening 😅😂😢😭😁😔⚽🤟
-do you have any pets,,,,,
+11 do you have any pets 😂😅😁😢😌🤸🚯🤟
-I watch television all day,,,,,
+12 I watch television all day 😂😅😢😁😉😌😎⌚
-there some fake news but most of the time i dont care,,,,,
+13 there some fake news but most of the time i dont care 😂😅😁😌🤘🗾🗽💩
-i you fucking kidding,,,,,
+14 i you fucking kidding 😂😅😢😁🤟🇮🇴🇼
-i we have to hand in our report,,,,,
+15 i we have to hand in our report 😌😁😎🙌✋🤟👊👋
-is the world real,,,,,
+16 is the world real 😂😅😁😉😎😌🤘🤣
-i am you father,,,,,
+17 i am you father 😂😅😢😭🎅🤟🇴🇼
-is this a true cite,,,,,
+18 is this a true cite 😂😅😁😌😢😘🇦💠
-i like working for my phd,,,,,
+19 i like working for my phd 😂😅😢😁🔢🔣🔰🇩
-I at the end of my Master studes,,,,,
+20 I at the end of my Master studes 😅😂😢😭🤘🗽🍲🥩
-I like chilling with my friends outside,,,,,
+21 I like chilling with my friends outside 😂😅😁😉📶🥤💘💝
-are we allowed to extend our presentation time to 35min,,,,,
+22 are we allowed to extend our presentation time to 35min 😌😁😂😅😎😉🙌🎁
-yes you are because today there no other groups presenting,,,,,
+23 yes you are because today there no other groups presenting 😌🙌😀😊🎁🚷⛔🚳
-i would love if we needn't write a report,,,,,
+24 i would love if we needn't write a report 😁😂😌😅💌🏩🤟😘
-"no sorry, you have to wirite one.",,,,,
+25 no sorry, you have to wirite one. 😢😭😅😔⛔🚳🚷🔞
-"ALso Google Docs is not enough, you shell use share latex for your document",,,,,
+26 ALso Google Docs is not enough, you shell use share latex for your document 😂😅😁😌🐚🔢🔣🔰
-I'll hope we get a good grade,,,,,
+27 I'll hope we get a good grade 😁😎😌😉😘🇦🙅💠
-I'll really could imagine working in NLP in the feature,,,,,
+28 I'll really could imagine working in NLP in the feature 😂😅😢😁👤⛳👥🤵
-The weather today is really nice,,,,,
+29 The weather today is really nice 😂😅😢😭😁😉🤘🤣
-I like to take my dog out for a walk,,,,,
+30 I like to take my dog out for a walk 😅😂😢😭🐕🐶🌭😘
-I am a huge soccer fan,,,,,
+31 I am a huge soccer fan 😂😌😁😅⚽😘🇦💠
-I just hate bad tutorials,,,,,
+32 I just hate bad tutorials 😢😅😂😭😔😤😁😌
-I am so glad I bought new shoes yesterday,,,,,
+33 I am so glad I bought new shoes yesterday 😁😎😌😉🆕🥿👞👠
-My mom likes ice cream,,,,,
+34 My mom likes ice cream 😌😁😎🙌🍨🍦🍧🏒
-This so so much work...,,,,,
+35 This so so much work... 😅😂😢😭😔😁😌😤
-I want to have holidays,,,,,
+36 I want to have holidays 😅😂😢😁😭😌😎😉
-Please come to my birthday party,,,,,
+37 Please come to my birthday party 😂😁😌😅😎😉🙌🎂
-why are some people just not replying to emails,,,,,
+38 why are some people just not replying to emails 😂😅😁😉😎😌😏🚯
-I am sick of studying,,,,,
+39 I am sick of studying 😢😅😭😂🗾🧵🧶🥛
-Living in Germany can be expensive,,,,,
+40 Living in Germany can be expensive 😂😅😢😭👤🤵⛳👥
-I love my new Iphone,,,,,
+41 I love my new Iphone 😌😎😁😀🆕💌🏩🌑
-Teddy bears are cute,,,,,
+42 Teddy bears are cute 😌🙌😀😁😎😊🐻🧸
-The sun is shining today,,,,,
+43 The sun is shining today 😂😅😁😉⛅🌞🤘🤣
-I am really stressed out,,,,,
+44 I am really stressed out 😅😢😂😭😔😤😁😏
-Mensa food is disgusting,,,,,
+45 Mensa food is disgusting 😂😅😁😌🥫🍲🥘😋
-I am so disappointed of this lecture,,,,,
+46 I am so disappointed of this lecture 😂😅😁😌🗾🧵🧶🥛
-I usually take my bike to work,,,,,
+47 I usually take my bike to work 😅😂😁😌😎😉😢🙌
-"This is so sad, I am almost crying",,,,,
+48 This is so sad, I am almost crying 😢😭😅😔😂😤😩😒
-My car broke down yesterday,,,,,
+49 My car broke down yesterday 😅😂😢😭🚋🚓🚃👎
-What is the usual time of study in Germany?,,,,,
+50 What is the usual time of study in Germany? 😂😅😢😭🤘🗽🗾🤵
-I try to eat healthy,,,,,
+51 I try to eat healthy 😂😅😢😁😌😉😎😭
-"Seeing people getting good marks with no effort, makes me angry",,,,,
+52 Seeing people getting good marks with no effort, makes me angry 😂😅😁😢🙅🚷❌🚳
-Live long and prosper,,,,,
+53 Live long and prosper 😂😅😢😁🏹🍴🏀🔩
-i love books about wizards,,,,,
+54 i love books about wizards 😁😌😎😉📚📘📖📕
-No one understands me,,,,,
+55 No one understands me 😂😅😁😢🤙🕐🤪🔉
-Why do we even have to study?,,,,,
+56 Why do we even have to study? 😅😂😢😁😭😌🤸🚯
-Tonight I will go drinking,,,,,
+57 Tonight I will go drinking 😂😅😢😁😌😉😎🍹
-Lets have a party,,,,,
+58 Lets have a party 😂😅😁😌😎😘🇦💠
-I dont think there is any bias in these sentences,,,,,
+59 I dont think there is any bias in these sentences 😂😅😁😌👤🤵⛳👥
-I really like to get this freedom in our work,,,,,
+60 I really like to get this freedom in our work 😅😂😢😭👤🤵⛳👥
-No one will care anyway,,,,,
+61 No one will care anyway 😂😁😅😉🕐🤪🔉🔞
-worth it?,,,,,
+62 worth it? 🙌😌😀😊😎😋😁😍
-I really thought this will be a hard semester,,,,,
+63 I really thought this will be a hard semester 😅😂😢😭💭😘🇦💠
-its hard for a schedule to fit all the expectations,,,,,
+64 its hard for a schedule to fit all the expectations 😌😁😎🙌😘🔢🔣🔰
-dont have enough time for all the sport i want to do,,,,,
+65 dont have enough time for all the sport i want to do 😂😅😁😌🤸🔢🔣🚯
-all in all i cant imagine how we are able to stay motivated ,,,,,
+66 all in all i cant imagine how we are able to stay motivated 😂😅😢😁👤🤵⛳👥
-do you prefere star wars or star trek,,,,,
+67 do you prefere star wars or star trek 😌🙌😀😊🌟🌠⭐🌃
-Mr. Spock is the best!!,,,,,
+68 Mr. Spock is the best!! 😌😂😁😅😎😉🤘🤣
-I would like to live in the US,,,,,
+69 I would like to live in the US 😂😅😢😁👤⛳🤵👥
-Studying is so much fun!! ,,,,,
+70 Studying is so much fun!! 😂😁😉😅😎😌🙏😀
-I dont think so at all ...,,,,,
+71 I dont think so at all ... 😂😌😅😁😎🌉🌆📲
-i think all the effort will pay off,,,,,
+72 i think all the effort will pay off 😂😅😢😁📴🤘🇹🤟
-take a flight to ibiza,,,,,
+73 take a flight to ibiza 😂😅😢😭😏😘🇦💠
-better eating a kebab or a burger,,,,,
+74 better eating a kebab or a burger 😂😅😁😢😌😘🇦💠
-nothing at all i hate meat,,,,,
+75 nothing at all i hate meat 😢😭😅😂🌉🍖🌆🥩
-jesus christ!,,,,,
+76 jesus christ! 😂😅😁😌😎😉🙌😀
-so what do you prefere to eat?,,,,,
+77 so what do you prefere to eat? 😂😅😢😁😭🤸🚯🤟
-pizza or a different heathy meal,,,,,
+78 pizza or a different heathy meal 😂😁😅😌🍕😘🇦💠
-"oh dear, you kidding",,,,,
+79 oh dear, you kidding 😌😁😎🙌😉😀😂🤟
-Donald Trump met Putin outside the USA,,,,,
+80 Donald Trump met Putin outside the USA 😂😅😢😭😁😏🤘🤣
-Who constructed this bridge,,,,,
+81 Who constructed this bridge 😌🙌😁😀😎😊😋😉
-I think this church is the largest in town,,,,,
+82 I think this church is the largest in town 😁😌😎🙌⛪⛳👥🤵
-you have to lost a bet to argue why you have this horrible hair cut,,,,,
+83 you have to lost a bet to argue why you have this horrible hair cut 😅😂😢😭🤟🥩👱😘
-hopefully we will have wolrd peace in feature,,,,,
+84 hopefully we will have wolrd peace in feature 😂😅😁😉👤🤵⛳👥
-so we can focus on mor important projects in our world,,,,,
+85 so we can focus on mor important projects in our world 😁😌😂😎👥👤🍖⛳
-"yes, climate change is real",,,,,
+86 yes, climate change is real 😂😅😁😉😢😌😎😏
-do you will recommend this nlp lab,,,,,
+87 do you will recommend this nlp lab 😁😎😉😌🥼🤸🚯🤟
-jonas have to focus on his oral exam tomorrow,,,,,
+88 jonas have to focus on his oral exam tomorrow 😌😁😅😂🍖🚩🤣🔛
-i wish you all the best,,,,,
+89 i wish you all the best 😂😅😁😌🤟🤘🇹🤣
-happy bithday darling,,,,,
+90 happy bithday darling 😁😌😂😎😅😉🙌😀
-i love mixing beer and wine with a shot of tequila,,,,,
+91 i love mixing beer and wine with a shot of tequila 😅😂😢😭🏩🍺💌🍷
-i love you this much my heart will broke if you leave me,,,,,
+92 i love you this much my heart will broke if you leave me 😅😂😢😁🤟💜💟💛
-does everybody understand my true feelings,,,,,
+93 does everybody understand my true feelings 😂😁😅😉😎😌🙌😀
-i think many people will read this and will be confused later,,,,,
+94 i think many people will read this and will be confused later 😂😅😁😌🔩🏹🍴🏀
-buying a red car will be more expensive,,,,,
+95 buying a red car will be more expensive 😂😅😁😉🚃🍎🚋🚓
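The rewritten table drops the empty evaluation columns and stores, per row, the index, the sentence and the predicted emojis. A minimal sketch for reading it back (assuming the tab separator used by the to_csv call later in this diff):

    import pandas as pd

    # sep='\t' matches the df.to_csv(..., sep='\t', encoding='utf-8')
    # call further down in this commit
    df = pd.read_csv("Evaluation Sentences - Tabellenblatt1.csv",
                     sep='\t', index_col=0, encoding='utf-8')
    print(df[["Sentence", "prediction"]].head())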
@@ -102,6 +102,9 @@
"#navigation into right path and generating classifier\n",
"import sys\n",
"sys.path.append(\"..\")\n",
+"sys.path.append(\"../naive_approach\")\n",
+"\n",
+"\n",
"\n",
"import simple_approach.simple_twitter_learning as stl\n",
"clf_advanced = stl.pipeline_manager.load_pipeline_from_files( '../simple_approach/custom_classifier', ['keras_model'], ['vectorizer', 'keras_model'])\n",
@@ -143,15 +146,13 @@
{
"cell_type": "code",
"execution_count": 5,
-"metadata": {
-"collapsed": true
-},
+"metadata": {},
"outputs": [],
"source": [
"#sys.path.append(\"..\")\n",
"#print(sys.path)\n",
"\n",
-"import naive_approach.naive_approach as clf_naive"
+"import naive_approach as clf_naive"
]
},
{
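The two changes in the hunk above belong together: once ../naive_approach itself is on sys.path, the module file naive_approach.py is importable at top level, so the package prefix is no longer needed. A minimal sketch of the resulting setup:

    import sys
    sys.path.append("..")                  # repo root, for simple_approach
    sys.path.append("../naive_approach")   # module dir, for the top-level import

    import naive_approach as clf_naive     # was: naive_approach.naive_approach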
@@ -248,8 +249,7 @@
" if(current_message != \"\"):\n",
" p = merged_prediction(msg = current_message, target_emojis=top_emojis)\n",
"\n",
-" predictions = p\n",
-" update_descriptions()"
+" predictions = p"
]
},
{
@@ -265,103 +265,24 @@
"metadata": {},
"outputs": [
{
-"data": {
-"text/html": [
-"<div>\n",
-"<style>\n",
-" .dataframe thead tr:only-child th {\n",
-" text-align: right;\n",
-" }\n",
-"\n",
-" .dataframe thead th {\n",
-" text-align: left;\n",
-" }\n",
-"\n",
-" .dataframe tbody tr th {\n",
-" vertical-align: top;\n",
-" }\n",
-"</style>\n",
-"<table border=\"1\" class=\"dataframe\">\n",
-" <thead>\n",
-" <tr style=\"text-align: right;\">\n",
-" <th></th>\n",
-" <th>Sentence</th>\n",
-" <th>prediction</th>\n",
-" <th>topic hit</th>\n",
-" <th>sentiment hit</th>\n",
-" <th>both</th>\n",
-" <th>ranked</th>\n",
-" </tr>\n",
-" </thead>\n",
-" <tbody>\n",
-" <tr>\n",
-" <th>0</th>\n",
-" <td>Hi how are you?</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" </tr>\n",
-" <tr>\n",
-" <th>1</th>\n",
-" <td>do you've got time</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" </tr>\n",
-" <tr>\n",
-" <th>2</th>\n",
-" <td>I go out for party tonight</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" </tr>\n",
-" <tr>\n",
-" <th>3</th>\n",
-" <td>I'll take the bus or train</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" </tr>\n",
-" <tr>\n",
-" <th>4</th>\n",
-" <td>You look gorgeous in this dress</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" <td>NaN</td>\n",
-" </tr>\n",
-" </tbody>\n",
-"</table>\n",
-"</div>"
-],
-"text/plain": [
-" Sentence prediction topic hit sentiment hit \\\n",
-"0 Hi how are you? NaN NaN NaN \n",
-"1 do you've got time NaN NaN NaN \n",
-"2 I go out for party tonight NaN NaN NaN \n",
-"3 I'll take the bus or train NaN NaN NaN \n",
-"4 You look gorgeous in this dress NaN NaN NaN \n",
-"\n",
-" both ranked \n",
-"0 NaN NaN \n",
-"1 NaN NaN \n",
-"2 NaN NaN \n",
-"3 NaN NaN \n",
-"4 NaN NaN "
-]
-},
-"execution_count": 9,
-"metadata": {},
-"output_type": "execute_result"
+"ename": "ParserError",
+"evalue": "Error tokenizing data. C error: Expected 1 fields in line 27, saw 2\n",
+"output_type": "error",
+"traceback": [
+"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+"\u001b[0;31mParserError\u001b[0m Traceback (most recent call last)",
+"\u001b[0;32m<ipython-input-9-7e24563a7fda>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;31m# get table\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mpandas\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mpd\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mdf\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpd\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread_csv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"Evaluation Sentences - Tabellenblatt1.csv\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0mdf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mhead\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36mparser_f\u001b[0;34m(filepath_or_buffer, sep, delimiter, header, names, index_col, usecols, squeeze, prefix, mangle_dupe_cols, dtype, engine, converters, true_values, false_values, skipinitialspace, skiprows, nrows, na_values, keep_default_na, na_filter, verbose, skip_blank_lines, parse_dates, infer_datetime_format, keep_date_col, date_parser, dayfirst, iterator, chunksize, compression, thousands, decimal, lineterminator, quotechar, quoting, escapechar, comment, encoding, dialect, tupleize_cols, error_bad_lines, warn_bad_lines, skipfooter, skip_footer, doublequote, delim_whitespace, as_recarray, compact_ints, use_unsigned, low_memory, buffer_lines, memory_map, float_precision)\u001b[0m\n\u001b[1;32m 653\u001b[0m skip_blank_lines=skip_blank_lines)\n\u001b[1;32m 654\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 655\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0m_read\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfilepath_or_buffer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 656\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 657\u001b[0m \u001b[0mparser_f\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__name__\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36m_read\u001b[0;34m(filepath_or_buffer, kwds)\u001b[0m\n\u001b[1;32m 409\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 410\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 411\u001b[0;31m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mparser\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnrows\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 412\u001b[0m \u001b[0;32mfinally\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 413\u001b[0m \u001b[0mparser\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mclose\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36mread\u001b[0;34m(self, nrows)\u001b[0m\n\u001b[1;32m 1003\u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'skipfooter not supported for iteration'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1004\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1005\u001b[0;31m \u001b[0mret\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_engine\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnrows\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1006\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1007\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moptions\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'as_recarray'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36mread\u001b[0;34m(self, nrows)\u001b[0m\n\u001b[1;32m 1746\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mread\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnrows\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1747\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1748\u001b[0;31m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_reader\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnrows\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1749\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mStopIteration\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1750\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_first_chunk\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+"\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.TextReader.read (pandas/_libs/parsers.c:10862)\u001b[0;34m()\u001b[0m\n",
+"\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.TextReader._read_low_memory (pandas/_libs/parsers.c:11138)\u001b[0;34m()\u001b[0m\n",
+"\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.TextReader._read_rows (pandas/_libs/parsers.c:11884)\u001b[0;34m()\u001b[0m\n",
+"\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.TextReader._tokenize_rows (pandas/_libs/parsers.c:11755)\u001b[0;34m()\u001b[0m\n",
+"\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.raise_parser_error (pandas/_libs/parsers.c:28765)\u001b[0;34m()\u001b[0m\n",
+"\u001b[0;31mParserError\u001b[0m: Error tokenizing data. C error: Expected 1 fields in line 27, saw 2\n"
+]
}
],
"source": [
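The ParserError above is pandas rejecting a row with more fields than the assumed delimiter implies. A hedged sketch of one workaround: let the python engine sniff the delimiter (sep=None), or pass the tab separator explicitly once the file has been rewritten, as in this commit.

    import pandas as pd

    # let pandas sniff the delimiter instead of assuming ','
    df = pd.read_csv("Evaluation Sentences - Tabellenblatt1.csv",
                     sep=None, engine='python', encoding='utf-8')
    df.head()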
@@ -373,48 +294,50 @@
},
{
"cell_type": "code",
-"execution_count": 11,
+"execution_count": null,
"metadata": {},
-"outputs": [
-{
-"name": "stdout",
-"output_type": "stream",
-"text": [
-"Hi how are you?\n"
-]
-},
-{
-"ename": "FileNotFoundError",
-"evalue": "[Errno 2] No such file or directory: 'word2vec.model'",
-"output_type": "error",
-"traceback": [
-"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
-"\u001b[0;31mFileNotFoundError\u001b[0m Traceback (most recent call last)",
-"\u001b[0;32m<ipython-input-11-22a65efd4496>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msentence\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 5\u001b[0;31m \u001b[0mtrigger_new_prediction\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mall_chat\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcurrent_message\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msentence\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 6\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprediction\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
-"\u001b[0;32m<ipython-input-8-20fe10f899eb>\u001b[0m in \u001b[0;36mtrigger_new_prediction\u001b[0;34m(all_chat, current_message)\u001b[0m\n\u001b[1;32m 11\u001b[0m \u001b[0;31m#merged prediction\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 12\u001b[0m \u001b[0;32mif\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcurrent_message\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0;34m\"\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 13\u001b[0;31m \u001b[0mp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmerged_prediction\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmsg\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcurrent_message\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtarget_emojis\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtop_emojis\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 14\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 15\u001b[0m \u001b[0mpredictions\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mp\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-"\u001b[0;32m<ipython-input-7-5ed291336bae>\u001b[0m in \u001b[0;36mmerged_prediction\u001b[0;34m(msg, split, number, target_emojis)\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0;31m#predict emojis with the naive approach\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 8\u001b[0;31m \u001b[0mprediction_naive\u001b[0m \u001b[0;34m,\u001b[0m \u001b[0mprediction_naive_values\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mclf_naive\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpredict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msentence\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmsg\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlookup\u001b[0m\u001b[0;34m=\u001b[0m \u001b[0mtmp_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mn\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnumber_naive\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 9\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 10\u001b[0m \u001b[0;31m#filter 0 values\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-"\u001b[0;32m~/GitRepos/NLP-LAB/Project/naive_approach/naive_approach.py\u001b[0m in \u001b[0;36mpredict\u001b[0;34m(sentence, lookup, emojis_to_consider, criteria, lang, n, t)\u001b[0m\n\u001b[1;32m 98\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mpredict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msentence\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlookup\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0memojis_to_consider\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m"all"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcriteria\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m"threshold"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlang\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'eng'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mn\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m10\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mt\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m0.9\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 99\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 100\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mevaluate_sentence\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msentence\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlang\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0memojis_to_consider\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0memojis_to_consider\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 101\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 102\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-"\u001b[0;32m~/GitRepos/NLP-LAB/Project/naive_approach/naive_approach.py\u001b[0m in \u001b[0;36mevaluate_sentence\u001b[0;34m(sentence, description_key, lang, emojis_to_consider, stem)\u001b[0m\n\u001b[1;32m 44\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mevaluate_sentence\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msentence\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdescription_key\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'description'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlang\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'eng'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0memojis_to_consider\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m"all"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstem\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 45\u001b[0m \u001b[0;31m# assumes there is a trained w2v model stored in the same directory!\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 46\u001b[0;31m \u001b[0mwv\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mKeyedVectors\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m"word2vec.model"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmmap\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'r'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 47\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 48\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mstem\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/gensim/models/keyedvectors.py\u001b[0m in \u001b[0;36mload\u001b[0;34m(cls, fname_or_handle, **kwargs)\u001b[0m\n\u001b[1;32m 120\u001b[0m \u001b[0;34m@\u001b[0m\u001b[0mclassmethod\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 121\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mload\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcls\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfname_or_handle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 122\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0msuper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mBaseKeyedVectors\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcls\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfname_or_handle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 123\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 124\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0msimilarity\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mentity1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mentity2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/gensim/utils.py\u001b[0m in \u001b[0;36mload\u001b[0;34m(cls, fname, mmap)\u001b[0m\n\u001b[1;32m 423\u001b[0m \u001b[0mcompress\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msubname\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mSaveLoad\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_adapt_by_suffix\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 424\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 425\u001b[0;31m \u001b[0mobj\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0munpickle\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 426\u001b[0m \u001b[0mobj\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_load_specials\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfname\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmmap\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcompress\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msubname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 427\u001b[0m \u001b[0mlogger\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minfo\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m"loaded %s"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/gensim/utils.py\u001b[0m in \u001b[0;36munpickle\u001b[0;34m(fname)\u001b[0m\n\u001b[1;32m 1327\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1328\u001b[0m \"\"\"\n\u001b[0;32m-> 1329\u001b[0;31m \u001b[0;32mwith\u001b[0m \u001b[0msmart_open\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfname\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'rb'\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1330\u001b[0m \u001b[0;31m# Because of loading from S3 load can't be used (missing readline in smart_open)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1331\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0msys\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mversion_info\u001b[0m \u001b[0;34m>\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0;36m3\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/smart_open/smart_open_lib.py\u001b[0m in \u001b[0;36msmart_open\u001b[0;34m(uri, mode, **kw)\u001b[0m\n\u001b[1;32m 179\u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mTypeError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'mode should be a string'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 180\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 181\u001b[0;31m \u001b[0mfobj\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0m_shortcut_open\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0muri\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkw\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 182\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mfobj\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 183\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mfobj\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-"\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/smart_open/smart_open_lib.py\u001b[0m in \u001b[0;36m_shortcut_open\u001b[0;34m(uri, mode, **kw)\u001b[0m\n\u001b[1;32m 285\u001b[0m \u001b[0mmode\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmode\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreplace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'b'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m''\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 286\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 287\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mio\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mopen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mparsed_uri\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0muri_path\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmode\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mopen_kwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 288\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 289\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
-"\u001b[0;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: 'word2vec.model'"
-]
-}
-],
+"outputs": [],
"source": [
+"all_predictions = []\n",
+"\n",
"for index, row in df.iterrows():\n",
" sentence = row[\"Sentence\"]\n",
-" print(sentence)\n",
+" #print(sentence)\n",
"\n",
" trigger_new_prediction(all_chat=\"\", current_message = sentence)\n",
-" print(prediction)\n",
+" #print(predictions)\n",
" \n",
+" #prediction to string\n",
+" tmp_prediction = \"\".join(predictions)\n",
+" \n",
+" #construct the preediction column\n",
+" all_predictions.append(tmp_prediction)\n",
" "
]
},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"df[\"prediction\"] = all_predictions\n",
+"\n",
+"df.head()\n",
+"\n"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {
+"collapsed": true
+},
+"outputs": [],
+"source": [
+"df.to_csv(\"Evaluation Sentences - Tabellenblatt1.csv\", sep='\\t', encoding='utf-8')"
+]
+},
{
"cell_type": "code",
"execution_count": null,
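The FileNotFoundError that this commit clears from the output comes from naive_approach.py loading word2vec.model relative to the notebook's working directory. A hedged sketch of a more robust load, meant for inside naive_approach.py and assuming the model file sits next to that module (the signature KeyedVectors.load(..., mmap='r') is the one the traceback shows):

    import os
    from gensim.models import KeyedVectors

    # resolve the model next to this module instead of the caller's CWD
    MODEL_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                              "word2vec.model")
    wv = KeyedVectors.load(MODEL_PATH, mmap='r')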
@@ -102,6 +102,7 @@
"#navigation into right path and generating classifier\n",
"import sys\n",
"sys.path.append(\"..\")\n",
+"sys.path.append(\"../naive_approach\")\n",
"\n",
"import simple_approach.simple_twitter_learning as stl\n",
"clf_advanced = stl.pipeline_manager.load_pipeline_from_files( '../simple_approach/custom_classifier', ['keras_model'], ['vectorizer', 'keras_model'])\n",
@@ -151,7 +152,7 @@
"#sys.path.append(\"..\")\n",
"#print(sys.path)\n",
"\n",
-"import naive_approach.naive_approach as clf_naive"
+"import naive_approach as clf_naive"
]
},
{
@@ -557,7 +558,7 @@
{
"data": {
"application/vnd.jupyter.widget-view+json": {
-"model_id": "6cb4372b3ac5467b8eec0f4cd67f8212",
+"model_id": "6216e3a271cc4428ba568adbad2fa40c",
"version_major": 2,
"version_minor": 0
},