diff --git a/Project/advanced_approach/simple_twitter_learning_old.ipynb b/Project/advanced_approach/simple_twitter_learning_old.ipynb deleted file mode 100644 index 54ea663..0000000 --- a/Project/advanced_approach/simple_twitter_learning_old.ipynb +++ /dev/null @@ -1,3706 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/home/jonas/.local/lib/python3.6/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", - " from ._conv import register_converters as _register_converters\n", - "Using TensorFlow backend.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[nltk_data] Downloading package punkt to /home/jonas/nltk_data...\n", - "[nltk_data] Package punkt is already up-to-date!\n", - "[nltk_data] Downloading package averaged_perceptron_tagger to\n", - "[nltk_data] /home/jonas/nltk_data...\n", - "[nltk_data] Package averaged_perceptron_tagger is already up-to-\n", - "[nltk_data] date!\n", - "[nltk_data] Downloading package wordnet to /home/jonas/nltk_data...\n", - "[nltk_data] Package wordnet is already up-to-date!\n" - ] - }, - { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 1, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "import pandas as pd\n", - "from IPython.display import clear_output, Markdown, Math\n", - "import ipywidgets as widgets\n", - "import os\n", - "import glob\n", - "import json\n", - "import numpy as np\n", - "import itertools\n", - "import sklearn.utils as sku\n", - "from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer, HashingVectorizer\n", - "from sklearn.model_selection import train_test_split\n", - "from sklearn.preprocessing import MultiLabelBinarizer\n", - "import nltk\n", - "from keras.models import load_model\n", - "from sklearn.externals import joblib\n", - "import pickle\n", - "import operator\n", - "from sklearn.pipeline import Pipeline\n", - "nltk.download('punkt')\n", - "nltk.download('averaged_perceptron_tagger')\n", - "nltk.download('wordnet')" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import sys\n", - "sys.path.append(\"..\")\n", - "\n", - "from Tools.Emoji_Distance import sentiment_vector_to_emoji\n", - "from Tools.Emoji_Distance import emoji_to_sentiment_vector\n", - "\n", - "def emoji2sent(emoji_arr, only_emoticons=True):\n", - " return np.array([emoji_to_sentiment_vector(e, only_emoticons=only_emoticons) for e in emoji_arr])\n", - "\n", - "def sent2emoji(sent_arr, custom_target_emojis=None, only_emoticons=True):\n", - " return [sentiment_vector_to_emoji(s, custom_target_emojis=custom_target_emojis, only_emoticons=only_emoticons) for s in sent_arr]" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [], - "source": [ - "SINGLE_LABEL = True" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "----\n", - "## classes and functions we are using later:\n", - "----" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* functions for selecting items from a set / list" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "def latest(lst):\n", - " return lst[-1] if 
len(lst) > 0 else 'X' \n", - "def most_common(lst):\n", - " # trying to find the most commonly used emoji in the given lst\n", - " return max(set(lst), key=lst.count) if len(lst) > 0 else \"X\" # setting label to 'X' if there is an empty emoji list" - ] - },
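 - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* a quick behaviour check of these helpers (a minimal sketch on made-up inputs, not part of the original pipeline):" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# toy inputs only; the real lists come from the 'EMOJI' column later on\n", - "print(latest(['πŸ˜‚', '😭'])) # '😭': the last emoji of a tweet is used as its label\n", - "print(most_common(['πŸ˜‚', '😭', 'πŸ˜‚'])) # 'πŸ˜‚': the most frequent emoji wins\n", - "print(latest([])) # 'X': dummy label for tweets without any emoji" - ] - },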
 - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* our emoji blacklist (skin tone and gender modifiers)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "# defining blacklist for modifier emojis:\n", - "emoji_blacklist = set([\n", - " chr(0x1F3FB),\n", - " chr(0x1F3FC),\n", - " chr(0x1F3FD),\n", - " chr(0x1F3FE),\n", - " chr(0x1F3FF),\n", - " chr(0x2642),\n", - " chr(0x2640)\n", - "])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* lemmatization helper functions" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "from nltk.stem.snowball import SnowballStemmer\n", - "from nltk.stem import WordNetLemmatizer\n", - "from nltk import pos_tag\n", - "from nltk import word_tokenize\n", - "from nltk.corpus import wordnet\n", - "\n", - "def get_wordnet_pos(treebank_tag):\n", - "\n", - " if treebank_tag.startswith('J'):\n", - " return wordnet.ADJ\n", - " elif treebank_tag.startswith('V'):\n", - " return wordnet.VERB\n", - " elif treebank_tag.startswith('N'):\n", - " return wordnet.NOUN\n", - " elif treebank_tag.startswith('R'):\n", - " return wordnet.ADV\n", - " else:\n", - " return wordnet.NOUN" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* the sample data manager loads and preprocesses data" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - "class sample_data_manager(object):\n", - " @staticmethod\n", - " def generate_and_read(path:str, only_emoticons=True, apply_stemming=True, n_top_emojis=-1, file_range=None):\n", - " sdm = sample_data_manager(path)\n", - " sdm.read_files(file_index_range=range(sdm.n_files) if file_range is None else file_range, only_emoticons=only_emoticons)\n", - " if apply_stemming:\n", - " sdm.apply_stemming_and_lemmatization()\n", - " \n", - " sdm.generate_emoji_count_and_weights()\n", - " \n", - " if n_top_emojis > 0:\n", - " sdm.filter_by_top_emojis(n_top=n_top_emojis)\n", - " \n", - " return sdm\n", - " \n", - " \n", - " def __init__(self, data_root_folder:str):\n", - " self.data_root_folder = data_root_folder\n", - " self.json_files = sorted(glob.glob(self.data_root_folder + \"/*.json\"))\n", - " self.n_files = len(self.json_files)\n", - " self.raw_data = None\n", - " self.emojis = None\n", - " self.plain_text = None\n", - " self.labels = None\n", - " self.emoji_count = None\n", - " self.emoji_weights = None\n", - " self.X = None\n", - " self.y = None\n", - " self.Xt = None\n", - " self.yt = None\n", - " self.top_emojis = None\n", - " \n", - " def read_files(self, file_index_range:list, only_emoticons=True):\n", - " assert np.min(file_index_range) >= 0 and np.max(file_index_range) < self.n_files\n", - " for i in file_index_range:\n", - " print(\"reading file: \" + self.json_files[i] + \"...\")\n", - " if self.raw_data is None:\n", - " self.raw_data = pd.read_json(self.json_files[i], encoding=\"utf-8\")\n", - " else:\n", - " self.raw_data = self.raw_data.append(pd.read_json(self.json_files[i], encoding=\"utf-8\"))\n", - " \n", - " self.emojis = self.raw_data['EMOJI']\n", - " self.plain_text = self.raw_data['text']\n", - " \n", - " # replacing keywords. TODO: maybe this information can be extracted and used\n", - " self.plain_text = self.plain_text.str.replace(\"(<EMOJI>|<USER>|<HASHTAG>)\",\"\").str.replace(\"[\" + \"\".join(list(emoji_blacklist)) + \"]\",\"\")\n", - " \n", - " # so far filtering for the latest emoji. TODO: maybe there are also better approaches\n", - " self.labels = emoji2sent([latest(e) for e in self.emojis], only_emoticons=only_emoticons)\n", - " \n", - " # and filter out all samples we have no label for:\n", - " wrong_labels = np.isnan(np.linalg.norm(self.labels, axis=1)) \n", - "\n", - " self.labels = self.labels[np.invert(wrong_labels)]\n", - " self.plain_text = self.plain_text[np.invert(wrong_labels)]\n", - " self.emojis = self.emojis[np.invert(wrong_labels)]\n", - " \n", - " print(\"imported \" + str(len(self.labels)) + \" samples\")\n", - " \n", - " def apply_stemming_and_lemmatization(self):\n", - " stemmer = SnowballStemmer(\"english\")\n", - " for key in self.plain_text.keys():\n", - " stemmed_sent = []\n", - " for word in self.plain_text[key].split(\" \"):\n", - " word_stemmed = stemmer.stem(word)\n", - " stemmed_sent.append(word_stemmed)\n", - " stemmed_sent = (\" \").join(stemmed_sent)\n", - " self.plain_text[key] = stemmed_sent\n", - " \n", - " lemmatizer = WordNetLemmatizer()\n", - " for key in self.plain_text.keys():\n", - " lemmatized_sent = []\n", - " sent_pos = pos_tag(word_tokenize(self.plain_text[key]))\n", - " for word in sent_pos:\n", - " wordnet_pos = get_wordnet_pos(word[1].lower())\n", - " word_lemmatized = lemmatizer.lemmatize(word[0], pos=wordnet_pos)\n", - " lemmatized_sent.append(word_lemmatized)\n", - " lemmatized_sent = (\" \").join(lemmatized_sent)\n", - " self.plain_text[key] = lemmatized_sent\n", - " \n", - " def generate_emoji_count_and_weights(self):\n", - " self.emoji_count = {}\n", - " for e_list in self.emojis:\n", - " for e in set(e_list):\n", - " if e not in self.emoji_count:\n", - " self.emoji_count[e] = 0\n", - " self.emoji_count[e] += 1\n", - " \n", - " emoji_sum = sum([self.emoji_count[e] for e in self.emoji_count])\n", - "\n", - " self.emoji_weights = {}\n", - " for e in self.emoji_count:\n", - " # idf-style weighting for emojis\n", - " self.emoji_weights[e] = np.log((emoji_sum / self.emoji_count[e]))\n", - "\n", - " weights_sum = sum([self.emoji_weights[x] for x in self.emoji_weights])\n", - "\n", - " # normalize:\n", - " for e in self.emoji_weights:\n", - " self.emoji_weights[e] = self.emoji_weights[e] / weights_sum\n", - "\n", - " self.emoji_weights['X'] = 0 # dummy values\n", - " self.emoji_count['X'] = 0\n", - " \n", - " def get_emoji_count(self):\n", - " sorted_emoji_count = list(reversed(sorted(self.emoji_count.items(), key=operator.itemgetter(1))))\n", - " #display(sorted_emoji_count)\n", - " return sorted_emoji_count\n", - " \n", - " def filter_by_top_emojis(self, n_top = 20):\n", - " self.top_emojis = [x[0] for x in self.get_emoji_count()[:n_top]]\n", - " in_top = [sentiment_vector_to_emoji(x) in self.top_emojis for x in self.labels]\n", - " self.labels = self.labels[in_top]\n", - " self.plain_text = self.plain_text[in_top]\n", - " self.emojis = self.emojis[in_top]\n", - " print(\"remaining samples after top emoji filtering: \", len(self.labels))\n", - " \n", - " def create_train_test_split(self, split = 0.1, random_state = 4222):\n", - " self.X, self.Xt, self.y, self.yt = train_test_split(self.plain_text, self.labels, test_size=split, random_state=random_state)\n", - "\n" - ] - },
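 - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* a small worked example of the idf-style emoji weighting from `generate_emoji_count_and_weights` above (a minimal sketch with made-up counts, not values from the dataset):" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "\n", - "# made-up counts: one frequent emoji, two rarer ones\n", - "toy_count = {'πŸ˜‚': 90, '😭': 9, '🌻': 1}\n", - "toy_sum = sum(toy_count.values())\n", - "\n", - "# idf-style raw weights: the rarer an emoji, the larger its weight\n", - "toy_weights = {e: np.log(toy_sum / c) for e, c in toy_count.items()}\n", - "\n", - "# normalize so all weights sum up to one:\n", - "norm = sum(toy_weights.values())\n", - "{e: w / norm for e, w in toy_weights.items()} # -> roughly {'πŸ˜‚': 0.01, '😭': 0.34, '🌻': 0.65}" - ] - },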
 - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* the pipeline manager saves and loads sklearn pipelines. Keras models are handled differently, so they have to be named explicitly during save and load operations" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [], - "source": [ - "class pipeline_manager(object):\n", - " @staticmethod\n", - " def load_pipeline_from_files(file_prefix:str, keras_models = [], all_models = []):\n", - " pm = pipeline_manager(keras_models=keras_models)\n", - " pm.load(file_prefix, all_models)\n", - " return pm\n", - " \n", - " @staticmethod\n", - " def create_keras_pipeline_with_vectorizer(vectorizer, layers, sdm:sample_data_manager):\n", - " '''\n", - " creates a pipeline with a vectorizer and a keras classifier\n", - " '''\n", - " from keras.models import Sequential\n", - " from keras.layers import Dense\n", - " \n", - " if sdm.X is None:\n", - " sdm.create_train_test_split()\n", - " \n", - " vec_train = vectorizer.fit_transform(sdm.X)\n", - " vec_test = vectorizer.transform(sdm.Xt)\n", - " # creating keras model:\n", - " model=Sequential()\n", - " \n", - " first_layer = True\n", - " for layer in layers:\n", - " if first_layer:\n", - " model.add(Dense(units=layer[0], activation=layer[1], input_dim=vectorizer.transform([\" \"])[0]._shape[1]))\n", - " first_layer = False\n", - " else:\n", - " model.add(Dense(units=layer[0], activation=layer[1]))\n", - " \n", - " model.compile(loss='mean_squared_error',\n", - " optimizer='adam')\n", - " \n", - " pipeline = Pipeline([\n", - " ('vectorizer',vectorizer),\n", - " ('keras_model', model)\n", - " ])\n", - " \n", - " return pipeline_manager(pipeline=pipeline, keras_models=['keras_model'])\n", - " \n", - " @staticmethod\n", - " def create_pipeline_with_classifier_and_vectorizer(vectorizer, classifier, sdm:sample_data_manager = None):\n", - " '''\n", - " creates a pipeline with a vectorizer and a classifier for non-keras classifiers.\n", - " if a sample data manager is given, the vectorizer will also be fitted!\n", - " '''\n", - " if sdm is not None:\n", - " if sdm.X is None:\n", - " sdm.create_train_test_split()\n", - "\n", - " vec_train = vectorizer.fit_transform(sdm.X)\n", - " vec_test = vectorizer.transform(sdm.Xt)\n", - " \n", - " pipeline = Pipeline([\n", - " ('vectorizer',vectorizer),\n", - " ('classifier', classifier)\n", - " ])\n", - " \n", - " return pipeline_manager(pipeline=pipeline, keras_models=[])\n", - " \n", - " def __init__(self, pipeline = None, keras_models = []):\n", - " self.pipeline = pipeline\n", - " self.additional_objects = {}\n", - " self.keras_models = keras_models\n", - " \n", - " def save(self, prefix:str):\n", - " print(self.keras_models)\n", - " # doing this as explained here: https://stackoverflow.com/a/43415459\n", - " for step in self.pipeline.named_steps:\n", - " if step in self.keras_models:\n", - " self.pipeline.named_steps[step].model.save(prefix + \".\" + step)\n", - " else:\n", - " joblib.dump(self.pipeline.named_steps[step], prefix + \".\" + str(step))\n", - " \n", - " load_command = \"pipeline_manager.load_pipeline_from_files( '\"\n", - " load_command += prefix + \"', \" + str(self.keras_models) + \", \"\n", - " load_command += str(list(self.pipeline.named_steps.keys())) + \")\"\n", - " \n", - " import __main__ as main\n", - " if not hasattr(main, '__file__'):\n", - " display(\"saved pipeline. It can be loaded the following way:\")\n", - " display(Markdown(\"> ```\\n\"+load_command+\"\\n```\"))\n", - " else:\n", - " print(\"saved pipeline. It can be loaded the following way:\")\n", - " print(load_command)\n", - " \n", - " \n", - " def load(self, prefix:str, models = []):\n", - " self.pipeline = None\n", - " model_list = []\n", - " for model in models:\n", - " if model in self.keras_models:\n", - " model_list.append((model, load_model(prefix + \".\" + model)))\n", - " else:\n", - " model_list.append((model, joblib.load(prefix+\".\" + model)))\n", - " self.pipeline = Pipeline(model_list)\n", - " \n", - " def fit(self,X,y):\n", - " self.pipeline.fit(X,y)\n", - " \n", - " def predict(self,X):\n", - " return self.pipeline.predict(X)\n", - " " - ] - },
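 - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* usage sketch (the same calls appear in the save/load cells further below; assumes a fitted pipeline manager `pm`):" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# persist every pipeline step under a common file prefix:\n", - "# pm.save('custom_classifier')\n", - "# restore it later (keras steps go through keras' own loader, everything else through joblib):\n", - "# pm = pipeline_manager.load_pipeline_from_files('custom_classifier', ['keras_model'], ['vectorizer', 'keras_model'])" - ] - },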
 - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* the trainer class passes data from the sample manager to the pipeline manager" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "class trainer(object):\n", - " def __init__(self, sdm:sample_data_manager, pm:pipeline_manager):\n", - " self.sdm = sdm\n", - " self.pm = pm\n", - " \n", - " def fit(self, max_size=10000, disabled_fit_steps=['vectorizer']):\n", - " # TODO: make batch fitting available here (e.g. continuously waiting for new data and fitting on it)\n", - " if self.sdm.X is None:\n", - " self.sdm.create_train_test_split()\n", - " disabled_fits = {}\n", - " disabled_fit_transforms = {}\n", - " \n", - " named_steps = self.pm.pipeline.named_steps\n", - " \n", - " for s in disabled_fit_steps:\n", - " # now it gets a little bit dirty:\n", - " # replace fit functions we don't want to call again (e.g. for vectorizers)\n", - " disabled_fits[s] = named_steps[s].fit\n", - " disabled_fit_transforms[s] = named_steps[s].fit_transform\n", - " named_steps[s].fit = lambda self, X, y=None: self\n", - " named_steps[s].fit_transform = named_steps[s].transform\n", - " \n", - " self.pm.fit(X = self.sdm.X[:max_size], y = self.sdm.y[:max_size])\n", - " \n", - " # restore replaced fit functions:\n", - " for s in disabled_fit_steps:\n", - " named_steps[s].fit = disabled_fits[s]\n", - " named_steps[s].fit_transform = disabled_fit_transforms[s]\n", - " \n", - " def test(self):\n", - " '''\n", - " return: prediction:list, teacher:list\n", - " '''\n", - " if self.sdm.X is None:\n", - " self.sdm.create_train_test_split()\n", - " return self.pm.predict(self.sdm.Xt), self.sdm.yt\n", - "\n", - " " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "----\n", - "## Train" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* when in a notebook environment: run the cells below:" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "reading file: ./data_en/2017-11-01.json...\n", - "imported 33368 samples\n", - "remaining samples after top emoji filtering: 26197\n" - ] - } - ], - "source": [ - "import __main__ as main\n", - "if not hasattr(main, '__file__'):\n", - " # we are in an interactive environment (probably in jupyter)\n", - " # load data:\n", - " sdm = sample_data_manager.generate_and_read(path=\"./data_en/\", n_top_emojis=20, file_range=range(1))\n", - " " - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epoch 1/1\n", - "100/100 [==============================] - 3s 27ms/step - loss: 0.1225\n" - ] - } - ], - "source": [ - " #pm = 
pipeline_manager.create_keras_pipeline_with_vectorizer(vectorizer=TfidfVectorizer(stop_words='english'),\n", - " # layers=[(10000, 'relu'),(5000, 'relu'),(2500, 'relu'),(y1[0].shape[0],None)], sdm=sdm)\n", - " pm = pipeline_manager.create_keras_pipeline_with_vectorizer(vectorizer=TfidfVectorizer(stop_words='english'),\n", - " layers=[(2500, 'relu'),(3,None)], sdm=sdm)\n", - " \n", - " tr = trainer(sdm=sdm, pm=pm)\n", - " tr.fit(100)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "----\n", - "## save classifier" - ] - }, - { - "cell_type": "code", - "execution_count": 23, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "['keras_model']\n" - ] - }, - { - "data": { - "text/plain": [ - "'saved pipeline. It can be loaded the following way:'" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/markdown": [ - "> ```\n", - "pipeline_manager.load_pipeline_from_files( 'custom_classifier', ['keras_model'], ['vectorizer', 'keras_model'])\n", - "```" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "pm.save('custom_classifier')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "----\n", - "## Prediction" - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([[0.15801723, 0.11859037, 0.10975348],\n", - " [0.17035495, 0.10913695, 0.09354854],\n", - " [0.11777218, 0.06569621, 0.06620223],\n", - " ...,\n", - " [0.14746301, 0.09480572, 0.08052498],\n", - " [0.15932804, 0.11895895, 0.10343507],\n", - " [0.17135939, 0.1061406 , 0.09402546]], dtype=float32)" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "array([[0.46813021, 0.24716181, 0.28470797],\n", - " [0.46813021, 0.24716181, 0.28470797],\n", - " [0.70401758, 0.05932203, 0.23666039],\n", - " ...,\n", - " [0.46813021, 0.24716181, 0.28470797],\n", - " [0.46813021, 0.24716181, 0.28470797],\n", - " [0.46813021, 0.24716181, 0.28470797]])" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "prediction variance: 0.0006294687\n", - "teacher variance: 0.03341702104519965\n" - ] - }, - { - "data": { - "text/html": [ - "
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
predictpredicted_sentimentteacherteacher_sentimenttext
35671😒[0.15801723301410675, 0.11859036982059479, 0.1...πŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...i feel like i care so much more in everi situat
25683😒[0.1703549474477768, 0.10913695394992828, 0.09...πŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...i did not meat to add that 2 there ... hav see...
8985😒[0.1177721843123436, 0.06569620966911316, 0.06...😊[0.7040175768989329, 0.059322033898305086, 0.2...never…
5410😒[0.18182337284088135, 0.12382747232913971, 0.0...πŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...lmao on me ! ! ! wtf wa he suppos to say
62611😒[0.1786666363477707, 0.11502400785684586, 0.10...😊[0.7040175768989329, 0.059322033898305086, 0.2...this dude alway help me get through my school ...
\n", - "
" - ], - "text/plain": [ - " predict predicted_sentiment teacher \\\n", - "35671 😒 [0.15801723301410675, 0.11859036982059479, 0.1... πŸ˜‚ \n", - "25683 😒 [0.1703549474477768, 0.10913695394992828, 0.09... πŸ˜‚ \n", - "8985 😒 [0.1177721843123436, 0.06569620966911316, 0.06... 😊 \n", - "5410 😒 [0.18182337284088135, 0.12382747232913971, 0.0... πŸ˜‚ \n", - "62611 😒 [0.1786666363477707, 0.11502400785684586, 0.10... 😊 \n", - "\n", - " teacher_sentiment \\\n", - "35671 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "25683 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "8985 [0.7040175768989329, 0.059322033898305086, 0.2... \n", - "5410 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "62611 [0.7040175768989329, 0.059322033898305086, 0.2... \n", - "\n", - " text \n", - "35671 i feel like i care so much more in everi situat \n", - "25683 i did not meat to add that 2 there ... hav see... \n", - "8985 never… \n", - "5410 lmao on me ! ! ! wtf wa he suppos to say \n", - "62611 this dude alway help me get through my school ... " - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Mean Squared Error: [0.14140389 0.04240099 0.02944344]\n", - "Variance teacher: [0.02183094 0.02513847 0.00285735]\n", - "Variance prediction: [0.00053908 0.00024232 0.00021658]\n" - ] - } - ], - "source": [ - "import __main__ as main\n", - "if not hasattr(main, '__file__'):\n", - " pred, teacher = tr.test()\n", - " \n", - " display(pred)\n", - " display(teacher)\n", - " \n", - " print('prediction variance: ', np.linalg.norm(np.var(pred, axis=0)))\n", - " print('teacher variance: ', np.linalg.norm(np.var(teacher, axis=0)))\n", - " \n", - " # build a dataframe to visualize test results:\n", - " testlist = pd.DataFrame({'text': sdm.Xt, \n", - " 'teacher': sent2emoji(sdm.yt),\n", - " 'teacher_sentiment': sdm.yt.tolist(),\n", - " 'predict': sent2emoji(pred, custom_target_emojis=sdm.top_emojis),\n", - " 'predicted_sentiment': pred.tolist()})\n", - " # display:\n", - " display(testlist.head())\n", - " \n", - " # mean squared error:\n", - " teacher_sentiments = np.array([sample[1]['teacher_sentiment'] for sample in testlist.iterrows()])\n", - " predicted_sentiments = np.array([sample[1]['predicted_sentiment'] for sample in testlist.iterrows()])\n", - "\n", - " mean_squared_error = ((teacher_sentiments - predicted_sentiments)**2).mean(axis=0)\n", - " print(\"Mean Squared Error: \", mean_squared_error)\n", - " print(\"Variance teacher: \", np.var(teacher_sentiments, axis=0))\n", - " print(\"Variance prediction: \", np.var(predicted_sentiments, axis=0))\n", - " \n", - " # save to csv:\n", - " testlist.to_csv('test.csv')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "----\n", - "## Load classifier" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import __main__ as main\n", - "if not hasattr(main, '__file__'):\n", - " try:\n", - " pm\n", - " except NameError:\n", - " pass\n", - " else:\n", - " del pm # delete existing pipeline manager if ther is one\n", - "\n", - " pm = pipeline_manager.load_pipeline_from_files( 'custom_classifier', ['keras_model'], ['vectorizer', 'keras_model'])\n", - " lookup_emojis = [#'πŸ˜‚',\n", - " '😭',\n", - " '😍',\n", - " '😩',\n", - " '😊',\n", - " '😘',\n", - " 'πŸ™',\n", - " 'πŸ™Œ',\n", - " 'πŸ˜‰',\n", - " '😁',\n", - " 'πŸ˜…',\n", - " '😎',\n", - " '😒',\n", - " 'πŸ˜’',\n", - " '😏',\n", - " '😌',\n", - " 'πŸ˜”',\n", - " 
'πŸ˜‹',\n", - " 'πŸ˜€',\n", - " '😀']\n", - " out = widgets.Output()\n", - "\n", - " t = widgets.Text()\n", - " b = widgets.Button(\n", - " description='get emoji',\n", - " disabled=False,\n", - " button_style='', # 'success', 'info', 'warning', 'danger' or ''\n", - " tooltip='Click me',\n", - " icon='check'\n", - " )\n", - "\n", - "\n", - "\n", - " def handle_submit(sender):\n", - " with out:\n", - " clear_output()\n", - " with out:\n", - " pred = pm.predict([t.value])\n", - "\n", - " display(Markdown(\"# Predicted Emoji \" + str(sent2emoji(pred, lookup_emojis)[0])))\n", - " display(Markdown(\"# Sentiment Vector: $$ \\pmatrix{\" + str(pred[0,0]) +\n", - " \"\\\\\\\\\" + str(pred[0,1]) + \"\\\\\\\\\" + str(pred[0,2]) + \"}$$\"))\n", - "\n", - " b.on_click(handle_submit)\n", - "\n", - " display(t)\n", - " display(widgets.VBox([b, out])) " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# simple twitter approach\n", - "*for learning emoji usage from single (i.e. unconnected) twitter messages*" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## loading training data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* Download preprocessed raw data:\n", - " * [here](https://the-cake-is-a-lie.net/nextcloud/index.php/s/MmXFYj6mGoMQoJN) for English\n", - " * [here](https://the-cake-is-a-lie.net/nextcloud/index.php/s/HgqpQ6rFadtWSAt) for German" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "data_root_folder = \"./data_en/\" # I created a symlink here" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* get all json files in `data_root_folder`" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "json_files = sorted(glob.glob(data_root_folder + \"/*.json\"))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "----" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* so far, only load the first file" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "<div>
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " 
\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
EMOJIHASHTAGSLINKED_USERdatetimeidlangpersonreply_totext
0[πŸ”₯, πŸ‘][][]2017-11-01 13:29:00925716304635547600en31507978NaNfashionbombdaily's photo <EMOJI><EMOJI>🏼
1[🀦][][]2017-11-01 13:29:00925716304664911900en231994649NaNIt’s scary how on point my horoscope be <EMOJI...
2[πŸ˜„][][]2017-11-01 13:29:03925716317214089200en2592765104NaNWoooaaaahhh <EMOJI>
3[πŸ“·][][]2017-11-01 13:29:04925716321416949800en278737933NaN<EMOJI> vivalcli: Portraits by Zhao Guojing an...
4[😩, 😩][][@hiphopphiIes]2017-11-01 13:29:06925716329801310200en8245862536349819009.257162e+17<USER> i wanna know too<EMOJI><EMOJI>
5[😭, πŸ’“][][@WizMommma]2017-11-01 13:29:02925716313019965400en15819538149.257088e+17<USER> veda was yoda too <EMOJI><EMOJI>
6[πŸ˜‚][][]2017-11-01 13:29:05925716325607133200en1001999683NaNI’m less stressed about turning 30 now <EMOJI>...
7[πŸ’―][][]2017-11-01 13:29:07925716334008082400en745222369183043600NaNFull charged. <EMOJI>
8[πŸ™„][][@SeaDimon, @lsarsour]2017-11-01 13:29:09925716342401052700en7985571552175391009.257147e+17<USER> That’s part of the problem, (they) <USE...
9[😟, πŸ˜₯, 😒][][@Ian_khetye]2017-11-01 13:29:10925716346570240000en7443960391264215009.250629e+17<USER> got me emotional there<EMOJI><EMOJI><EM...
10[🌻][][]2017-11-01 13:29:13925716359182520300en7214900101182054008.965900e+17back to the yellow <EMOJI>
11[🍁, 🌺, πŸ‚][][@Dimafadma]2017-11-01 13:29:15925716367558545400en5205367239.257159e+17<USER> Happy month to you and your loved ones ...
12[πŸƒ][#mortdale, #partofthefamily, #gorgeousboy][]2017-11-01 13:29:16925716371735900200en850852815941517300NaNMaxx and Patricia. Family hangs at For Good He...
13[πŸ’­, 🀦][][]2017-11-01 13:29:20925716388513230800en914145041588867100NaNI need to STOP beating myself up with my thoug...
14[😍, 😘][#7YearsOfKMH2][]2017-11-01 13:29:20925716388525645800en2425405622NaNCutest Son <EMOJI>Roll no. 31 <EMOJI> <HASHTAG>
15[😜][][]2017-11-01 13:29:22925716396931240000en4614871873NaNBy the summer I should have everything up and ...
16[πŸ˜‚, πŸ”₯][][]2017-11-01 13:29:23925716401125331000en2831608345NaNI know my English is not that good but that do...
17[πŸ’•][][@yungbabytate]2017-11-01 13:29:23925716401133948900en7885719746330092009.255778e+17<USER> I <EMOJI> u mama
18[πŸ˜‰][][@cmckenney]2017-11-01 13:29:23925716401125544000en2183078029.257115e+17<USER> That picture was NOT taken this morning...
19[πŸ‘…][#footfetishnation][]2017-11-01 13:29:25925716409489002500en885261166146179100NaNWelcome to <HASHTAG> <EMOJI>
20[πŸ‘Œ, πŸ™‚][][]2017-11-01 13:29:25925716409497272300en831437760833609700NaNAwkward <EMOJI><EMOJI>
21[πŸ€—, πŸ“Ί][][]2017-11-01 13:29:26925716413699854300en231664542NaNback at it with supernatural <EMOJI><EMOJI>
22[πŸ’―][][]2017-11-01 13:29:26925716413679009800en3196847035NaNOne of the best things I've learned was to sto...
23[πŸ‘…, πŸ’¦, πŸ‘][][@ctrlpurp]2017-11-01 13:29:29925716426278735900en9184928583526359009.257161e+17<USER> Can I taste?<EMOJI><EMOJI><EMOJI>
24[πŸ’”][][@saunders_court1]2017-11-01 13:29:30925716430473039900en34711873379.257163e+17<USER> we miss you ☹️<EMOJI>
25[🀐, 🀐, 🀐][][]2017-11-01 13:29:31925716434667184100en780060488600199200NaNActually my bias in WJSN are Eunseo &amp; Bona...
26[😴][][]2017-11-01 13:29:32925716438853345300en388380690NaNI so cannot be bothered with the rest of the d...
27[πŸ˜‚][][@xxxtentacion]2017-11-01 13:29:35925716451457163300en899320696869974000NaN<USER> 2lit4life<EMOJI>
28[πŸ˜‚, πŸ™„][][]2017-11-01 13:29:35925716451461357600en784790670NaNI’m not stop saying that!<EMOJI><EMOJI>
29[πŸŽ‰, πŸŽ‚, 🎈, 🎊, 🎁, πŸ’œ][][@justinerooney_]2017-11-01 13:29:37925716459828936700en3051266655NaN<USER> HAPPY BIHDAY <EMOJI><EMOJI><EMOJI><EMOJ...
..............................
68703[πŸ˜•][#halloweencostumes][]2017-11-01 07:23:04925624214522036200en1672876458NaN<HASHTAG> this one falls under the weird crazy...
68704[πŸ˜‚, 😩][][]2017-11-01 07:23:05925624218682777600en382473866NaNI'm not allowed to have chocolates yet, then I...
68705[πŸ˜‚][#MUFC][]2017-11-01 07:23:06925624222889766900en893145405457911800NaNManchester United manager Mourinho slams 'spec...
68706[πŸ’–][][]2017-11-01 07:23:07925624227088121900en240378516NaN<EMOJI> en Bushwhick
68707[πŸŒ†, πŸ‘‰, πŸš–, πŸ“ž][#BurkeCentre][]2017-11-01 07:23:18925624273237983200en784620573209002000NaN: <HASHTAG> <EMOJI> <EMOJI><EMOJI> For Taxi <E...
68708[😁][][@mychosliaheart, @BarrettoJulia, @iamjoshuaga...2017-11-01 07:23:18925624273212805100en1709981879.254136e+17<USER> <USER> <USER> Look, Mammeh and Daddeh! ...
68709[πŸ˜‹][][]2017-11-01 07:23:24925624298395533300en1348667816NaNLife is so good with you <EMOJI>
68710[πŸ‘Œ, πŸŽƒ, 😘][#portlandoregon, #portlandhalloween, #carrie…][]2017-11-01 07:23:24925624298378801200en722481645765300200NaNHappy Halloween! <EMOJI>🏽<EMOJI><EMOJI> <HASHT...
68711[πŸ‘][][@8limbsbondi...]2017-11-01 07:23:26925624306779897900en2443251500NaNSome work on the ropes in today’s boxing class...
68712[😭][][]2017-11-01 07:23:27925624310974136300en2406186390NaNScotty and Kristen’s halloween costumes <EMOJI>
68713[πŸ˜‚][][@rfrandrea, @AdaaanAndyyy]2017-11-01 07:23:30925624323557146600en10394481499.256150e+17<USER> <USER> May pre-month celebration sis <E...
68714[😭, 😭, πŸ’˜][][@peachshua1230]2017-11-01 07:23:31925624327755591700en8450855445896724009.256009e+17<USER> Awww <EMOJI> Ajsksjdjd im smiling like ...
68715[😫, βœ‹][][]2017-11-01 07:23:34925624340342812700en924752524871131100NaNI hate when I send a text or snap n I'm so anx...
68716[πŸ˜‚, πŸ™][][]2017-11-01 07:23:34925624340355280900en419493819NaNThe answer is no I have no plans and I never l...
68717[😭][][@BeachBoy_Gab]2017-11-01 07:23:34925624340346937300en24577459529.256219e+17<USER> LMAOOO I'm so proud <EMOJI>
68718[😒, πŸ’”][][]2017-11-01 07:23:34925624340338507800en1955767531NaNMy cousin/little sister is leaving to San Fran...
68719[πŸ™ƒ][][]2017-11-01 07:23:35925624344524361700en796490344581898200NaNCan't be alone w my thoughts tonight so just g...
68720[πŸ˜‚, πŸ˜‚, πŸ˜‚, πŸ˜‚, πŸ˜‚][][]2017-11-01 07:23:36925624348710285300en907808317124177900NaN<EMOJI><EMOJI><EMOJI><EMOJI><EMOJI> ambot!!!
68721[πŸ‘Œ, πŸ‘Š, πŸ™Œ][][]2017-11-01 07:23:37925624352929910800en262162415NaN<EMOJI>🏽<EMOJI>🏽 1st of the month!!Happy 1st o...
68722[😴][#WednesdayWisdom][]2017-11-01 07:23:41925624369715515400en574882525NaN<HASHTAG> ... stay in bed <EMOJI>
68723[πŸ˜‚, πŸ˜‚, πŸ˜‚, πŸ˜‚][][@Louis_Tomlinson, @NiallOfficial]2017-11-01 07:23:44925624382269124600en5561751739.254038e+17<USER> <USER> THIS IS GOLD. GOLD. <EMOJI><EMOJ...
68724[πŸ˜€][][]2017-11-01 07:23:45925624386455031800en1610265588NaN<EMOJI> thank you for the kind compliment
68725[😎][][]2017-11-01 07:23:45925624386454937600en4760724450NaNEnjoyed the silence <EMOJI>
68726[✨][][]2017-11-01 07:23:46925624390657572900en882858115636514800NaNOS: Spiderman Homecoming <EMOJI>
68727[🀷][][]2017-11-01 07:23:46925624390682849300en188129628NaNGo to hell <EMOJI>πŸ½β€β™€οΈ
68728[😘][][]2017-11-01 07:23:46925624390666129400en2473135939NaNThank you Yomi! <EMOJI>
68729[πŸ˜‚][][@discopiggu]2017-11-01 07:23:46925624390670106600en23735842099.256241e+17<USER> Lol. Just enjoy the stars. Music Kidhar...
68730[πŸ™][#NYCStrong][]2017-11-01 07:23:50925624407459971100en181689756NaNThoughts and prayers for NY<EMOJI>🏻 <HASHTAG>
68731[πŸ’][#GreatSuccess][@BrianyH]2017-11-01 07:23:50925624407460057100en601607889.254610e+17<USER> I searched COCK, PENIS, SHLONG, WINKY, ...
68732[πŸƒ, 🌻, 🌻, πŸƒ, πŸƒ, πŸ’, πŸ’, πŸƒ, πŸ™‹][][@amitbarman520]2017-11-01 07:23:53925624420022063100en37922907259.256215e+17<USER> Thank you so much<EMOJI><EMOJI><EMOJI><...
\n", - "

68733 rows Γ— 9 columns

\n", - "
" - ], - "text/plain": [ - " EMOJI \\\n", - "0 [πŸ”₯, πŸ‘] \n", - "1 [🀦] \n", - "2 [πŸ˜„] \n", - "3 [πŸ“·] \n", - "4 [😩, 😩] \n", - "5 [😭, πŸ’“] \n", - "6 [πŸ˜‚] \n", - "7 [πŸ’―] \n", - "8 [πŸ™„] \n", - "9 [😟, πŸ˜₯, 😒] \n", - "10 [🌻] \n", - "11 [🍁, 🌺, πŸ‚] \n", - "12 [πŸƒ] \n", - "13 [πŸ’­, 🀦] \n", - "14 [😍, 😘] \n", - "15 [😜] \n", - "16 [πŸ˜‚, πŸ”₯] \n", - "17 [πŸ’•] \n", - "18 [πŸ˜‰] \n", - "19 [πŸ‘…] \n", - "20 [πŸ‘Œ, πŸ™‚] \n", - "21 [πŸ€—, πŸ“Ί] \n", - "22 [πŸ’―] \n", - "23 [πŸ‘…, πŸ’¦, πŸ‘] \n", - "24 [πŸ’”] \n", - "25 [🀐, 🀐, 🀐] \n", - "26 [😴] \n", - "27 [πŸ˜‚] \n", - "28 [πŸ˜‚, πŸ™„] \n", - "29 [πŸŽ‰, πŸŽ‚, 🎈, 🎊, 🎁, πŸ’œ] \n", - "... ... \n", - "68703 [πŸ˜•] \n", - "68704 [πŸ˜‚, 😩] \n", - "68705 [πŸ˜‚] \n", - "68706 [πŸ’–] \n", - "68707 [πŸŒ†, πŸ‘‰, πŸš–, πŸ“ž] \n", - "68708 [😁] \n", - "68709 [πŸ˜‹] \n", - "68710 [πŸ‘Œ, πŸŽƒ, 😘] \n", - "68711 [πŸ‘] \n", - "68712 [😭] \n", - "68713 [πŸ˜‚] \n", - "68714 [😭, 😭, πŸ’˜] \n", - "68715 [😫, βœ‹] \n", - "68716 [πŸ˜‚, πŸ™] \n", - "68717 [😭] \n", - "68718 [😒, πŸ’”] \n", - "68719 [πŸ™ƒ] \n", - "68720 [πŸ˜‚, πŸ˜‚, πŸ˜‚, πŸ˜‚, πŸ˜‚] \n", - "68721 [πŸ‘Œ, πŸ‘Š, πŸ™Œ] \n", - "68722 [😴] \n", - "68723 [πŸ˜‚, πŸ˜‚, πŸ˜‚, πŸ˜‚] \n", - "68724 [πŸ˜€] \n", - "68725 [😎] \n", - "68726 [✨] \n", - "68727 [🀷] \n", - "68728 [😘] \n", - "68729 [πŸ˜‚] \n", - "68730 [πŸ™] \n", - "68731 [πŸ’] \n", - "68732 [πŸƒ, 🌻, 🌻, πŸƒ, πŸƒ, πŸ’, πŸ’, πŸƒ, πŸ™‹] \n", - "\n", - " HASHTAGS \\\n", - "0 [] \n", - "1 [] \n", - "2 [] \n", - "3 [] \n", - "4 [] \n", - "5 [] \n", - "6 [] \n", - "7 [] \n", - "8 [] \n", - "9 [] \n", - "10 [] \n", - "11 [] \n", - "12 [#mortdale, #partofthefamily, #gorgeousboy] \n", - "13 [] \n", - "14 [#7YearsOfKMH2] \n", - "15 [] \n", - "16 [] \n", - "17 [] \n", - "18 [] \n", - "19 [#footfetishnation] \n", - "20 [] \n", - "21 [] \n", - "22 [] \n", - "23 [] \n", - "24 [] \n", - "25 [] \n", - "26 [] \n", - "27 [] \n", - "28 [] \n", - "29 [] \n", - "... ... 
\n", - "68703 [#halloweencostumes] \n", - "68704 [] \n", - "68705 [#MUFC] \n", - "68706 [] \n", - "68707 [#BurkeCentre] \n", - "68708 [] \n", - "68709 [] \n", - "68710 [#portlandoregon, #portlandhalloween, #carrie…] \n", - "68711 [] \n", - "68712 [] \n", - "68713 [] \n", - "68714 [] \n", - "68715 [] \n", - "68716 [] \n", - "68717 [] \n", - "68718 [] \n", - "68719 [] \n", - "68720 [] \n", - "68721 [] \n", - "68722 [#WednesdayWisdom] \n", - "68723 [] \n", - "68724 [] \n", - "68725 [] \n", - "68726 [] \n", - "68727 [] \n", - "68728 [] \n", - "68729 [] \n", - "68730 [#NYCStrong] \n", - "68731 [#GreatSuccess] \n", - "68732 [] \n", - "\n", - " LINKED_USER datetime \\\n", - "0 [] 2017-11-01 13:29:00 \n", - "1 [] 2017-11-01 13:29:00 \n", - "2 [] 2017-11-01 13:29:03 \n", - "3 [] 2017-11-01 13:29:04 \n", - "4 [@hiphopphiIes] 2017-11-01 13:29:06 \n", - "5 [@WizMommma] 2017-11-01 13:29:02 \n", - "6 [] 2017-11-01 13:29:05 \n", - "7 [] 2017-11-01 13:29:07 \n", - "8 [@SeaDimon, @lsarsour] 2017-11-01 13:29:09 \n", - "9 [@Ian_khetye] 2017-11-01 13:29:10 \n", - "10 [] 2017-11-01 13:29:13 \n", - "11 [@Dimafadma] 2017-11-01 13:29:15 \n", - "12 [] 2017-11-01 13:29:16 \n", - "13 [] 2017-11-01 13:29:20 \n", - "14 [] 2017-11-01 13:29:20 \n", - "15 [] 2017-11-01 13:29:22 \n", - "16 [] 2017-11-01 13:29:23 \n", - "17 [@yungbabytate] 2017-11-01 13:29:23 \n", - "18 [@cmckenney] 2017-11-01 13:29:23 \n", - "19 [] 2017-11-01 13:29:25 \n", - "20 [] 2017-11-01 13:29:25 \n", - "21 [] 2017-11-01 13:29:26 \n", - "22 [] 2017-11-01 13:29:26 \n", - "23 [@ctrlpurp] 2017-11-01 13:29:29 \n", - "24 [@saunders_court1] 2017-11-01 13:29:30 \n", - "25 [] 2017-11-01 13:29:31 \n", - "26 [] 2017-11-01 13:29:32 \n", - "27 [@xxxtentacion] 2017-11-01 13:29:35 \n", - "28 [] 2017-11-01 13:29:35 \n", - "29 [@justinerooney_] 2017-11-01 13:29:37 \n", - "... ... ... \n", - "68703 [] 2017-11-01 07:23:04 \n", - "68704 [] 2017-11-01 07:23:05 \n", - "68705 [] 2017-11-01 07:23:06 \n", - "68706 [] 2017-11-01 07:23:07 \n", - "68707 [] 2017-11-01 07:23:18 \n", - "68708 [@mychosliaheart, @BarrettoJulia, @iamjoshuaga... 2017-11-01 07:23:18 \n", - "68709 [] 2017-11-01 07:23:24 \n", - "68710 [] 2017-11-01 07:23:24 \n", - "68711 [@8limbsbondi...] 
2017-11-01 07:23:26 \n", - "68712 [] 2017-11-01 07:23:27 \n", - "68713 [@rfrandrea, @AdaaanAndyyy] 2017-11-01 07:23:30 \n", - "68714 [@peachshua1230] 2017-11-01 07:23:31 \n", - "68715 [] 2017-11-01 07:23:34 \n", - "68716 [] 2017-11-01 07:23:34 \n", - "68717 [@BeachBoy_Gab] 2017-11-01 07:23:34 \n", - "68718 [] 2017-11-01 07:23:34 \n", - "68719 [] 2017-11-01 07:23:35 \n", - "68720 [] 2017-11-01 07:23:36 \n", - "68721 [] 2017-11-01 07:23:37 \n", - "68722 [] 2017-11-01 07:23:41 \n", - "68723 [@Louis_Tomlinson, @NiallOfficial] 2017-11-01 07:23:44 \n", - "68724 [] 2017-11-01 07:23:45 \n", - "68725 [] 2017-11-01 07:23:45 \n", - "68726 [] 2017-11-01 07:23:46 \n", - "68727 [] 2017-11-01 07:23:46 \n", - "68728 [] 2017-11-01 07:23:46 \n", - "68729 [@discopiggu] 2017-11-01 07:23:46 \n", - "68730 [] 2017-11-01 07:23:50 \n", - "68731 [@BrianyH] 2017-11-01 07:23:50 \n", - "68732 [@amitbarman520] 2017-11-01 07:23:53 \n", - "\n", - " id lang person reply_to \\\n", - "0 925716304635547600 en 31507978 NaN \n", - "1 925716304664911900 en 231994649 NaN \n", - "2 925716317214089200 en 2592765104 NaN \n", - "3 925716321416949800 en 278737933 NaN \n", - "4 925716329801310200 en 824586253634981900 9.257162e+17 \n", - "5 925716313019965400 en 1581953814 9.257088e+17 \n", - "6 925716325607133200 en 1001999683 NaN \n", - "7 925716334008082400 en 745222369183043600 NaN \n", - "8 925716342401052700 en 798557155217539100 9.257147e+17 \n", - "9 925716346570240000 en 744396039126421500 9.250629e+17 \n", - "10 925716359182520300 en 721490010118205400 8.965900e+17 \n", - "11 925716367558545400 en 520536723 9.257159e+17 \n", - "12 925716371735900200 en 850852815941517300 NaN \n", - "13 925716388513230800 en 914145041588867100 NaN \n", - "14 925716388525645800 en 2425405622 NaN \n", - "15 925716396931240000 en 4614871873 NaN \n", - "16 925716401125331000 en 2831608345 NaN \n", - "17 925716401133948900 en 788571974633009200 9.255778e+17 \n", - "18 925716401125544000 en 218307802 9.257115e+17 \n", - "19 925716409489002500 en 885261166146179100 NaN \n", - "20 925716409497272300 en 831437760833609700 NaN \n", - "21 925716413699854300 en 231664542 NaN \n", - "22 925716413679009800 en 3196847035 NaN \n", - "23 925716426278735900 en 918492858352635900 9.257161e+17 \n", - "24 925716430473039900 en 3471187337 9.257163e+17 \n", - "25 925716434667184100 en 780060488600199200 NaN \n", - "26 925716438853345300 en 388380690 NaN \n", - "27 925716451457163300 en 899320696869974000 NaN \n", - "28 925716451461357600 en 784790670 NaN \n", - "29 925716459828936700 en 3051266655 NaN \n", - "... ... ... ... ... 
\n", - "68703 925624214522036200 en 1672876458 NaN \n", - "68704 925624218682777600 en 382473866 NaN \n", - "68705 925624222889766900 en 893145405457911800 NaN \n", - "68706 925624227088121900 en 240378516 NaN \n", - "68707 925624273237983200 en 784620573209002000 NaN \n", - "68708 925624273212805100 en 170998187 9.254136e+17 \n", - "68709 925624298395533300 en 1348667816 NaN \n", - "68710 925624298378801200 en 722481645765300200 NaN \n", - "68711 925624306779897900 en 2443251500 NaN \n", - "68712 925624310974136300 en 2406186390 NaN \n", - "68713 925624323557146600 en 1039448149 9.256150e+17 \n", - "68714 925624327755591700 en 845085544589672400 9.256009e+17 \n", - "68715 925624340342812700 en 924752524871131100 NaN \n", - "68716 925624340355280900 en 419493819 NaN \n", - "68717 925624340346937300 en 2457745952 9.256219e+17 \n", - "68718 925624340338507800 en 1955767531 NaN \n", - "68719 925624344524361700 en 796490344581898200 NaN \n", - "68720 925624348710285300 en 907808317124177900 NaN \n", - "68721 925624352929910800 en 262162415 NaN \n", - "68722 925624369715515400 en 574882525 NaN \n", - "68723 925624382269124600 en 556175173 9.254038e+17 \n", - "68724 925624386455031800 en 1610265588 NaN \n", - "68725 925624386454937600 en 4760724450 NaN \n", - "68726 925624390657572900 en 882858115636514800 NaN \n", - "68727 925624390682849300 en 188129628 NaN \n", - "68728 925624390666129400 en 2473135939 NaN \n", - "68729 925624390670106600 en 2373584209 9.256241e+17 \n", - "68730 925624407459971100 en 181689756 NaN \n", - "68731 925624407460057100 en 60160788 9.254610e+17 \n", - "68732 925624420022063100 en 3792290725 9.256215e+17 \n", - "\n", - " text \n", - "0 fashionbombdaily's photo 🏼 \n", - "1 It’s scary how on point my horoscope be \n", - "3 vivalcli: Portraits by Zhao Guojing an... \n", - "4 i wanna know too \n", - "5 veda was yoda too \n", - "6 I’m less stressed about turning 30 now ... \n", - "7 Full charged. \n", - "8 That’s part of the problem, (they) got me emotional there \n", - "11 Happy month to you and your loved ones ... \n", - "12 Maxx and Patricia. Family hangs at For Good He... \n", - "13 I need to STOP beating myself up with my thoug... \n", - "14 Cutest Son Roll no. 31 \n", - "15 By the summer I should have everything up and ... \n", - "16 I know my English is not that good but that do... \n", - "17 I u mama \n", - "18 That picture was NOT taken this morning... \n", - "19 Welcome to \n", - "20 Awkward \n", - "21 back at it with supernatural \n", - "22 One of the best things I've learned was to sto... \n", - "23 Can I taste? \n", - "24 we miss you ☹️ \n", - "25 Actually my bias in WJSN are Eunseo & Bona... \n", - "26 I so cannot be bothered with the rest of the d... \n", - "27 2lit4life \n", - "28 I’m not stop saying that! \n", - "29 HAPPY BIHDAY this one falls under the weird crazy... \n", - "68704 I'm not allowed to have chocolates yet, then I... \n", - "68705 Manchester United manager Mourinho slams 'spec... \n", - "68706 en Bushwhick \n", - "68707 : For Taxi Look, Mammeh and Daddeh! ... \n", - "68709 Life is so good with you \n", - "68710 Happy Halloween! 🏽 \n", - "68713 May pre-month celebration sis Awww Ajsksjdjd im smiling like ... \n", - "68715 I hate when I send a text or snap n I'm so anx... \n", - "68716 The answer is no I have no plans and I never l... \n", - "68717 LMAOOO I'm so proud \n", - "68718 My cousin/little sister is leaving to San Fran... \n", - "68719 Can't be alone w my thoughts tonight so just g... \n", - "68720 ambot!!! 
\n", - "68721 🏽🏽 1st of the month!!Happy 1st o... \n", - "68722 ... stay in bed \n", - "68723 THIS IS GOLD. GOLD. thank you for the kind compliment \n", - "68725 Enjoyed the silence \n", - "68726 OS: Spiderman Homecoming \n", - "68727 Go to hell πŸ½β€β™€οΈ \n", - "68728 Thank you Yomi! \n", - "68729 Lol. Just enjoy the stars. Music Kidhar... \n", - "68730 Thoughts and prayers for NY🏻 \n", - "68731 I searched COCK, PENIS, SHLONG, WINKY, ... \n", - "68732 Thank you so much<... \n", - "\n", - "[68733 rows x 9 columns]" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "twitter_data = pd.read_json(json_files[0], encoding=\"utf-8\")\n", - "twitter_data" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* extracting emojis and text" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [], - "source": [ - "emojis = twitter_data['EMOJI']\n", - "plain_text = twitter_data['text']" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* make our plain text more \"plain\":\n", - " * removing the keyword `` (just for the beginning)\n", - " * removing remaining useless emojis, like skin modifier etc." - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "# defining blacklist for modifier emojis:\n", - "emoji_blacklist = set([\n", - " chr(0x1F3FB),\n", - " chr(0x1F3FC),\n", - " chr(0x1F3FD),\n", - " chr(0x1F3FE),\n", - " chr(0x1F3FF),\n", - " chr(0x2642),\n", - " chr(0x2640)\n", - "])" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [], - "source": [ - "# filtering them and the EMOJI keyword out:\n", - "plain_text = plain_text.str.replace(\"(||)\",\"\").str.replace(\"[\" + \"\".join(list(emoji_blacklist)) + \"]\",\"\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* convert all emojis to a sentiment vector" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [], - "source": [ - "labels = emoji2sent([latest(e) for e in emojis])\n" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "68733" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "len(labels)" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [], - "source": [ - "wrong_labels = np.isnan(np.linalg.norm(labels, axis=1))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* remove all data we have no label for" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [], - "source": [ - "labels = labels[np.invert(wrong_labels)]\n", - "plain_text = plain_text[np.invert(wrong_labels)]\n", - "emojis = emojis[np.invert(wrong_labels)]" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "33368 33368 33368\n" - ] - } - ], - "source": [ - "print(len(labels), len(emojis), len(plain_text))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* Apply stemming and lemmatization (if needed)" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "metadata": {}, - "outputs": [], - "source": [ - "from nltk.stem.snowball import SnowballStemmer\n", - "from nltk.stem import 
WordNetLemmatizer\n", - "from nltk import pos_tag\n", - "from nltk import word_tokenize\n", - "from nltk.corpus import wordnet" - ] - }, - { - "cell_type": "code", - "execution_count": 17, - "metadata": {}, - "outputs": [], - "source": [ - "def get_wordnet_pos(treebank_tag):\n", - "\n", - " if treebank_tag.startswith('J'):\n", - " return wordnet.ADJ\n", - " elif treebank_tag.startswith('V'):\n", - " return wordnet.VERB\n", - " elif treebank_tag.startswith('N'):\n", - " return wordnet.NOUN\n", - " elif treebank_tag.startswith('R'):\n", - " return wordnet.ADV\n", - " else:\n", - " return wordnet.NOUN" - ] - }, - { - "cell_type": "code", - "execution_count": 18, - "metadata": {}, - "outputs": [], - "source": [ - "stemmer = SnowballStemmer(\"english\")\n", - "for key in plain_text.keys():\n", - " stemmed_sent = []\n", - " for word in plain_text[key].split(\" \"):\n", - " word_stemmed = stemmer.stem(word)\n", - " stemmed_sent.append(word_stemmed)\n", - " stemmed_sent = (\" \").join(stemmed_sent)\n", - " plain_text[key] = stemmed_sent" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2 woooaaaahhh\n", - "4 i wan na know too\n", - "6 i 'm le stress about turn 30 now i think i'v r...\n", - "9 got me emot there\n", - "14 cutest son roll no . 31\n", - "15 by the summer i should have everyth up and run...\n", - "18 that pictur wa not taken this morning !\n", - "26 i so can not be bother with the rest of the da...\n", - "27 2lit4lif\n", - "35 hate fall asleep befor i put my phone on the c...\n", - "36 unexpect saw two of my crush today . this day ...\n", - "40 elvi whi o whi ? our girl wa such a love stori...\n", - "42 you'r late i ate them all\n", - "43 me toooo\n", - "47 the pressur is just too much\n", - "51 i broke grammar\n", - "52 have not desir to go to work today\n", - "53 omg do n't it scari all i know is that i do no...\n", - "56 achoo mr. fuck nigga you , you done caught cau...\n", - "58 i can never catch a dang break !\n", - "59 pas my p on two hour of sleep\n", - "60 i 'm realli not amus\n", - "65 i can help you\n", - "71 whew i slept good af last night\n", - "74 this would be epic . pizza and play perfect gi...\n", - "76 hey , it 1st novemb\n", - "80 u is to press bitch for me to have been speak ...\n", - "88 lmfao thought it wa just me be bitter\n", - "89 yupp yuppp . super prettttyyy , my heart cant ...\n", - "90 bakit halo halong seri binanggit mo be ? none ...\n", - " ... \n", - "68675 go back to dark hair tomorrow , mhmm yasss\n", - "68677 i miss them so much\n", - "68678 i wan na feel your gut too\n", - "68683 everi time\n", - "68687 i neither own nor watch tv . now go watch cnn\n", - "68688 revolutionari love\n", - "68694 ear worm is run in the famili after sing an aw...\n", - "68696 ill never look at you the same . yeah you got ...\n", - "68699 it our 3 year anniversari today to celebrate ,...\n", - "68700 person that scare me\n", - "68701 damn girl . can u look ani hotter than this ? ...\n", - "68703 this one fall under the weird crazi one .\n", - "68704 i 'm not allow to have chocol yet , then i uni...\n", - "68705 manchest unit manag mourinho slam specialists'...\n", - "68708 look , mammeh and daddeh ! cuuutee..\n", - "68709 life is so good with you\n", - "68710 happi halloween !\n", - "68712 scotti and kristen halloween costum\n", - "68713 may pre-month celebr si\n", - "68717 lmaooo i 'm so proud\n", - "68720 ambot ! ! !\n", - "68721 1st of the month ! ! 
happi 1st of novemb *53 d...\n", - "68722 ... stay in bed\n", - "68723 this is gold . gold .\n", - "68724 thank you for the kind compliment\n", - "68725 enjoy the silenc\n", - "68728 thank you yomi !\n", - "68729 lol . just enjoy the star . music kidhar aur b...\n", - "68730 thought and prayer for ny\n", - "68732 thank you so muchhav a happi wednesday and a g...\n", - "Name: text, Length: 33368, dtype: object\n" - ] - } - ], - "source": [ - "lemmatizer = WordNetLemmatizer()\n", - "for key in plain_text.keys():\n", - " lemmatized_sent = []\n", - " sent_pos = pos_tag(word_tokenize(plain_text[key]))\n", - " for word in sent_pos:\n", - " wordnet_pos = get_wordnet_pos(word[1].lower())\n", - " word_lemmatized = lemmatizer.lemmatize(word[0], pos=wordnet_pos)\n", - " lemmatized_sent.append(word_lemmatized)\n", - " lemmatized_sent = (\" \").join(lemmatized_sent)\n", - " plain_text[key] = lemmatized_sent\n", - "print(plain_text)" - ] - },
- { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* generate weights:" - ] - },
- { - "cell_type": "code", - "execution_count": 20, - "metadata": {}, - "outputs": [], - "source": [ - "# first, count how many tweets each emoji occurs in:\n", - "emoji_count = {}\n", - "\n", - "for e_list in emojis:\n", - " for e in set(e_list):\n", - " if e not in emoji_count:\n", - " emoji_count[e] = 0\n", - " emoji_count[e] += 1\n", - "\n", - "emoji_sum = sum([emoji_count[e] for e in emoji_count])\n", - "\n", - "emoji_weights = {}\n", - "for e in emoji_count:\n", - " # idf-style weighting: rare emojis get larger weights\n", - " emoji_weights[e] = np.log((emoji_sum / emoji_count[e]))\n", - "\n", - "weights_sum = sum([emoji_weights[x] for x in emoji_weights])\n", - "\n", - "# normalize:\n", - "for e in emoji_weights:\n", - " emoji_weights[e] = emoji_weights[e] / weights_sum\n", - "\n", - "emoji_weights['X'] = 0 # dummy values\n", - "emoji_count['X'] = 0" - ] - },
- { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* most used emojis in the dataset" - ] - },
- { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [], - "source": [ - "import operator" - ] - },
- { - "cell_type": "code", - "execution_count": 22, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[('😂', 10182),\n", - " ('😭', 3893),\n", - " ('😍', 2866),\n", - " ('😩', 1647),\n", - " ('😊', 1450),\n", - " ('😘', 1151),\n", - " ('🙏', 1089),\n", - " ('🙌', 1003),\n", - " ('😉', 752),\n", - " ('😁', 697),\n", - " ('😅', 651),\n", - " ('😎', 606),\n", - " ('😢', 544),\n", - " ('😒', 539),\n", - " ('😏', 478),\n", - " ('😌', 434),\n", - " ('😔', 415),\n", - " ('😋', 397),\n", - " ('😀', 392),\n", - " ('😤', 368)]" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "['😂',\n", - " '😭',\n", - " '😍',\n", - " '😩',\n", - " '😊',\n", - " '😘',\n", - " '🙏',\n", - " '🙌',\n", - " '😉',\n", - " '😁',\n", - " '😅',\n", - " '😎',\n", - " '😢',\n", - " '😒',\n", - " '😏',\n", - " '😌',\n", - " '😔',\n", - " '😋',\n", - " '😀',\n", - " '😤']" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "sorted_emoji_count = list(reversed(sorted(emoji_count.items(), key=operator.itemgetter(1))))\n", - "display(sorted_emoji_count[:20])\n", - "\n", - "top_emojis = [x[0] for x in sorted_emoji_count[:20]]\n", - "display(top_emojis)" - ] - },
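As a quick, self-contained illustration of the idf-style emoji weighting computed above, here is the same computation on toy data (the `toy_emojis` list is made up for this sketch; the notebook runs this on the full `emojis` column):

```python
import numpy as np

# toy per-tweet emoji lists standing in for the `emojis` column above
toy_emojis = [['😂'], ['😂', '😭'], ['😍'], ['😂']]

emoji_count = {}
for e_list in toy_emojis:
    for e in set(e_list):
        emoji_count[e] = emoji_count.get(e, 0) + 1

emoji_sum = sum(emoji_count.values())

# idf-style weight: frequent emojis get small weights, rare ones large weights
emoji_weights = {e: np.log(emoji_sum / c) for e, c in emoji_count.items()}

# normalize the weights so they sum up to 1
weights_sum = sum(emoji_weights.values())
emoji_weights = {e: w / weights_sum for e, w in emoji_weights.items()}

print(emoji_weights)  # 😂 (the most frequent) ends up with the smallest weight
```

- { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* only learn the most used ones:" - ] - },
- { - "cell_type": "code", - "execution_count": 23, - 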
"metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "26197 26197 26197\n" - ] - } - ], - "source": [ - "in_top = [sentiment_vector_to_emoji(x) in top_emojis for x in labels]\n", - "labels = labels[in_top]\n", - "plain_text = plain_text[in_top]\n", - "emojis = emojis[in_top]\n", - "print(len(labels), len(emojis), len(plain_text))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* generating train and test set:" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "metadata": {}, - "outputs": [], - "source": [ - "X1, Xt1, y1, yt1 = train_test_split(plain_text, labels, test_size=0.1, random_state=4222)" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "metadata": {}, - "outputs": [], - "source": [ - "#y1_weights = np.array([(sum([emoji_weights[e] for e in e_list]) / len(e_list)) if len(e_list) > 0 else 0 for e_list in sent2emoji(y1)])" - ] - }, - { - "cell_type": "code", - "execution_count": 26, - "metadata": {}, - "outputs": [], - "source": [ - "vectorizer = TfidfVectorizer(stop_words='english')\n", - "vec_train = vectorizer.fit_transform(X1)\n", - "vec_test = vectorizer.transform(Xt1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* train. this can take a very long time..." - ] - }, - { - "cell_type": "code", - "execution_count": 27, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Using TensorFlow backend.\n" - ] - } - ], - "source": [ - "from sklearn.neural_network import MLPClassifier as MLP\n", - "from sklearn.multiclass import OneVsRestClassifier as OVRC\n", - "from sklearn.tree import DecisionTreeClassifier as DTC\n", - "\n", - "from keras.models import Sequential\n", - "from keras.layers import Dense" - ] - }, - { - "cell_type": "code", - "execution_count": 28, - "metadata": {}, - "outputs": [], - "source": [ - "def train(max_size = 10000, layers=[(1024, 'relu'),(y1[0].shape[0],'softmax')], random_state=4222, ovrc=False, n_iter=5):\n", - " \n", - " model = Sequential()\n", - " \n", - " # build mlp layers:\n", - " keras_layers = []\n", - " first_layer = True\n", - " for layer in layers:\n", - " if first_layer:\n", - " model.add(Dense(units=layer[0], activation=layer[1], input_dim=vectorizer.transform([\" \"])[0]._shape[1]))\n", - " first_layer = False\n", - " else:\n", - " model.add(Dense(units=layer[0], activation=layer[1]))\n", - " \n", - " #mlp = MLPClassifier(layers=sknn_layers, random_state=random_state, verbose=True, n_iter=n_iter, batch_size=100)\n", - " \n", - " model.compile(loss='mean_squared_error',\n", - " optimizer='adam')\n", - " \n", - " clf = OVRC(model) if ovrc else model\n", - "\n", - " clf.fit(vec_train[:max_size].A, y1[:max_size], validation_split=0.2, epochs=n_iter)#, sample_weight=y1_weights[:max_size])\n", - " \n", - " return clf" - ] - }, - { - "cell_type": "code", - "execution_count": 29, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Train on 18861 samples, validate on 4716 samples\n", - "Epoch 1/3\n", - "18861/18861 [==============================] - 1106s 59ms/step - loss: 0.0185 - val_loss: 0.0152\n", - "Epoch 2/3\n", - "18861/18861 [==============================] - 1104s 59ms/step - loss: 0.0107 - val_loss: 0.0163\n", - "Epoch 3/3\n", - "18861/18861 [==============================] - 1106s 59ms/step - loss: 0.0065 - val_loss: 0.0166\n" - ] - } - ], - "source": [ - "clf = train(max_size=100000,layers=[(10000, 'relu'),(5000, 
'relu'),(2500, 'relu'),(y1[0].shape[0],None)], n_iter=3)" - ] - },
- { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* make predictions on the test set (they are stored in a csv file further below):" - ] - },
- { - "cell_type": "code", - "execution_count": 30, - "metadata": {}, - "outputs": [], - "source": [ - "pred = clf.predict(vectorizer.transform(Xt1))" - ] - },
- { - "cell_type": "code", - "execution_count": 31, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.011668838\n", - "0.03387511671001757\n" - ] - } - ], - "source": [ - "# compare the spread of the predictions with the spread of the labels:\n", - "print(np.linalg.norm(np.var(pred, axis=0)))\n", - "print(np.linalg.norm(np.var(labels, axis=0)))\n" - ] - },
- { - "cell_type": "code", - "execution_count": 32, - "metadata": {}, - "outputs": [], - "source": [ - "# build a dataframe to visualize test results:\n", - "testlist = pd.DataFrame({'text': Xt1, \n", - " 'teacher': sent2emoji(yt1),\n", - " 'teacher_sentiment': yt1.tolist(),\n", - " 'predict': sent2emoji(pred, custom_target_emojis=top_emojis),\n", - " 'predicted_sentiment': pred.tolist()})" - ] - },
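Besides the exact-match rate and the mean squared error computed a few cells below, a distance-based score can be informative for these 3d sentiment vectors. A minimal sketch, assuming the `pred` and `yt1` arrays from the cells above (the helper itself is not part of the original notebook):

```python
import numpy as np

def mean_cosine_similarity(a, b, eps=1e-12):
    # row-wise cosine similarity between two (n_samples, 3) sentiment arrays
    a, b = np.asarray(a), np.asarray(b)
    num = (a * b).sum(axis=1)
    den = np.linalg.norm(a, axis=1) * np.linalg.norm(b, axis=1) + eps
    return (num / den).mean()

# usage on the test set: mean_cosine_similarity(pred, yt1)
```

- { - "cell_type": "code", - "execution_count": 33, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "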
\n", - "\n", - "\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " 
\n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "
textteacherteacher_sentimentpredictpredicted_sentiment
35671i feel like i care so much more in everi situatπŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...πŸ˜…[0.4447824954986572, 0.30056363344192505, 0.27...
25683i did not meat to add that 2 there ... hav see...πŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...😁[0.5660845637321472, 0.1737498641014099, 0.284...
8985neverβ€¦πŸ˜Š[0.7040175768989329, 0.059322033898305086, 0.2...πŸ˜‚[0.4871470034122467, 0.26607102155685425, 0.27...
5410lmao on me ! ! ! wtf wa he suppos to sayπŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...😒[0.4061833620071411, 0.3226468861103058, 0.273...
62611this dude alway help me get through my school ...😊[0.7040175768989329, 0.059322033898305086, 0.2...πŸ˜‚[0.4549962878227234, 0.21886931359767914, 0.33...
48197happi b'day sir😊[0.7040175768989329, 0.059322033898305086, 0.2...πŸ˜€[0.6561306715011597, 0.11821962147951126, 0.27...
23654you need some good old fashion swedish jesusπŸ™[0.4983755685510071, 0.08057179987004548, 0.42...πŸ˜…[0.4600130021572113, 0.28595462441444397, 0.27...
58207these late shift are make me not have a social...πŸ˜…[0.47186147186147187, 0.2922077922077922, 0.23...πŸ˜‚[0.49543458223342896, 0.25571855902671814, 0.2...
374dc this weekend😍[0.7296744771190439, 0.05173769460607014, 0.21...😌[0.6040589213371277, 0.15823380649089813, 0.26...
26310paul lad you 'll make e blush😊[0.7040175768989329, 0.059322033898305086, 0.2...πŸ˜‚[0.452500581741333, 0.2882971167564392, 0.2790...
30892did you have a fun halloween ?πŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...πŸ˜‚[0.4644194543361664, 0.2708289325237274, 0.277...
11868hi handsom😍[0.7296744771190439, 0.05173769460607014, 0.21...😊[0.7254493236541748, 0.12355809658765793, 0.22...
46219i 'm not okay with this , i 'm su snapchat😭[0.34310532030401736, 0.4364820846905538, 0.22...πŸ˜”[0.3264158070087433, 0.48023173213005066, 0.23...
13583my parent be so mad i be buy new stuff & amp ;...😭[0.34310532030401736, 0.4364820846905538, 0.22...πŸ˜‚[0.4271591007709503, 0.29361462593078613, 0.29...
43843one of the few song that calm me down esp on f...πŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...πŸ˜‚[0.44168680906295776, 0.2790682315826416, 0.29...
63589iphon x bouta be the last phone we ever buyπŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...πŸ˜…[0.41863512992858887, 0.3106093108654022, 0.28...
53695visit my main man today ❀ i miss u papa😭[0.34310532030401736, 0.4364820846905538, 0.22...😁[0.5650997757911682, 0.19236208498477936, 0.27...
67529donut😍[0.7296744771190439, 0.05173769460607014, 0.21...πŸ˜‚[0.45511549711227417, 0.28582143783569336, 0.2...
25493ha anyon heard this by ? who the fuck knew he ...😘[0.7546600877192983, 0.05290570175438596, 0.19...πŸ˜…[0.4276219606399536, 0.30413898825645447, 0.28...
19486wow superrbb😍[0.7296744771190439, 0.05173769460607014, 0.21...😊[0.7149834036827087, 0.10459273308515549, 0.24...
48449of cours they do n't . their perfect model of ...πŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...πŸ˜‚[0.5363025665283203, 0.22163532674312592, 0.27...
4504plea pick me . pick me . pick me . please .😍[0.7296744771190439, 0.05173769460607014, 0.21...πŸ˜‚[0.4641677737236023, 0.18824045360088348, 0.37...
40285shiid no crack is wack😭[0.34310532030401736, 0.4364820846905538, 0.22...πŸ˜‚[0.44292521476745605, 0.28201037645339966, 0.2...
56741pj still sleep like a newborn😩[0.22289823008849557, 0.5912610619469026, 0.18...😒[0.40168094635009766, 0.3777309060096741, 0.24...
22948an opinion doe n't mean you make sen first off...😌[0.6240601503759399, 0.13984962406015036, 0.23...πŸ˜‚[0.4365620017051697, 0.2830066680908203, 0.294...
68426missyou too😘[0.7546600877192983, 0.05290570175438596, 0.19...😭[0.3452186584472656, 0.4593580365180969, 0.223...
13431i swear she did😩[0.22289823008849557, 0.5912610619469026, 0.18...πŸ˜‚[0.4436468482017517, 0.2736954987049103, 0.294...
66287it true , he wa the mutt ( big-d ) nut .πŸ˜‰[0.5634451019066403, 0.0992767915844839, 0.337...πŸ˜…[0.47334975004196167, 0.2881445586681366, 0.26...
41980is happen so happi😭[0.34310532030401736, 0.4364820846905538, 0.22...πŸ˜…[0.4635038673877716, 0.30251604318618774, 0.26...
34632saw that the first one said sose you tmmrw and npπŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...πŸ˜…[0.43297499418258667, 0.31000325083732605, 0.2...
..................
47218keep it i do n't want itπŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...πŸ˜…[0.470976859331131, 0.2997904419898987, 0.2489...
350876 year ago today we said our final good bye , ...😒[0.39118825100133514, 0.38451268357810414, 0.2...πŸ˜…[0.4478594660758972, 0.30181050300598145, 0.27...
61252amenπŸ™[0.4983755685510071, 0.08057179987004548, 0.42...😁[0.5783949494361877, 0.15405726432800293, 0.30...
39960fact😎[0.5981432360742706, 0.10477453580901856, 0.29...πŸ˜‚[0.4938848316669464, 0.24237176775932312, 0.25...
365nw final found someon hi loydiπŸ˜…[0.47186147186147187, 0.2922077922077922, 0.23...😌[0.6203110218048096, 0.17863908410072327, 0.23...
50665i need ur shoulder to cri on😒[0.39118825100133514, 0.38451268357810414, 0.2...πŸ˜…[0.4619033932685852, 0.2977892756462097, 0.267...
21007awww you 'll get me teari eye gurl !😘[0.7546600877192983, 0.05290570175438596, 0.19...πŸ˜…[0.5029815435409546, 0.2711288630962372, 0.253...
18819γ€°happi hump day to all my ladi is def sweet like😘[0.7546600877192983, 0.05290570175438596, 0.19...πŸ™Œ[0.6974080801010132, 0.11416944861412048, 0.24...
42421thanks .😊[0.7040175768989329, 0.059322033898305086, 0.2...😁[0.5765037536621094, 0.18341206014156342, 0.26...
38705it okay i did n't see you until i wa in your faceπŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...😒[0.4030645787715912, 0.36543628573417664, 0.25...
47033fall in with alexissdang β¬…οΈπŸ˜[0.7296744771190439, 0.05173769460607014, 0.21...😌[0.6219494342803955, 0.1795988380908966, 0.226...
38742let your wild side free girl ! ! ! ! !πŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...😒[0.4049544930458069, 0.35102248191833496, 0.25...
67610can we get season 3 already ! ?😭[0.34310532030401736, 0.4364820846905538, 0.22...😒[0.416861355304718, 0.35468244552612305, 0.245...
61021have to quot this bih rememb this niggaπŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...πŸ˜‚[0.4401417672634125, 0.2810788154602051, 0.292...
14904he so damn😩[0.22289823008849557, 0.5912610619469026, 0.18...πŸ˜‚[0.5411785244941711, 0.2148503214120865, 0.246...
50994yasss it time for a great show maxloyalβ„’β™› : gi...πŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...πŸ™Œ[0.7073097229003906, 0.12480126321315765, 0.23...
6973got to love a fish finger sarniπŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...😌[0.6375303864479065, 0.14495858550071716, 0.24...
3269say datπŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...πŸ˜‚[0.5052645802497864, 0.21280284225940704, 0.30...
54827if ur happi i 'm happi😊[0.7040175768989329, 0.059322033898305086, 0.2...πŸ˜…[0.5138391852378845, 0.26520460844039917, 0.25...
19409rockstarπŸ˜€[0.6560364464692483, 0.08428246013667426, 0.25...πŸ˜‚[0.520979106426239, 0.22392335534095764, 0.265...
38703god fuck me i alreadi fix it oncπŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...πŸ˜”[0.32009249925613403, 0.4819949269294739, 0.22...
25133it social accept to listen to ani christma mus...πŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...😁[0.5703040361404419, 0.17875489592552185, 0.27...
15339i thrash p in imessag now he wan na bet on 2kπŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...πŸ˜…[0.4571400284767151, 0.2949279546737671, 0.269...
28082my fatass need some lemon pepper wing w a lil ...πŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...😭[0.36291953921318054, 0.4310339391231537, 0.22...
44462fck off tristan yes soph collect u in 15 n we ...😀[0.2691131498470948, 0.4801223241590214, 0.250...😌[0.6122021675109863, 0.15579015016555786, 0.27...
60212open the bagπŸ˜‹[0.6784741144414169, 0.04495912806539509, 0.27...πŸ˜‚[0.470508873462677, 0.2711063623428345, 0.2810...
34950i ask peopl to guess my zodiac sign and this o...😀[0.2691131498470948, 0.4801223241590214, 0.250...πŸ˜‚[0.4431285262107849, 0.27477312088012695, 0.29...
59462i regret this smπŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...😒[0.40375131368637085, 0.3289208710193634, 0.27...
19465fuck collegπŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...πŸ˜…[0.4494284391403198, 0.31076472997665405, 0.26...
8135aye this wa my first time actual see it wendi ...πŸ˜‚[0.46813021474490496, 0.24716181096977158, 0.2...πŸ˜…[0.42708122730255127, 0.31392431259155273, 0.2...
\n", - "

2620 rows Γ— 5 columns

\n", - "
" - ], - "text/plain": [ - " text teacher \\\n", - "35671 i feel like i care so much more in everi situat πŸ˜‚ \n", - "25683 i did not meat to add that 2 there ... hav see... πŸ˜‚ \n", - "8985 never… 😊 \n", - "5410 lmao on me ! ! ! wtf wa he suppos to say πŸ˜‚ \n", - "62611 this dude alway help me get through my school ... 😊 \n", - "48197 happi b'day sir 😊 \n", - "23654 you need some good old fashion swedish jesus πŸ™ \n", - "58207 these late shift are make me not have a social... πŸ˜… \n", - "374 dc this weekend 😍 \n", - "26310 paul lad you 'll make e blush 😊 \n", - "30892 did you have a fun halloween ? πŸ˜‚ \n", - "11868 hi handsom 😍 \n", - "46219 i 'm not okay with this , i 'm su snapchat 😭 \n", - "13583 my parent be so mad i be buy new stuff & amp ;... 😭 \n", - "43843 one of the few song that calm me down esp on f... πŸ˜‚ \n", - "63589 iphon x bouta be the last phone we ever buy πŸ˜‚ \n", - "53695 visit my main man today ❀ i miss u papa 😭 \n", - "67529 donut 😍 \n", - "25493 ha anyon heard this by ? who the fuck knew he ... 😘 \n", - "19486 wow superrbb 😍 \n", - "48449 of cours they do n't . their perfect model of ... πŸ˜‚ \n", - "4504 plea pick me . pick me . pick me . please . 😍 \n", - "40285 shiid no crack is wack 😭 \n", - "56741 pj still sleep like a newborn 😩 \n", - "22948 an opinion doe n't mean you make sen first off... 😌 \n", - "68426 missyou too 😘 \n", - "13431 i swear she did 😩 \n", - "66287 it true , he wa the mutt ( big-d ) nut . πŸ˜‰ \n", - "41980 is happen so happi 😭 \n", - "34632 saw that the first one said sose you tmmrw and np πŸ˜‚ \n", - "... ... ... \n", - "47218 keep it i do n't want it πŸ˜‚ \n", - "35087 6 year ago today we said our final good bye , ... 😒 \n", - "61252 amen πŸ™ \n", - "39960 fact 😎 \n", - "365 nw final found someon hi loydi πŸ˜… \n", - "50665 i need ur shoulder to cri on 😒 \n", - "21007 awww you 'll get me teari eye gurl ! 😘 \n", - "18819 γ€°happi hump day to all my ladi is def sweet like 😘 \n", - "42421 thanks . 😊 \n", - "38705 it okay i did n't see you until i wa in your face πŸ˜‚ \n", - "47033 fall in with alexissdang ⬅️ 😍 \n", - "38742 let your wild side free girl ! ! ! ! ! πŸ˜‚ \n", - "67610 can we get season 3 already ! ? 😭 \n", - "61021 have to quot this bih rememb this nigga πŸ˜‚ \n", - "14904 he so damn 😩 \n", - "50994 yasss it time for a great show maxloyalβ„’β™› : gi... πŸ˜‚ \n", - "6973 got to love a fish finger sarni πŸ˜‚ \n", - "3269 say dat πŸ˜‚ \n", - "54827 if ur happi i 'm happi 😊 \n", - "19409 rockstar πŸ˜€ \n", - "38703 god fuck me i alreadi fix it onc πŸ˜‚ \n", - "25133 it social accept to listen to ani christma mus... πŸ˜‚ \n", - "15339 i thrash p in imessag now he wan na bet on 2k πŸ˜‚ \n", - "28082 my fatass need some lemon pepper wing w a lil ... πŸ˜‚ \n", - "44462 fck off tristan yes soph collect u in 15 n we ... 😀 \n", - "60212 open the bag πŸ˜‹ \n", - "34950 i ask peopl to guess my zodiac sign and this o... 😀 \n", - "59462 i regret this sm πŸ˜‚ \n", - "19465 fuck colleg πŸ˜‚ \n", - "8135 aye this wa my first time actual see it wendi ... πŸ˜‚ \n", - "\n", - " teacher_sentiment predict \\\n", - "35671 [0.46813021474490496, 0.24716181096977158, 0.2... πŸ˜… \n", - "25683 [0.46813021474490496, 0.24716181096977158, 0.2... 😁 \n", - "8985 [0.7040175768989329, 0.059322033898305086, 0.2... πŸ˜‚ \n", - "5410 [0.46813021474490496, 0.24716181096977158, 0.2... 😒 \n", - "62611 [0.7040175768989329, 0.059322033898305086, 0.2... πŸ˜‚ \n", - "48197 [0.7040175768989329, 0.059322033898305086, 0.2... 
πŸ˜€ \n", - "23654 [0.4983755685510071, 0.08057179987004548, 0.42... πŸ˜… \n", - "58207 [0.47186147186147187, 0.2922077922077922, 0.23... πŸ˜‚ \n", - "374 [0.7296744771190439, 0.05173769460607014, 0.21... 😌 \n", - "26310 [0.7040175768989329, 0.059322033898305086, 0.2... πŸ˜‚ \n", - "30892 [0.46813021474490496, 0.24716181096977158, 0.2... πŸ˜‚ \n", - "11868 [0.7296744771190439, 0.05173769460607014, 0.21... 😊 \n", - "46219 [0.34310532030401736, 0.4364820846905538, 0.22... πŸ˜” \n", - "13583 [0.34310532030401736, 0.4364820846905538, 0.22... πŸ˜‚ \n", - "43843 [0.46813021474490496, 0.24716181096977158, 0.2... πŸ˜‚ \n", - "63589 [0.46813021474490496, 0.24716181096977158, 0.2... πŸ˜… \n", - "53695 [0.34310532030401736, 0.4364820846905538, 0.22... 😁 \n", - "67529 [0.7296744771190439, 0.05173769460607014, 0.21... πŸ˜‚ \n", - "25493 [0.7546600877192983, 0.05290570175438596, 0.19... πŸ˜… \n", - "19486 [0.7296744771190439, 0.05173769460607014, 0.21... 😊 \n", - "48449 [0.46813021474490496, 0.24716181096977158, 0.2... πŸ˜‚ \n", - "4504 [0.7296744771190439, 0.05173769460607014, 0.21... πŸ˜‚ \n", - "40285 [0.34310532030401736, 0.4364820846905538, 0.22... πŸ˜‚ \n", - "56741 [0.22289823008849557, 0.5912610619469026, 0.18... 😒 \n", - "22948 [0.6240601503759399, 0.13984962406015036, 0.23... πŸ˜‚ \n", - "68426 [0.7546600877192983, 0.05290570175438596, 0.19... 😭 \n", - "13431 [0.22289823008849557, 0.5912610619469026, 0.18... πŸ˜‚ \n", - "66287 [0.5634451019066403, 0.0992767915844839, 0.337... πŸ˜… \n", - "41980 [0.34310532030401736, 0.4364820846905538, 0.22... πŸ˜… \n", - "34632 [0.46813021474490496, 0.24716181096977158, 0.2... πŸ˜… \n", - "... ... ... \n", - "47218 [0.46813021474490496, 0.24716181096977158, 0.2... πŸ˜… \n", - "35087 [0.39118825100133514, 0.38451268357810414, 0.2... πŸ˜… \n", - "61252 [0.4983755685510071, 0.08057179987004548, 0.42... 😁 \n", - "39960 [0.5981432360742706, 0.10477453580901856, 0.29... πŸ˜‚ \n", - "365 [0.47186147186147187, 0.2922077922077922, 0.23... 😌 \n", - "50665 [0.39118825100133514, 0.38451268357810414, 0.2... πŸ˜… \n", - "21007 [0.7546600877192983, 0.05290570175438596, 0.19... πŸ˜… \n", - "18819 [0.7546600877192983, 0.05290570175438596, 0.19... πŸ™Œ \n", - "42421 [0.7040175768989329, 0.059322033898305086, 0.2... 😁 \n", - "38705 [0.46813021474490496, 0.24716181096977158, 0.2... 😒 \n", - "47033 [0.7296744771190439, 0.05173769460607014, 0.21... 😌 \n", - "38742 [0.46813021474490496, 0.24716181096977158, 0.2... 😒 \n", - "67610 [0.34310532030401736, 0.4364820846905538, 0.22... 😒 \n", - "61021 [0.46813021474490496, 0.24716181096977158, 0.2... πŸ˜‚ \n", - "14904 [0.22289823008849557, 0.5912610619469026, 0.18... πŸ˜‚ \n", - "50994 [0.46813021474490496, 0.24716181096977158, 0.2... πŸ™Œ \n", - "6973 [0.46813021474490496, 0.24716181096977158, 0.2... 😌 \n", - "3269 [0.46813021474490496, 0.24716181096977158, 0.2... πŸ˜‚ \n", - "54827 [0.7040175768989329, 0.059322033898305086, 0.2... πŸ˜… \n", - "19409 [0.6560364464692483, 0.08428246013667426, 0.25... πŸ˜‚ \n", - "38703 [0.46813021474490496, 0.24716181096977158, 0.2... πŸ˜” \n", - "25133 [0.46813021474490496, 0.24716181096977158, 0.2... 😁 \n", - "15339 [0.46813021474490496, 0.24716181096977158, 0.2... πŸ˜… \n", - "28082 [0.46813021474490496, 0.24716181096977158, 0.2... 😭 \n", - "44462 [0.2691131498470948, 0.4801223241590214, 0.250... 😌 \n", - "60212 [0.6784741144414169, 0.04495912806539509, 0.27... πŸ˜‚ \n", - "34950 [0.2691131498470948, 0.4801223241590214, 0.250... πŸ˜‚ \n", - "59462 [0.46813021474490496, 0.24716181096977158, 0.2... 
😒 \n", - "19465 [0.46813021474490496, 0.24716181096977158, 0.2... πŸ˜… \n", - "8135 [0.46813021474490496, 0.24716181096977158, 0.2... πŸ˜… \n", - "\n", - " predicted_sentiment \n", - "35671 [0.4447824954986572, 0.30056363344192505, 0.27... \n", - "25683 [0.5660845637321472, 0.1737498641014099, 0.284... \n", - "8985 [0.4871470034122467, 0.26607102155685425, 0.27... \n", - "5410 [0.4061833620071411, 0.3226468861103058, 0.273... \n", - "62611 [0.4549962878227234, 0.21886931359767914, 0.33... \n", - "48197 [0.6561306715011597, 0.11821962147951126, 0.27... \n", - "23654 [0.4600130021572113, 0.28595462441444397, 0.27... \n", - "58207 [0.49543458223342896, 0.25571855902671814, 0.2... \n", - "374 [0.6040589213371277, 0.15823380649089813, 0.26... \n", - "26310 [0.452500581741333, 0.2882971167564392, 0.2790... \n", - "30892 [0.4644194543361664, 0.2708289325237274, 0.277... \n", - "11868 [0.7254493236541748, 0.12355809658765793, 0.22... \n", - "46219 [0.3264158070087433, 0.48023173213005066, 0.23... \n", - "13583 [0.4271591007709503, 0.29361462593078613, 0.29... \n", - "43843 [0.44168680906295776, 0.2790682315826416, 0.29... \n", - "63589 [0.41863512992858887, 0.3106093108654022, 0.28... \n", - "53695 [0.5650997757911682, 0.19236208498477936, 0.27... \n", - "67529 [0.45511549711227417, 0.28582143783569336, 0.2... \n", - "25493 [0.4276219606399536, 0.30413898825645447, 0.28... \n", - "19486 [0.7149834036827087, 0.10459273308515549, 0.24... \n", - "48449 [0.5363025665283203, 0.22163532674312592, 0.27... \n", - "4504 [0.4641677737236023, 0.18824045360088348, 0.37... \n", - "40285 [0.44292521476745605, 0.28201037645339966, 0.2... \n", - "56741 [0.40168094635009766, 0.3777309060096741, 0.24... \n", - "22948 [0.4365620017051697, 0.2830066680908203, 0.294... \n", - "68426 [0.3452186584472656, 0.4593580365180969, 0.223... \n", - "13431 [0.4436468482017517, 0.2736954987049103, 0.294... \n", - "66287 [0.47334975004196167, 0.2881445586681366, 0.26... \n", - "41980 [0.4635038673877716, 0.30251604318618774, 0.26... \n", - "34632 [0.43297499418258667, 0.31000325083732605, 0.2... \n", - "... ... \n", - "47218 [0.470976859331131, 0.2997904419898987, 0.2489... \n", - "35087 [0.4478594660758972, 0.30181050300598145, 0.27... \n", - "61252 [0.5783949494361877, 0.15405726432800293, 0.30... \n", - "39960 [0.4938848316669464, 0.24237176775932312, 0.25... \n", - "365 [0.6203110218048096, 0.17863908410072327, 0.23... \n", - "50665 [0.4619033932685852, 0.2977892756462097, 0.267... \n", - "21007 [0.5029815435409546, 0.2711288630962372, 0.253... \n", - "18819 [0.6974080801010132, 0.11416944861412048, 0.24... \n", - "42421 [0.5765037536621094, 0.18341206014156342, 0.26... \n", - "38705 [0.4030645787715912, 0.36543628573417664, 0.25... \n", - "47033 [0.6219494342803955, 0.1795988380908966, 0.226... \n", - "38742 [0.4049544930458069, 0.35102248191833496, 0.25... \n", - "67610 [0.416861355304718, 0.35468244552612305, 0.245... \n", - "61021 [0.4401417672634125, 0.2810788154602051, 0.292... \n", - "14904 [0.5411785244941711, 0.2148503214120865, 0.246... \n", - "50994 [0.7073097229003906, 0.12480126321315765, 0.23... \n", - "6973 [0.6375303864479065, 0.14495858550071716, 0.24... \n", - "3269 [0.5052645802497864, 0.21280284225940704, 0.30... \n", - "54827 [0.5138391852378845, 0.26520460844039917, 0.25... \n", - "19409 [0.520979106426239, 0.22392335534095764, 0.265... \n", - "38703 [0.32009249925613403, 0.4819949269294739, 0.22... \n", - "25133 [0.5703040361404419, 0.17875489592552185, 0.27... 
\n", - "15339 [0.4571400284767151, 0.2949279546737671, 0.269... \n", - "28082 [0.36291953921318054, 0.4310339391231537, 0.22... \n", - "44462 [0.6122021675109863, 0.15579015016555786, 0.27... \n", - "60212 [0.470508873462677, 0.2711063623428345, 0.2810... \n", - "34950 [0.4431285262107849, 0.27477312088012695, 0.29... \n", - "59462 [0.40375131368637085, 0.3289208710193634, 0.27... \n", - "19465 [0.4494284391403198, 0.31076472997665405, 0.26... \n", - "8135 [0.42708122730255127, 0.31392431259155273, 0.2... \n", - "\n", - "[2620 rows x 5 columns]" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "display(testlist)\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* exactly correct labeled sentences:" - ] - }, - { - "cell_type": "code", - "execution_count": 34, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "0.1851145038167939\n" - ] - } - ], - "source": [ - "print (sum([1 if sample[1]['teacher'] == sample[1]['predict'] else 0 for sample in testlist.iterrows()]) / testlist.shape[0])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* mean squared error:" - ] - }, - { - "cell_type": "code", - "execution_count": 35, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "array([0.02218282, 0.02594105, 0.00323429])" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "teacher_sentiments = np.array([sample[1]['teacher_sentiment'] for sample in testlist.iterrows()])\n", - "predicted_sentiments = np.array([sample[1]['predicted_sentiment'] for sample in testlist.iterrows()])\n", - "\n", - "mean_squared_error = ((teacher_sentiments - predicted_sentiments)**2).mean(axis=0)\n", - "display(mean_squared_error)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* by an overall variance of:" - ] - }, - { - "cell_type": "code", - "execution_count": 36, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Variance teacher: [0.02183094 0.02513847 0.00285735]\n", - "Variance prediction: [0.00850173 0.00793481 0.00095984]\n" - ] - } - ], - "source": [ - "print(\"Variance teacher: \", np.var(teacher_sentiments, axis=0))\n", - "print(\"Variance prediction: \", np.var(predicted_sentiments, axis=0))" - ] - }, - { - "cell_type": "code", - "execution_count": 37, - "metadata": {}, - "outputs": [], - "source": [ - "testlist.to_csv('test.csv')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* save classifier:" - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "metadata": {}, - "outputs": [], - "source": [ - "import pickle\n", - "clf.save(\"clf2.keras\")\n", - "pickle.dump( vectorizer, open( \"vec2.pickle\", \"wb\" ) )" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "----\n", - "## testing area\n", - "\n", - "**for just testing, start from here!**\n", - "\n", - "download the dumped classifier and vectorizer from [here](https://the-cake-is-a-lie.net/nextcloud/index.php/s/NjMXamfwQsyrefG)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* loading classifier and vectorizer" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [], - "source": [ - "from IPython.display import clear_output, Markdown, Math\n", - "import ipywidgets as widgets\n", - "import sys\n", - "sys.path.append(\"..\")\n", - "\n", - "from Tools.Emoji_Distance import 
sentiment_vector_to_emoji\n", - "from Tools.Emoji_Distance import emoji_to_sentiment_vector\n", - "\n", - "def emoji2sent(emoji_arr):\n", - " return np.array([emoji_to_sentiment_vector(e) for e in emoji_arr])\n", - "\n", - "def sent2emoji(sent_arr, custom_target_emojis=None):\n", - " return [sentiment_vector_to_emoji(s, custom_target_emojis=custom_target_emojis) for s in sent_arr]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import keras\n", - "import pickle\n", - "clf = keras.models.load_model(\"clf.keras\")\n", - "vectorizer = pickle.load( open( \"vec.pickle\", \"rb\" ) )" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* define lookup emojis here:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "lookup_emojis = [#'πŸ˜‚',\n", - " '😭',\n", - " '😍',\n", - " '😩',\n", - " '😊',\n", - " '😘',\n", - " 'πŸ™',\n", - " 'πŸ™Œ',\n", - " 'πŸ˜‰',\n", - " '😁',\n", - " 'πŸ˜…',\n", - " '😎',\n", - " '😒',\n", - " 'πŸ˜’',\n", - " '😏',\n", - " '😌',\n", - " 'πŸ˜”',\n", - " 'πŸ˜‹',\n", - " 'πŸ˜€',\n", - " '😀']" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* a simple output widget for testing:" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [ - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "755aa31644db4628a3be1ff3b621fa28", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "Text(value='')" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "4601eccc07074e71983c9005d1d329b1", - "version_major": 2, - "version_minor": 0 - }, - "text/plain": [ - "VBox(children=(Button(description='get emoji', icon='check', style=ButtonStyle(), tooltip='Click me'), Output(…" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "out = widgets.Output()\n", - "\n", - "t = widgets.Text()\n", - "b = widgets.Button(\n", - " description='get emoji',\n", - " disabled=False,\n", - " button_style='', # 'success', 'info', 'warning', 'danger' or ''\n", - " tooltip='Click me',\n", - " icon='check'\n", - ")\n", - "\n", - "\n", - "\n", - "def handle_submit(sender):\n", - " with out:\n", - " clear_output()\n", - " with out:\n", - " pred = clf.predict(vectorizer.transform([t.value]))\n", - " \n", - " display(Markdown(\"# Predicted Emoji \" + str(sent2emoji(pred, lookup_emojis)[0])))\n", - " display(Markdown(\"# Sentiment Vector: $$ \\pmatrix{\" + str(pred[0,0]) +\n", - " \"\\\\\\\\\" + str(pred[0,1]) + \"\\\\\\\\\" + str(pred[0,2]) + \"}$$\"))\n", - "\n", - "b.on_click(handle_submit)\n", - " \n", - "display(t)\n", - "display(widgets.VBox([b, out])) " - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "ename": "NameError", - "evalue": "name 'mlb' is not defined", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mnumpy\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0my_trans\u001b[0m 
\u001b[0;34m=\u001b[0m \u001b[0mmlb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minverse_transform\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0myt1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0mpred_trans\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmlb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minverse_transform\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0myt1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mNameError\u001b[0m: name 'mlb' is not defined" - ] - } - ], - "source": [ - "import numpy as np\n", - "\n", - "y_trans = mlb.inverse_transform(yt1)\n", - "pred_trans = mlb.inverse_transform(pred)\n", - "\n", - "# evaluate accuracy: a sample counts as correct\n", - "# if predicted and true label sets overlap\n", - "pos = 0\n", - "neg = 0\n", - "total = 0\n", - "for entry in range(len(y_trans)):\n", - " if len(np.intersect1d(y_trans[entry], pred_trans[entry])) > 0:\n", - " pos += 1\n", - " else:\n", - " neg += 1\n", - " total += 1\n", - "print(pos/total)\n", - "print(neg)" - ] - },
- { - "cell_type": "markdown", - "metadata": {}, - "source": [] - },
- { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }
- ],
- "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.5" - } - },
- "nbformat": 4,
- "nbformat_minor": 2
-}
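The last code cell fails with a `NameError` because `mlb` is never defined anywhere in this notebook; judging from the `MultiLabelBinarizer` import at the top, it is a leftover from an earlier multi-label variant of the experiment. A purely hypothetical reconstruction of the setup that cell assumes (`emoji_lists` and `pred_bin` are made-up toy data, not values from the original run):

```python
import numpy as np
from sklearn.preprocessing import MultiLabelBinarizer

# toy per-tweet emoji sets standing in for the real labels
emoji_lists = [['😂', '😭'], ['😍'], ['😭']]

mlb = MultiLabelBinarizer()
y_bin = mlb.fit_transform(emoji_lists)   # shape: (n_samples, n_classes)

# pretend these are binarized predictions from a multi-label classifier
pred_bin = np.array([[1, 0, 0], [0, 1, 0], [1, 0, 1]])

y_trans = mlb.inverse_transform(y_bin)
pred_trans = mlb.inverse_transform(pred_bin)

# a sample counts as correct if predicted and true emoji sets overlap
pos = sum(1 for yt, yp in zip(y_trans, pred_trans) if set(yt) & set(yp))
print(pos / len(y_trans))
```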