{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/home/jonas/.local/lib/python3.6/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
"  from ._conv import register_converters as _register_converters\n",
"Using TensorFlow backend.\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"[nltk_data] Downloading package punkt to /home/jonas/nltk_data...\n",
"[nltk_data]   Package punkt is already up-to-date!\n",
"[nltk_data] Downloading package averaged_perceptron_tagger to\n",
"[nltk_data]     /home/jonas/nltk_data...\n",
"[nltk_data]   Package averaged_perceptron_tagger is already up-to-\n",
"[nltk_data]       date!\n",
"[nltk_data] Downloading package wordnet to /home/jonas/nltk_data...\n",
"[nltk_data]   Package wordnet is already up-to-date!\n"
]
},
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 1,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import pandas as pd\n",
"from IPython.display import clear_output, Markdown, Math\n",
"import ipywidgets as widgets\n",
"import os\n",
"import glob\n",
"import json\n",
"import numpy as np\n",
"import itertools\n",
"import sklearn.utils as sku\n",
"from sklearn.feature_extraction.text import CountVectorizer, TfidfVectorizer, HashingVectorizer\n",
"from sklearn.model_selection import train_test_split\n",
"from sklearn.preprocessing import MultiLabelBinarizer\n",
"import nltk\n",
"from keras.models import load_model\n",
"from sklearn.externals import joblib # deprecated in newer scikit-learn versions: use a plain `import joblib` there\n",
"import pickle\n",
"import operator\n",
"from sklearn.pipeline import Pipeline\n",
"nltk.download('punkt')\n",
"nltk.download('averaged_perceptron_tagger')\n",
"nltk.download('wordnet')"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"sys.path.append(\"..\")\n",
"\n",
"from Tools.Emoji_Distance import sentiment_vector_to_emoji\n",
"from Tools.Emoji_Distance import emoji_to_sentiment_vector\n",
"\n",
"def emoji2sent(emoji_arr, only_emoticons=True):\n",
"    return np.array([emoji_to_sentiment_vector(e, only_emoticons=only_emoticons) for e in emoji_arr])\n",
"\n",
"def sent2emoji(sent_arr, custom_target_emojis=None, only_emoticons=True):\n",
"    return [sentiment_vector_to_emoji(s, custom_target_emojis=custom_target_emojis, only_emoticons=only_emoticons) for s in sent_arr]"
]
},
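{
"cell_type": "markdown",
"metadata": {},
"source": [
"* a quick round-trip sanity check for the two helpers above. This is only a sketch: the exact vector values depend on the data behind `Tools.Emoji_Distance`"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"sample_vec = emoji2sent(['😂'])\n",
"print(sample_vec)\n",
"# mapping the vector back should yield an emoji with a similar sentiment:\n",
"print(sent2emoji(sample_vec))"
]
},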
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"SINGLE_LABEL = True"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"----\n",
"## classes and functions we are using later:\n",
"----"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"* functions for selecting items from a set / list"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"def latest(lst):\n",
"    return lst[-1] if len(lst) > 0 else 'X'\n",
"def most_common(lst):\n",
"    # find the most commonly used emoji in the given lst\n",
"    return max(set(lst), key=lst.count) if len(lst) > 0 else \"X\" # setting label to 'X' if there is an empty emoji list"
]
},
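{
"cell_type": "markdown",
"metadata": {},
"source": [
"* a quick check of the selection helpers on a toy list:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"example_emojis = ['😂', '😭', '😂']\n",
"print(latest(example_emojis))      # -> '😂' (last element)\n",
"print(most_common(example_emojis)) # -> '😂' (highest count)\n",
"print(most_common([]))             # -> 'X' (dummy label for empty lists)"
]
},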
{
"cell_type": "markdown",
"metadata": {},
"source": [
"* our emoji blacklist (skin tone and gender modifiers)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"# defining blacklist for modifier emojis:\n",
"emoji_blacklist = set([\n",
"    chr(0x1F3FB), # skin tone modifiers (light ... dark)\n",
"    chr(0x1F3FC),\n",
"    chr(0x1F3FD),\n",
"    chr(0x1F3FE),\n",
"    chr(0x1F3FF),\n",
"    chr(0x2642), # male sign\n",
"    chr(0x2640)  # female sign\n",
"])"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"* lemmatization helper functions"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"from nltk.stem.snowball import SnowballStemmer\n",
"from nltk.stem import WordNetLemmatizer\n",
"from nltk import pos_tag\n",
"from nltk import word_tokenize\n",
"from nltk.corpus import wordnet\n",
"\n",
"def get_wordnet_pos(treebank_tag):\n",
"\n",
"    if treebank_tag.startswith('J'):\n",
"        return wordnet.ADJ\n",
"    elif treebank_tag.startswith('V'):\n",
"        return wordnet.VERB\n",
"    elif treebank_tag.startswith('N'):\n",
"        return wordnet.NOUN\n",
"    elif treebank_tag.startswith('R'):\n",
"        return wordnet.ADV\n",
"    else:\n",
"        return wordnet.NOUN"
]
},
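{
"cell_type": "markdown",
"metadata": {},
"source": [
"* a quick check of the treebank → wordnet tag mapping (`pos_tag` emits treebank tags like `'VBD'` or `'JJ'`; the mapping above only looks at the first letter):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"for tag in ['JJ', 'VBD', 'NN', 'RB', 'CD']:\n",
"    # unknown tags (like 'CD') fall back to wordnet.NOUN\n",
"    print(tag, '->', get_wordnet_pos(tag))"
]
},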
{
"cell_type": "markdown",
"metadata": {},
"source": [
"* the sample data manager loads and preprocesses data"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"class sample_data_manager(object):\n",
"    @staticmethod\n",
"    def generate_and_read(path:str, only_emoticons=True, apply_stemming=True, n_top_emojis=-1, file_range=None):\n",
"        sdm = sample_data_manager(path)\n",
"        sdm.read_files(file_index_range=range(sdm.n_files) if file_range is None else file_range, only_emoticons=only_emoticons)\n",
"        if apply_stemming:\n",
"            sdm.apply_stemming_and_lemmatization()\n",
"\n",
"        sdm.generate_emoji_count_and_weights()\n",
"\n",
"        if n_top_emojis > 0:\n",
"            sdm.filter_by_top_emojis(n_top=n_top_emojis)\n",
"\n",
"        return sdm\n",
"\n",
"    def __init__(self, data_root_folder:str):\n",
"        self.data_root_folder = data_root_folder\n",
"        self.json_files = sorted(glob.glob(self.data_root_folder + \"/*.json\"))\n",
"        self.n_files = len(self.json_files)\n",
"        self.raw_data = None\n",
"        self.emojis = None\n",
"        self.plain_text = None\n",
"        self.labels = None\n",
"        self.emoji_count = None\n",
"        self.emoji_weights = None\n",
"        self.X = None\n",
"        self.y = None\n",
"        self.Xt = None\n",
"        self.yt = None\n",
"        self.top_emojis = None\n",
"\n",
"    def read_files(self, file_index_range:list, only_emoticons=True):\n",
"        assert np.min(file_index_range) >= 0 and np.max(file_index_range) < self.n_files\n",
"        for i in file_index_range:\n",
"            print(\"reading file: \" + self.json_files[i] + \"...\")\n",
"            if self.raw_data is None:\n",
"                self.raw_data = pd.read_json(self.json_files[i], encoding=\"utf-8\")\n",
"            else:\n",
"                self.raw_data = self.raw_data.append(pd.read_json(self.json_files[i], encoding=\"utf-8\"))\n",
"\n",
"        self.emojis = self.raw_data['EMOJI']\n",
"        self.plain_text = self.raw_data['text']\n",
"\n",
"        # replacing keywords. TODO: maybe this information can be extracted and used\n",
"        self.plain_text = self.plain_text.str.replace(\"(<EMOJI>|<USER>|<HASHTAG>)\",\"\").str.replace(\"[\" + \"\".join(list(emoji_blacklist)) + \"]\",\"\")\n",
"\n",
"        # so far filtering for the latest emoji. TODO: maybe there are also better approaches\n",
"        self.labels = emoji2sent([latest(e) for e in self.emojis], only_emoticons=only_emoticons)\n",
"\n",
"        # and filter out all samples we have no label for:\n",
"        wrong_labels = np.isnan(np.linalg.norm(self.labels, axis=1))\n",
"\n",
"        self.labels = self.labels[np.invert(wrong_labels)]\n",
"        self.plain_text = self.plain_text[np.invert(wrong_labels)]\n",
"        self.emojis = self.emojis[np.invert(wrong_labels)]\n",
"\n",
"        print(\"imported \" + str(len(self.labels)) + \" samples\")\n",
"\n",
"    def apply_stemming_and_lemmatization(self):\n",
"        stemmer = SnowballStemmer(\"english\")\n",
"        for key in self.plain_text.keys():\n",
"            stemmed_sent = []\n",
"            for word in self.plain_text[key].split(\" \"):\n",
"                word_stemmed = stemmer.stem(word)\n",
"                stemmed_sent.append(word_stemmed)\n",
"            stemmed_sent = (\" \").join(stemmed_sent)\n",
"            self.plain_text[key] = stemmed_sent\n",
"\n",
"        lemmatizer = WordNetLemmatizer()\n",
"        for key in self.plain_text.keys():\n",
"            lemmatized_sent = []\n",
"            sent_pos = pos_tag(word_tokenize(self.plain_text[key]))\n",
"            for word in sent_pos:\n",
"                # note: pos_tag emits uppercase treebank tags; lowercasing them would break get_wordnet_pos\n",
"                wordnet_pos = get_wordnet_pos(word[1])\n",
"                word_lemmatized = lemmatizer.lemmatize(word[0], pos=wordnet_pos)\n",
"                lemmatized_sent.append(word_lemmatized)\n",
"            lemmatized_sent = (\" \").join(lemmatized_sent)\n",
"            self.plain_text[key] = lemmatized_sent\n",
"\n",
"    def generate_emoji_count_and_weights(self):\n",
"        self.emoji_count = {}\n",
"        for e_list in self.emojis:\n",
"            for e in set(e_list):\n",
"                if e not in self.emoji_count:\n",
"                    self.emoji_count[e] = 0\n",
"                self.emoji_count[e] += 1\n",
"\n",
"        emoji_sum = sum([self.emoji_count[e] for e in self.emoji_count])\n",
"\n",
"        self.emoji_weights = {}\n",
"        for e in self.emoji_count:\n",
"            # idf-like weight for emojis: log(total / count)\n",
"            self.emoji_weights[e] = np.log((emoji_sum / self.emoji_count[e]))\n",
"\n",
"        weights_sum = sum([self.emoji_weights[x] for x in self.emoji_weights])\n",
"\n",
"        # normalize:\n",
"        for e in self.emoji_weights:\n",
"            self.emoji_weights[e] = self.emoji_weights[e] / weights_sum\n",
"\n",
"        self.emoji_weights['X'] = 0 # dummy values\n",
"        self.emoji_count['X'] = 0\n",
"\n",
"    def get_emoji_count(self):\n",
"        sorted_emoji_count = list(reversed(sorted(self.emoji_count.items(), key=operator.itemgetter(1))))\n",
"        #display(sorted_emoji_count)\n",
"        return sorted_emoji_count\n",
"\n",
"    def filter_by_top_emojis(self, n_top=20):\n",
"        self.top_emojis = [x[0] for x in self.get_emoji_count()[:n_top]]\n",
"        in_top = [sentiment_vector_to_emoji(x) in self.top_emojis for x in self.labels]\n",
"        self.labels = self.labels[in_top]\n",
"        self.plain_text = self.plain_text[in_top]\n",
"        self.emojis = self.emojis[in_top]\n",
"        print(\"remaining samples after top emoji filtering: \", len(self.labels))\n",
"\n",
"    def create_train_test_split(self, split = 0.1, random_state = 4222):\n",
"        self.X, self.Xt, self.y, self.yt = train_test_split(self.plain_text, self.labels, test_size=split, random_state=random_state)"
]
},
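{
"cell_type": "markdown",
"metadata": {},
"source": [
"* a worked toy example of the idf-like emoji weighting used in `generate_emoji_count_and_weights` (the counts here are made up):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"toy_count = {'😂': 800, '😭': 150, '🙏': 50}\n",
"toy_sum = sum(toy_count.values())\n",
"# same formula as above: log(total / count), then normalized to sum 1\n",
"toy_weights = {e: np.log(toy_sum / c) for e, c in toy_count.items()}\n",
"norm = sum(toy_weights.values())\n",
"{e: w / norm for e, w in toy_weights.items()} # rare emojis get the largest weights"
]
},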
{
"cell_type": "markdown",
"metadata": {},
"source": [
"* the pipeline manager saves and stores sklearn pipelines. Keras models are handled differently, so they have to be named explicitly during save and load operations"
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {},
"outputs": [],
"source": [
"class pipeline_manager(object):\n",
"    @staticmethod\n",
"    def load_pipeline_from_files(file_prefix:str, keras_models = [], all_models = []):\n",
"        pm = pipeline_manager(keras_models=keras_models)\n",
"        pm.load(file_prefix, all_models)\n",
"        return pm\n",
"\n",
"    @staticmethod\n",
"    def create_keras_pipeline_with_vectorizer(vectorizer, layers, sdm:sample_data_manager):\n",
"        '''\n",
"        creates a pipeline with a vectorizer and a keras classifier\n",
"        '''\n",
"        from keras.models import Sequential\n",
"        from keras.layers import Dense\n",
"\n",
"        if sdm.X is None:\n",
"            sdm.create_train_test_split()\n",
"\n",
"        vec_train = vectorizer.fit_transform(sdm.X)\n",
"        vec_test = vectorizer.transform(sdm.Xt)\n",
"        # creating keras model:\n",
"        model = Sequential()\n",
"\n",
"        first_layer = True\n",
"        for layer in layers:\n",
"            if first_layer:\n",
"                # infer the input dimension from the fitted vectorizer:\n",
"                model.add(Dense(units=layer[0], activation=layer[1], input_dim=vectorizer.transform([\" \"]).shape[1]))\n",
"                first_layer = False\n",
"            else:\n",
"                model.add(Dense(units=layer[0], activation=layer[1]))\n",
"\n",
"        model.compile(loss='mean_squared_error',\n",
"                      optimizer='adam')\n",
"\n",
"        pipeline = Pipeline([\n",
"            ('vectorizer', vectorizer),\n",
"            ('keras_model', model)\n",
"        ])\n",
"\n",
"        return pipeline_manager(pipeline=pipeline, keras_models=['keras_model'])\n",
"\n",
"    @staticmethod\n",
"    def create_pipeline_with_classifier_and_vectorizer(vectorizer, classifier, sdm:sample_data_manager = None):\n",
"        '''\n",
"        creates a pipeline with a vectorizer and a classifier for non-keras classifiers.\n",
"        if a sample data manager is given, the vectorizer will also be fitted!\n",
"        '''\n",
"        if sdm is not None:\n",
"            if sdm.X is None:\n",
"                sdm.create_train_test_split()\n",
"\n",
"            vec_train = vectorizer.fit_transform(sdm.X)\n",
"            vec_test = vectorizer.transform(sdm.Xt)\n",
"\n",
"        pipeline = Pipeline([\n",
"            ('vectorizer', vectorizer),\n",
"            ('classifier', classifier)\n",
"        ])\n",
"\n",
"        return pipeline_manager(pipeline=pipeline, keras_models=[])\n",
"\n",
"    def __init__(self, pipeline = None, keras_models = []):\n",
"        self.pipeline = pipeline\n",
"        self.additional_objects = {}\n",
"        self.keras_models = keras_models\n",
"\n",
"    def save(self, prefix:str):\n",
"        print(self.keras_models)\n",
"        # doing this like explained here: https://stackoverflow.com/a/43415459\n",
"        for step in self.pipeline.named_steps:\n",
"            if step in self.keras_models:\n",
"                self.pipeline.named_steps[step].model.save(prefix + \".\" + step)\n",
"            else:\n",
"                joblib.dump(self.pipeline.named_steps[step], prefix + \".\" + str(step))\n",
"\n",
"        load_command = \"pipeline_manager.load_pipeline_from_files( '\"\n",
"        load_command += prefix + \"', \" + str(self.keras_models) + \", \"\n",
"        load_command += str(list(self.pipeline.named_steps.keys())) + \")\"\n",
"\n",
"        import __main__ as main\n",
"        if not hasattr(main, '__file__'):\n",
"            display(\"saved pipeline. It can be loaded the following way:\")\n",
"            display(Markdown(\"> ```\\n\"+load_command+\"\\n```\"))\n",
"        else:\n",
"            print(\"saved pipeline. It can be loaded the following way:\")\n",
"            print(load_command)\n",
"\n",
"    def load(self, prefix:str, models = []):\n",
"        self.pipeline = None\n",
"        model_list = []\n",
"        for model in models:\n",
"            if model in self.keras_models:\n",
"                model_list.append((model, load_model(prefix + \".\" + model)))\n",
"            else:\n",
"                model_list.append((model, joblib.load(prefix+\".\" + model)))\n",
"        self.pipeline = Pipeline(model_list)\n",
"\n",
"    def fit(self, X, y):\n",
"        self.pipeline.fit(X, y)\n",
"\n",
"    def predict(self, X):\n",
"        return self.pipeline.predict(X)"
]
},
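{
"cell_type": "markdown",
"metadata": {},
"source": [
"* the non-keras path (`create_pipeline_with_classifier_and_vectorizer`) is not exercised below, so here is a minimal sketch of it. `MLPRegressor` is just an illustrative stand-in (any sklearn estimator that handles multi-output regression works); with `sdm=None` nothing is fitted yet:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from sklearn.neural_network import MLPRegressor\n",
"\n",
"pm_sketch = pipeline_manager.create_pipeline_with_classifier_and_vectorizer(\n",
"    vectorizer=TfidfVectorizer(stop_words='english'),\n",
"    classifier=MLPRegressor(hidden_layer_sizes=(64,)),\n",
"    sdm=None) # pass a sample_data_manager here to also fit the vectorizer\n",
"list(pm_sketch.pipeline.named_steps.keys())"
]
},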
{
"cell_type": "markdown",
"metadata": {},
"source": [
"* the trainer class passes data from the sample data manager to the pipeline manager"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"class trainer(object):\n",
"    def __init__(self, sdm:sample_data_manager, pm:pipeline_manager):\n",
"        self.sdm = sdm\n",
"        self.pm = pm\n",
"\n",
"    def fit(self, max_size=10000, disabled_fit_steps=['vectorizer']):\n",
"        # TODO: make batch fitting available here (e.g. continuously waiting for new data and fitting it)\n",
"        if self.sdm.X is None:\n",
"            self.sdm.create_train_test_split()\n",
"        disabled_fits = {}\n",
"        disabled_fit_transforms = {}\n",
"\n",
"        named_steps = self.pm.pipeline.named_steps\n",
"\n",
"        for s in disabled_fit_steps:\n",
"            # now it gets a little bit dirty:\n",
"            # replace fit functions we don't want to call again (e.g. for vectorizers)\n",
"            disabled_fits[s] = named_steps[s].fit\n",
"            disabled_fit_transforms[s] = named_steps[s].fit_transform\n",
"            # note: these are plain instance attributes, so there is no implicit self argument\n",
"            named_steps[s].fit = lambda X, y=None, step=named_steps[s]: step\n",
"            named_steps[s].fit_transform = lambda X, y=None, step=named_steps[s]: step.transform(X)\n",
"\n",
"        self.pm.fit(X=self.sdm.X[:max_size], y=self.sdm.y[:max_size])\n",
"\n",
"        # restore replaced fit functions:\n",
"        for s in disabled_fit_steps:\n",
"            named_steps[s].fit = disabled_fits[s]\n",
"            named_steps[s].fit_transform = disabled_fit_transforms[s]\n",
"\n",
"    def test(self):\n",
"        '''\n",
"        return: prediction:list, teacher:list\n",
"        '''\n",
"        if self.sdm.X is None:\n",
"            self.sdm.create_train_test_split()\n",
"        return self.pm.predict(self.sdm.Xt), self.sdm.yt"
]
},
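{
"cell_type": "markdown",
"metadata": {},
"source": [
"* the fit-disabling trick inside `trainer.fit` is easy to get wrong, so here is a self-contained toy version of it (made-up data, plain sklearn): the vectorizer is fitted once, then the whole pipeline is fitted again without touching the vectorizer's vocabulary:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from sklearn.linear_model import LinearRegression\n",
"\n",
"toy_pipe = Pipeline([('vec', CountVectorizer()), ('reg', LinearRegression())])\n",
"toy_X = ['good day', 'bad day', 'good night']\n",
"toy_y = [[1.0], [0.0], [1.0]]\n",
"\n",
"vec = toy_pipe.named_steps['vec']\n",
"vec.fit(toy_X) # pretend this happened in an earlier session\n",
"vocab_before = dict(vec.vocabulary_)\n",
"\n",
"# disable re-fitting, same idea as in trainer.fit above:\n",
"orig_fit, orig_fit_transform = vec.fit, vec.fit_transform\n",
"vec.fit = lambda X, y=None: vec\n",
"vec.fit_transform = lambda X, y=None: vec.transform(X)\n",
"\n",
"toy_pipe.fit(toy_X, toy_y) # only the regressor is (re-)fitted now\n",
"\n",
"# restore the original functions:\n",
"vec.fit, vec.fit_transform = orig_fit, orig_fit_transform\n",
"vec.vocabulary_ == vocab_before # -> True"
]
},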
{
"cell_type": "markdown",
"metadata": {},
"source": [
"----\n",
"## Train"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"* when in a notebook environment, run the cells below:"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"reading file: ./data_en/2017-11-01.json...\n",
"imported 33368 samples\n",
"remaining samples after top emoji filtering:  26197\n"
]
}
],
"source": [
"import __main__ as main\n",
"if not hasattr(main, '__file__'):\n",
"    # we are in an interactive environment (probably in jupyter)\n",
"    # load data:\n",
"    sdm = sample_data_manager.generate_and_read(path=\"./data_en/\", n_top_emojis=20, file_range=range(1))"
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/1\n",
"100/100 [==============================] - 3s 27ms/step - loss: 0.1225\n"
]
}
],
"source": [
"    #pm = pipeline_manager.create_keras_pipeline_with_vectorizer(vectorizer=TfidfVectorizer(stop_words='english'),\n",
"    #                                                            layers=[(10000, 'relu'),(5000, 'relu'),(2500, 'relu'),(y1[0].shape[0],None)], sdm=sdm)\n",
"    pm = pipeline_manager.create_keras_pipeline_with_vectorizer(vectorizer=TfidfVectorizer(stop_words='english'),\n",
"                                                                layers=[(2500, 'relu'),(3,None)], sdm=sdm)\n",
"\n",
"    tr = trainer(sdm=sdm, pm=pm)\n",
"    tr.fit(100)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"----\n",
"## save classifier"
]
},
{
"cell_type": "code",
"execution_count": 23,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['keras_model']\n"
]
},
{
"data": {
"text/plain": [
"'saved pipeline. It can be loaded the following way:'"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/markdown": [
"> ```\n",
"pipeline_manager.load_pipeline_from_files( 'custom_classifier', ['keras_model'], ['vectorizer', 'keras_model'])\n",
"```"
],
"text/plain": [
"<IPython.core.display.Markdown object>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"pm.save('custom_classifier')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"----\n",
"## Prediction"
]
},
{
"cell_type": "code",
"execution_count": 33,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([[0.15801723, 0.11859037, 0.10975348],\n",
"       [0.17035495, 0.10913695, 0.09354854],\n",
"       [0.11777218, 0.06569621, 0.06620223],\n",
"       ...,\n",
"       [0.14746301, 0.09480572, 0.08052498],\n",
"       [0.15932804, 0.11895895, 0.10343507],\n",
"       [0.17135939, 0.1061406 , 0.09402546]], dtype=float32)"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"array([[0.46813021, 0.24716181, 0.28470797],\n",
"       [0.46813021, 0.24716181, 0.28470797],\n",
"       [0.70401758, 0.05932203, 0.23666039],\n",
"       ...,\n",
"       [0.46813021, 0.24716181, 0.28470797],\n",
"       [0.46813021, 0.24716181, 0.28470797],\n",
"       [0.46813021, 0.24716181, 0.28470797]])"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"prediction variance:  0.0006294687\n",
"teacher variance:  0.03341702104519965\n"
]
},
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
"    .dataframe tbody tr th:only-of-type {\n",
"        vertical-align: middle;\n",
"    }\n",
"\n",
"    .dataframe tbody tr th {\n",
"        vertical-align: top;\n",
"    }\n",
"\n",
"    .dataframe thead th {\n",
"        text-align: right;\n",
"    }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
"  <thead>\n",
"    <tr style=\"text-align: right;\">\n",
"      <th></th>\n",
"      <th>predict</th>\n",
"      <th>predicted_sentiment</th>\n",
"      <th>teacher</th>\n",
"      <th>teacher_sentiment</th>\n",
"      <th>text</th>\n",
"    </tr>\n",
"  </thead>\n",
"  <tbody>\n",
"    <tr>\n",
"      <th>35671</th>\n",
"      <td>😢</td>\n",
"      <td>[0.15801723301410675, 0.11859036982059479, 0.1...</td>\n",
"      <td>😂</td>\n",
"      <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
"      <td>i feel like i care so much more in everi situat</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>25683</th>\n",
"      <td>😢</td>\n",
"      <td>[0.1703549474477768, 0.10913695394992828, 0.09...</td>\n",
"      <td>😂</td>\n",
"      <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
"      <td>i did not meat to add that 2 there ... hav see...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>8985</th>\n",
"      <td>😢</td>\n",
"      <td>[0.1177721843123436, 0.06569620966911316, 0.06...</td>\n",
"      <td>😊</td>\n",
"      <td>[0.7040175768989329, 0.059322033898305086, 0.2...</td>\n",
"      <td>never…</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>5410</th>\n",
"      <td>😢</td>\n",
"      <td>[0.18182337284088135, 0.12382747232913971, 0.0...</td>\n",
"      <td>😂</td>\n",
"      <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
"      <td>lmao on me ! ! ! wtf wa he suppos to say</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>62611</th>\n",
"      <td>😢</td>\n",
"      <td>[0.1786666363477707, 0.11502400785684586, 0.10...</td>\n",
"      <td>😊</td>\n",
"      <td>[0.7040175768989329, 0.059322033898305086, 0.2...</td>\n",
"      <td>this dude alway help me get through my school ...</td>\n",
"    </tr>\n",
"  </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
"      predict predicted_sentiment teacher \\\n",
"35671 😢 [0.15801723301410675, 0.11859036982059479, 0.1... 😂 \n",
"25683 😢 [0.1703549474477768, 0.10913695394992828, 0.09... 😂 \n",
"8985 😢 [0.1177721843123436, 0.06569620966911316, 0.06... 😊 \n",
"5410 😢 [0.18182337284088135, 0.12382747232913971, 0.0... 😂 \n",
"62611 😢 [0.1786666363477707, 0.11502400785684586, 0.10... 😊 \n",
"\n",
"      teacher_sentiment \\\n",
"35671 [0.46813021474490496, 0.24716181096977158, 0.2... \n",
"25683 [0.46813021474490496, 0.24716181096977158, 0.2... \n",
"8985 [0.7040175768989329, 0.059322033898305086, 0.2... \n",
"5410 [0.46813021474490496, 0.24716181096977158, 0.2... \n",
"62611 [0.7040175768989329, 0.059322033898305086, 0.2... \n",
"\n",
"      text \n",
"35671 i feel like i care so much more in everi situat \n",
"25683 i did not meat to add that 2 there ... hav see... \n",
"8985 never… \n",
"5410 lmao on me ! ! ! wtf wa he suppos to say \n",
"62611 this dude alway help me get through my school ... "
]
},
"metadata": {},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Mean Squared Error:  [0.14140389 0.04240099 0.02944344]\n",
"Variance teacher:  [0.02183094 0.02513847 0.00285735]\n",
"Variance prediction:  [0.00053908 0.00024232 0.00021658]\n"
]
}
],
"source": [
"import __main__ as main\n",
"if not hasattr(main, '__file__'):\n",
"    pred, teacher = tr.test()\n",
"\n",
"    display(pred)\n",
"    display(teacher)\n",
"\n",
"    print('prediction variance: ', np.linalg.norm(np.var(pred, axis=0)))\n",
"    print('teacher variance: ', np.linalg.norm(np.var(teacher, axis=0)))\n",
"\n",
"    # build a dataframe to visualize test results:\n",
"    testlist = pd.DataFrame({'text': sdm.Xt,\n",
"                             'teacher': sent2emoji(sdm.yt),\n",
"                             'teacher_sentiment': sdm.yt.tolist(),\n",
"                             'predict': sent2emoji(pred, custom_target_emojis=sdm.top_emojis),\n",
"                             'predicted_sentiment': pred.tolist()})\n",
"    # display:\n",
"    display(testlist.head())\n",
"\n",
"    # mean squared error:\n",
"    teacher_sentiments = np.array([sample[1]['teacher_sentiment'] for sample in testlist.iterrows()])\n",
"    predicted_sentiments = np.array([sample[1]['predicted_sentiment'] for sample in testlist.iterrows()])\n",
"\n",
"    mean_squared_error = ((teacher_sentiments - predicted_sentiments)**2).mean(axis=0)\n",
"    print(\"Mean Squared Error: \", mean_squared_error)\n",
"    print(\"Variance teacher: \", np.var(teacher_sentiments, axis=0))\n",
"    print(\"Variance prediction: \", np.var(predicted_sentiments, axis=0))\n",
"\n",
"    # save to csv:\n",
"    testlist.to_csv('test.csv')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"----\n",
"## Load classifier"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import __main__ as main\n",
"if not hasattr(main, '__file__'):\n",
"    try:\n",
"        pm\n",
"    except NameError:\n",
"        pass\n",
"    else:\n",
"        del pm # delete existing pipeline manager if there is one\n",
"\n",
"    pm = pipeline_manager.load_pipeline_from_files( 'custom_classifier', ['keras_model'], ['vectorizer', 'keras_model'])\n",
"    lookup_emojis = [#'😂',\n",
"                     '😭',\n",
"                     '😍',\n",
"                     '😩',\n",
"                     '😊',\n",
"                     '😘',\n",
"                     '🙏',\n",
"                     '🙌',\n",
"                     '😉',\n",
"                     '😁',\n",
"                     '😅',\n",
"                     '😎',\n",
"                     '😢',\n",
"                     '😒',\n",
"                     '😏',\n",
"                     '😌',\n",
"                     '😔',\n",
"                     '😋',\n",
"                     '😀',\n",
"                     '😤']\n",
"    out = widgets.Output()\n",
"\n",
"    t = widgets.Text()\n",
"    b = widgets.Button(\n",
"        description='get emoji',\n",
"        disabled=False,\n",
"        button_style='', # 'success', 'info', 'warning', 'danger' or ''\n",
"        tooltip='Click me',\n",
"        icon='check'\n",
"    )\n",
"\n",
"    def handle_submit(sender):\n",
"        with out:\n",
"            clear_output()\n",
"        with out:\n",
"            pred = pm.predict([t.value])\n",
"\n",
"            display(Markdown(\"# Predicted Emoji \" + str(sent2emoji(pred, lookup_emojis)[0])))\n",
"            display(Markdown(\"# Sentiment Vector: $$ \\pmatrix{\" + str(pred[0,0]) +\n",
"                             \"\\\\\\\\\" + str(pred[0,1]) + \"\\\\\\\\\" + str(pred[0,2]) + \"}$$\"))\n",
"\n",
"    b.on_click(handle_submit)\n",
"\n",
"    display(t)\n",
"    display(widgets.VBox([b, out]))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# simple twitter approach\n",
"*for learning emoji usage from single (i.e. unconnected) twitter messages*"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## loading train data"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"* Download preprocessed raw data:\n",
"    * [here](https://the-cake-is-a-lie.net/nextcloud/index.php/s/MmXFYj6mGoMQoJN) for English\n",
"    * [here](https://the-cake-is-a-lie.net/nextcloud/index.php/s/HgqpQ6rFadtWSAt) for German"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"data_root_folder = \"./data_en/\" # I created a symlink here"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"* get all json files in `data_root_folder`"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"json_files = sorted(glob.glob(data_root_folder + \"/*.json\"))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"----"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"* so far, only load the first file"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
"    .dataframe tbody tr th:only-of-type {\n",
"        vertical-align: middle;\n",
"    }\n",
"\n",
"    .dataframe tbody tr th {\n",
"        vertical-align: top;\n",
"    }\n",
"\n",
"    .dataframe thead th {\n",
"        text-align: right;\n",
"    }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
"  <thead>\n",
"    <tr style=\"text-align: right;\">\n",
"      <th></th>\n",
"      <th>EMOJI</th>\n",
"      <th>HASHTAGS</th>\n",
"      <th>LINKED_USER</th>\n",
"      <th>datetime</th>\n",
"      <th>id</th>\n",
"      <th>lang</th>\n",
"      <th>person</th>\n",
"      <th>reply_to</th>\n",
"      <th>text</th>\n",
"    </tr>\n",
"  </thead>\n",
"  <tbody>\n",
"    <tr>\n",
"      <th>0</th>\n",
"      <td>[🔥, 👏]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:00</td>\n",
"      <td>925716304635547600</td>\n",
"      <td>en</td>\n",
"      <td>31507978</td>\n",
"      <td>NaN</td>\n",
"      <td>fashionbombdaily's photo <EMOJI><EMOJI>🏼</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>1</th>\n",
"      <td>[🤦]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:00</td>\n",
"      <td>925716304664911900</td>\n",
"      <td>en</td>\n",
"      <td>231994649</td>\n",
"      <td>NaN</td>\n",
"      <td>It’s scary how on point my horoscope be <EMOJI...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>2</th>\n",
"      <td>[😄]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:03</td>\n",
"      <td>925716317214089200</td>\n",
"      <td>en</td>\n",
"      <td>2592765104</td>\n",
"      <td>NaN</td>\n",
"      <td>Woooaaaahhh <EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>3</th>\n",
"      <td>[📷]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:04</td>\n",
"      <td>925716321416949800</td>\n",
"      <td>en</td>\n",
"      <td>278737933</td>\n",
"      <td>NaN</td>\n",
"      <td><EMOJI> vivalcli: Portraits by Zhao Guojing an...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>4</th>\n",
"      <td>[😩, 😩]</td>\n",
"      <td>[]</td>\n",
"      <td>[@hiphopphiIes]</td>\n",
"      <td>2017-11-01 13:29:06</td>\n",
"      <td>925716329801310200</td>\n",
"      <td>en</td>\n",
"      <td>824586253634981900</td>\n",
"      <td>9.257162e+17</td>\n",
"      <td><USER> i wanna know too<EMOJI><EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>5</th>\n",
"      <td>[😭, 💓]</td>\n",
"      <td>[]</td>\n",
"      <td>[@WizMommma]</td>\n",
"      <td>2017-11-01 13:29:02</td>\n",
"      <td>925716313019965400</td>\n",
"      <td>en</td>\n",
"      <td>1581953814</td>\n",
"      <td>9.257088e+17</td>\n",
"      <td><USER> veda was yoda too <EMOJI><EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>6</th>\n",
"      <td>[😂]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:05</td>\n",
"      <td>925716325607133200</td>\n",
"      <td>en</td>\n",
"      <td>1001999683</td>\n",
"      <td>NaN</td>\n",
"      <td>I’m less stressed about turning 30 now <EMOJI>...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>7</th>\n",
"      <td>[💯]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:07</td>\n",
"      <td>925716334008082400</td>\n",
"      <td>en</td>\n",
"      <td>745222369183043600</td>\n",
"      <td>NaN</td>\n",
"      <td>Full charged. <EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>8</th>\n",
"      <td>[🙄]</td>\n",
"      <td>[]</td>\n",
"      <td>[@SeaDimon, @lsarsour]</td>\n",
"      <td>2017-11-01 13:29:09</td>\n",
"      <td>925716342401052700</td>\n",
"      <td>en</td>\n",
"      <td>798557155217539100</td>\n",
"      <td>9.257147e+17</td>\n",
"      <td><USER> That’s part of the problem, (they) <USE...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>9</th>\n",
"      <td>[😟, 😥, 😢]</td>\n",
"      <td>[]</td>\n",
"      <td>[@Ian_khetye]</td>\n",
"      <td>2017-11-01 13:29:10</td>\n",
"      <td>925716346570240000</td>\n",
"      <td>en</td>\n",
"      <td>744396039126421500</td>\n",
"      <td>9.250629e+17</td>\n",
"      <td><USER> got me emotional there<EMOJI><EMOJI><EM...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>10</th>\n",
"      <td>[🌻]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:13</td>\n",
"      <td>925716359182520300</td>\n",
"      <td>en</td>\n",
"      <td>721490010118205400</td>\n",
"      <td>8.965900e+17</td>\n",
"      <td>back to the yellow <EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>11</th>\n",
"      <td>[🍁, 🌺, 🍂]</td>\n",
"      <td>[]</td>\n",
"      <td>[@Dimafadma]</td>\n",
"      <td>2017-11-01 13:29:15</td>\n",
"      <td>925716367558545400</td>\n",
"      <td>en</td>\n",
"      <td>520536723</td>\n",
"      <td>9.257159e+17</td>\n",
"      <td><USER> Happy month to you and your loved ones ...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>12</th>\n",
"      <td>[🍃]</td>\n",
"      <td>[#mortdale, #partofthefamily, #gorgeousboy]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:16</td>\n",
"      <td>925716371735900200</td>\n",
"      <td>en</td>\n",
"      <td>850852815941517300</td>\n",
"      <td>NaN</td>\n",
"      <td>Maxx and Patricia. Family hangs at For Good He...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>13</th>\n",
"      <td>[💭, 🤦]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:20</td>\n",
"      <td>925716388513230800</td>\n",
"      <td>en</td>\n",
"      <td>914145041588867100</td>\n",
"      <td>NaN</td>\n",
"      <td>I need to STOP beating myself up with my thoug...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>14</th>\n",
"      <td>[😍, 😘]</td>\n",
"      <td>[#7YearsOfKMH2]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:20</td>\n",
"      <td>925716388525645800</td>\n",
"      <td>en</td>\n",
"      <td>2425405622</td>\n",
"      <td>NaN</td>\n",
"      <td>Cutest Son <EMOJI>Roll no. 31 <EMOJI> <HASHTAG></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>15</th>\n",
"      <td>[😜]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:22</td>\n",
"      <td>925716396931240000</td>\n",
"      <td>en</td>\n",
"      <td>4614871873</td>\n",
"      <td>NaN</td>\n",
"      <td>By the summer I should have everything up and ...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>16</th>\n",
"      <td>[😂, 🔥]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:23</td>\n",
"      <td>925716401125331000</td>\n",
"      <td>en</td>\n",
"      <td>2831608345</td>\n",
"      <td>NaN</td>\n",
"      <td>I know my English is not that good but that do...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>17</th>\n",
"      <td>[💕]</td>\n",
"      <td>[]</td>\n",
"      <td>[@yungbabytate]</td>\n",
"      <td>2017-11-01 13:29:23</td>\n",
"      <td>925716401133948900</td>\n",
"      <td>en</td>\n",
"      <td>788571974633009200</td>\n",
"      <td>9.255778e+17</td>\n",
"      <td><USER> I <EMOJI> u mama</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>18</th>\n",
"      <td>[😉]</td>\n",
"      <td>[]</td>\n",
"      <td>[@cmckenney]</td>\n",
"      <td>2017-11-01 13:29:23</td>\n",
"      <td>925716401125544000</td>\n",
"      <td>en</td>\n",
"      <td>218307802</td>\n",
"      <td>9.257115e+17</td>\n",
"      <td><USER> That picture was NOT taken this morning...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>19</th>\n",
"      <td>[👅]</td>\n",
"      <td>[#footfetishnation]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:25</td>\n",
"      <td>925716409489002500</td>\n",
"      <td>en</td>\n",
"      <td>885261166146179100</td>\n",
"      <td>NaN</td>\n",
"      <td>Welcome to <HASHTAG> <EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>20</th>\n",
"      <td>[👌, 🙂]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:25</td>\n",
"      <td>925716409497272300</td>\n",
"      <td>en</td>\n",
"      <td>831437760833609700</td>\n",
"      <td>NaN</td>\n",
"      <td>Awkward <EMOJI><EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>21</th>\n",
"      <td>[🤗, 📺]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:26</td>\n",
"      <td>925716413699854300</td>\n",
"      <td>en</td>\n",
"      <td>231664542</td>\n",
"      <td>NaN</td>\n",
"      <td>back at it with supernatural <EMOJI><EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>22</th>\n",
"      <td>[💯]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:26</td>\n",
"      <td>925716413679009800</td>\n",
"      <td>en</td>\n",
"      <td>3196847035</td>\n",
"      <td>NaN</td>\n",
"      <td>One of the best things I've learned was to sto...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>23</th>\n",
"      <td>[👅, 💦, 🍑]</td>\n",
"      <td>[]</td>\n",
"      <td>[@ctrlpurp]</td>\n",
"      <td>2017-11-01 13:29:29</td>\n",
"      <td>925716426278735900</td>\n",
"      <td>en</td>\n",
"      <td>918492858352635900</td>\n",
"      <td>9.257161e+17</td>\n",
"      <td><USER> Can I taste?<EMOJI><EMOJI><EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>24</th>\n",
"      <td>[💔]</td>\n",
"      <td>[]</td>\n",
"      <td>[@saunders_court1]</td>\n",
"      <td>2017-11-01 13:29:30</td>\n",
"      <td>925716430473039900</td>\n",
"      <td>en</td>\n",
"      <td>3471187337</td>\n",
"      <td>9.257163e+17</td>\n",
"      <td><USER> we miss you ☹️<EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>25</th>\n",
"      <td>[🤐, 🤐, 🤐]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:31</td>\n",
"      <td>925716434667184100</td>\n",
"      <td>en</td>\n",
"      <td>780060488600199200</td>\n",
"      <td>NaN</td>\n",
"      <td>Actually my bias in WJSN are Eunseo &amp; Bona...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>26</th>\n",
"      <td>[😴]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:32</td>\n",
"      <td>925716438853345300</td>\n",
"      <td>en</td>\n",
"      <td>388380690</td>\n",
"      <td>NaN</td>\n",
"      <td>I so cannot be bothered with the rest of the d...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>27</th>\n",
"      <td>[😂]</td>\n",
"      <td>[]</td>\n",
"      <td>[@xxxtentacion]</td>\n",
"      <td>2017-11-01 13:29:35</td>\n",
"      <td>925716451457163300</td>\n",
"      <td>en</td>\n",
"      <td>899320696869974000</td>\n",
"      <td>NaN</td>\n",
"      <td><USER> 2lit4life<EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>28</th>\n",
"      <td>[😂, 🙄]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 13:29:35</td>\n",
"      <td>925716451461357600</td>\n",
"      <td>en</td>\n",
"      <td>784790670</td>\n",
"      <td>NaN</td>\n",
"      <td>I’m not stop saying that!<EMOJI><EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>29</th>\n",
"      <td>[🎉, 🎂, 🎈, 🎊, 🎁, 💜]</td>\n",
"      <td>[]</td>\n",
"      <td>[@justinerooney_]</td>\n",
"      <td>2017-11-01 13:29:37</td>\n",
"      <td>925716459828936700</td>\n",
"      <td>en</td>\n",
"      <td>3051266655</td>\n",
"      <td>NaN</td>\n",
"      <td><USER> HAPPY BIHDAY <EMOJI><EMOJI><EMOJI><EMOJ...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>...</th>\n",
"      <td>...</td>\n",
"      <td>...</td>\n",
"      <td>...</td>\n",
"      <td>...</td>\n",
"      <td>...</td>\n",
"      <td>...</td>\n",
"      <td>...</td>\n",
"      <td>...</td>\n",
"      <td>...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68703</th>\n",
"      <td>[😕]</td>\n",
"      <td>[#halloweencostumes]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:04</td>\n",
"      <td>925624214522036200</td>\n",
"      <td>en</td>\n",
"      <td>1672876458</td>\n",
"      <td>NaN</td>\n",
"      <td><HASHTAG> this one falls under the weird crazy...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68704</th>\n",
"      <td>[😂, 😩]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:05</td>\n",
"      <td>925624218682777600</td>\n",
"      <td>en</td>\n",
"      <td>382473866</td>\n",
"      <td>NaN</td>\n",
"      <td>I'm not allowed to have chocolates yet, then I...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68705</th>\n",
"      <td>[😂]</td>\n",
"      <td>[#MUFC]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:06</td>\n",
"      <td>925624222889766900</td>\n",
"      <td>en</td>\n",
"      <td>893145405457911800</td>\n",
"      <td>NaN</td>\n",
"      <td>Manchester United manager Mourinho slams 'spec...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68706</th>\n",
"      <td>[💖]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:07</td>\n",
"      <td>925624227088121900</td>\n",
"      <td>en</td>\n",
"      <td>240378516</td>\n",
"      <td>NaN</td>\n",
"      <td><EMOJI> en Bushwhick</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68707</th>\n",
"      <td>[🌆, 👉, 🚖, 📞]</td>\n",
"      <td>[#BurkeCentre]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:18</td>\n",
"      <td>925624273237983200</td>\n",
"      <td>en</td>\n",
"      <td>784620573209002000</td>\n",
"      <td>NaN</td>\n",
"      <td>: <HASHTAG> <EMOJI> <EMOJI><EMOJI> For Taxi <E...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68708</th>\n",
"      <td>[😁]</td>\n",
"      <td>[]</td>\n",
"      <td>[@mychosliaheart, @BarrettoJulia, @iamjoshuaga...</td>\n",
"      <td>2017-11-01 07:23:18</td>\n",
"      <td>925624273212805100</td>\n",
"      <td>en</td>\n",
"      <td>170998187</td>\n",
"      <td>9.254136e+17</td>\n",
"      <td><USER> <USER> <USER> Look, Mammeh and Daddeh! ...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68709</th>\n",
"      <td>[😋]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:24</td>\n",
"      <td>925624298395533300</td>\n",
"      <td>en</td>\n",
"      <td>1348667816</td>\n",
"      <td>NaN</td>\n",
"      <td>Life is so good with you <EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68710</th>\n",
"      <td>[👌, 🎃, 😘]</td>\n",
"      <td>[#portlandoregon, #portlandhalloween, #carrie…]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:24</td>\n",
"      <td>925624298378801200</td>\n",
"      <td>en</td>\n",
"      <td>722481645765300200</td>\n",
"      <td>NaN</td>\n",
"      <td>Happy Halloween! <EMOJI>🏽<EMOJI><EMOJI> <HASHT...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68711</th>\n",
"      <td>[👍]</td>\n",
"      <td>[]</td>\n",
"      <td>[@8limbsbondi...]</td>\n",
"      <td>2017-11-01 07:23:26</td>\n",
"      <td>925624306779897900</td>\n",
"      <td>en</td>\n",
"      <td>2443251500</td>\n",
"      <td>NaN</td>\n",
"      <td>Some work on the ropes in today’s boxing class...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68712</th>\n",
"      <td>[😭]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:27</td>\n",
"      <td>925624310974136300</td>\n",
"      <td>en</td>\n",
"      <td>2406186390</td>\n",
"      <td>NaN</td>\n",
"      <td>Scotty and Kristen’s halloween costumes <EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68713</th>\n",
"      <td>[😂]</td>\n",
"      <td>[]</td>\n",
"      <td>[@rfrandrea, @AdaaanAndyyy]</td>\n",
"      <td>2017-11-01 07:23:30</td>\n",
"      <td>925624323557146600</td>\n",
"      <td>en</td>\n",
"      <td>1039448149</td>\n",
"      <td>9.256150e+17</td>\n",
"      <td><USER> <USER> May pre-month celebration sis <E...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68714</th>\n",
"      <td>[😭, 😭, 💘]</td>\n",
"      <td>[]</td>\n",
"      <td>[@peachshua1230]</td>\n",
"      <td>2017-11-01 07:23:31</td>\n",
"      <td>925624327755591700</td>\n",
"      <td>en</td>\n",
"      <td>845085544589672400</td>\n",
"      <td>9.256009e+17</td>\n",
"      <td><USER> Awww <EMOJI> Ajsksjdjd im smiling like ...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68715</th>\n",
"      <td>[😫, ✋]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:34</td>\n",
"      <td>925624340342812700</td>\n",
"      <td>en</td>\n",
"      <td>924752524871131100</td>\n",
"      <td>NaN</td>\n",
"      <td>I hate when I send a text or snap n I'm so anx...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68716</th>\n",
"      <td>[😂, 🙁]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:34</td>\n",
"      <td>925624340355280900</td>\n",
"      <td>en</td>\n",
"      <td>419493819</td>\n",
"      <td>NaN</td>\n",
"      <td>The answer is no I have no plans and I never l...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68717</th>\n",
"      <td>[😭]</td>\n",
"      <td>[]</td>\n",
"      <td>[@BeachBoy_Gab]</td>\n",
"      <td>2017-11-01 07:23:34</td>\n",
"      <td>925624340346937300</td>\n",
"      <td>en</td>\n",
"      <td>2457745952</td>\n",
"      <td>9.256219e+17</td>\n",
"      <td><USER> LMAOOO I'm so proud <EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68718</th>\n",
"      <td>[😢, 💔]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:34</td>\n",
"      <td>925624340338507800</td>\n",
"      <td>en</td>\n",
"      <td>1955767531</td>\n",
"      <td>NaN</td>\n",
"      <td>My cousin/little sister is leaving to San Fran...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68719</th>\n",
"      <td>[🙃]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:35</td>\n",
"      <td>925624344524361700</td>\n",
"      <td>en</td>\n",
"      <td>796490344581898200</td>\n",
"      <td>NaN</td>\n",
"      <td>Can't be alone w my thoughts tonight so just g...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68720</th>\n",
"      <td>[😂, 😂, 😂, 😂, 😂]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:36</td>\n",
"      <td>925624348710285300</td>\n",
"      <td>en</td>\n",
"      <td>907808317124177900</td>\n",
"      <td>NaN</td>\n",
"      <td><EMOJI><EMOJI><EMOJI><EMOJI><EMOJI> ambot!!!</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68721</th>\n",
"      <td>[👌, 👊, 🙌]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:37</td>\n",
"      <td>925624352929910800</td>\n",
"      <td>en</td>\n",
"      <td>262162415</td>\n",
"      <td>NaN</td>\n",
"      <td><EMOJI>🏽<EMOJI>🏽 1st of the month!!Happy 1st o...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68722</th>\n",
"      <td>[😴]</td>\n",
"      <td>[#WednesdayWisdom]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:41</td>\n",
"      <td>925624369715515400</td>\n",
"      <td>en</td>\n",
"      <td>574882525</td>\n",
"      <td>NaN</td>\n",
"      <td><HASHTAG> ... stay in bed <EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68723</th>\n",
"      <td>[😂, 😂, 😂, 😂]</td>\n",
"      <td>[]</td>\n",
"      <td>[@Louis_Tomlinson, @NiallOfficial]</td>\n",
"      <td>2017-11-01 07:23:44</td>\n",
"      <td>925624382269124600</td>\n",
"      <td>en</td>\n",
"      <td>556175173</td>\n",
"      <td>9.254038e+17</td>\n",
"      <td><USER> <USER> THIS IS GOLD. GOLD. <EMOJI><EMOJ...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68724</th>\n",
"      <td>[😀]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:45</td>\n",
"      <td>925624386455031800</td>\n",
"      <td>en</td>\n",
"      <td>1610265588</td>\n",
"      <td>NaN</td>\n",
"      <td><EMOJI> thank you for the kind compliment</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68725</th>\n",
"      <td>[😎]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:45</td>\n",
"      <td>925624386454937600</td>\n",
"      <td>en</td>\n",
"      <td>4760724450</td>\n",
"      <td>NaN</td>\n",
"      <td>Enjoyed the silence <EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68726</th>\n",
"      <td>[✨]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:46</td>\n",
"      <td>925624390657572900</td>\n",
"      <td>en</td>\n",
"      <td>882858115636514800</td>\n",
"      <td>NaN</td>\n",
"      <td>OS: Spiderman Homecoming <EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68727</th>\n",
"      <td>[🤷]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:46</td>\n",
"      <td>925624390682849300</td>\n",
"      <td>en</td>\n",
"      <td>188129628</td>\n",
"      <td>NaN</td>\n",
"      <td>Go to hell <EMOJI>🏽♀️</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68728</th>\n",
"      <td>[😘]</td>\n",
"      <td>[]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:46</td>\n",
"      <td>925624390666129400</td>\n",
"      <td>en</td>\n",
"      <td>2473135939</td>\n",
"      <td>NaN</td>\n",
"      <td>Thank you Yomi! <EMOJI></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68729</th>\n",
"      <td>[😂]</td>\n",
"      <td>[]</td>\n",
"      <td>[@discopiggu]</td>\n",
"      <td>2017-11-01 07:23:46</td>\n",
"      <td>925624390670106600</td>\n",
"      <td>en</td>\n",
"      <td>2373584209</td>\n",
"      <td>9.256241e+17</td>\n",
"      <td><USER> Lol. Just enjoy the stars. Music Kidhar...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68730</th>\n",
"      <td>[🙏]</td>\n",
"      <td>[#NYCStrong]</td>\n",
"      <td>[]</td>\n",
"      <td>2017-11-01 07:23:50</td>\n",
"      <td>925624407459971100</td>\n",
"      <td>en</td>\n",
"      <td>181689756</td>\n",
"      <td>NaN</td>\n",
"      <td>Thoughts and prayers for NY<EMOJI>🏻 <HASHTAG></td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68731</th>\n",
"      <td>[💁]</td>\n",
"      <td>[#GreatSuccess]</td>\n",
"      <td>[@BrianyH]</td>\n",
"      <td>2017-11-01 07:23:50</td>\n",
"      <td>925624407460057100</td>\n",
"      <td>en</td>\n",
"      <td>60160788</td>\n",
"      <td>9.254610e+17</td>\n",
"      <td><USER> I searched COCK, PENIS, SHLONG, WINKY, ...</td>\n",
"    </tr>\n",
"    <tr>\n",
"      <th>68732</th>\n",
"      <td>[🍃, 🌻, 🌻, 🍃, 🍃, 💐, 💐, 🍃, 🙋]</td>\n",
"      <td>[]</td>\n",
"      <td>[@amitbarman520]</td>\n",
"      <td>2017-11-01 07:23:53</td>\n",
"      <td>925624420022063100</td>\n",
"      <td>en</td>\n",
"      <td>3792290725</td>\n",
"      <td>9.256215e+17</td>\n",
"      <td><USER> Thank you so much<EMOJI><EMOJI><EMOJI><...</td>\n",
"    </tr>\n",
"  </tbody>\n",
"</table>\n",
"<p>68733 rows × 9 columns</p>\n",
"</div>"
],
"text/plain": [
" EMOJI \\\n",
"0 [🔥, 👏] \n",
"1 [🤦] \n",
"2 [😄] \n",
"3 [📷] \n",
"4 [😩, 😩] \n",
"5 [😭, 💓] \n",
"6 [😂] \n",
"7 [💯] \n",
"8 [🙄] \n",
"9 [😟, 😥, 😢] \n",
"10 [🌻] \n",
"11 [🍁, 🌺, 🍂] \n",
"12 [🍃] \n",
"13 [💭, 🤦] \n",
"14 [😍, 😘] \n",
"15 [😜] \n",
"16 [😂, 🔥] \n",
"17 [💕] \n",
"18 [😉] \n",
"19 [👅] \n",
"20 [👌, 🙂] \n",
"21 [🤗, 📺] \n",
"22 [💯] \n",
"23 [👅, 💦, 🍑] \n",
"24 [💔] \n",
"25 [🤐, 🤐, 🤐] \n",
"26 [😴] \n",
"27 [😂] \n",
"28 [😂, 🙄] \n",
"29 [🎉, 🎂, 🎈, 🎊, 🎁, 💜] \n",
"... ... \n",
"68703 [😕] \n",
"68704 [😂, 😩] \n",
"68705 [😂] \n",
"68706 [💖] \n",
"68707 [🌆, 👉, 🚖, 📞] \n",
"68708 [😁] \n",
"68709 [😋] \n",
"68710 [👌, 🎃, 😘] \n",
"68711 [👍] \n",
"68712 [😭] \n",
"68713 [😂] \n",
"68714 [😭, 😭, 💘] \n",
"68715 [😫, ✋] \n",
"68716 [😂, 🙁] \n",
"68717 [😭] \n",
"68718 [😢, 💔] \n",
"68719 [🙃] \n",
"68720 [😂, 😂, 😂, 😂, 😂] \n",
"68721 [👌, 👊, 🙌] \n",
"68722 [😴] \n",
"68723 [😂, 😂, 😂, 😂] \n",
"68724 [😀] \n",
"68725 [😎] \n",
"68726 [✨] \n",
"68727 [🤷] \n",
"68728 [😘] \n",
"68729 [😂] \n",
"68730 [🙏] \n",
"68731 [💁] \n",
"68732 [🍃, 🌻, 🌻, 🍃, 🍃, 💐, 💐, 🍃, 🙋] \n",
"\n",
" HASHTAGS \\\n",
"0 [] \n",
"1 [] \n",
"2 [] \n",
"3 [] \n",
"4 [] \n",
"5 [] \n",
"6 [] \n",
"7 [] \n",
"8 [] \n",
"9 [] \n",
"10 [] \n",
"11 [] \n",
"12 [#mortdale, #partofthefamily, #gorgeousboy] \n",
"13 [] \n",
"14 [#7YearsOfKMH2] \n",
"15 [] \n",
"16 [] \n",
"17 [] \n",
"18 [] \n",
"19 [#footfetishnation] \n",
"20 [] \n",
"21 [] \n",
"22 [] \n",
"23 [] \n",
"24 [] \n",
"25 [] \n",
"26 [] \n",
"27 [] \n",
"28 [] \n",
"29 [] \n",
"... ... \n",
"68703 [#halloweencostumes] \n",
"68704 [] \n",
"68705 [#MUFC] \n",
"68706 [] \n",
"68707 [#BurkeCentre] \n",
"68708 [] \n",
"68709 [] \n",
"68710 [#portlandoregon, #portlandhalloween, #carrie…] \n",
"68711 [] \n",
"68712 [] \n",
"68713 [] \n",
"68714 [] \n",
"68715 [] \n",
"68716 [] \n",
"68717 [] \n",
"68718 [] \n",
"68719 [] \n",
"68720 [] \n",
"68721 [] \n",
"68722 [#WednesdayWisdom] \n",
"68723 [] \n",
"68724 [] \n",
"68725 [] \n",
"68726 [] \n",
"68727 [] \n",
"68728 [] \n",
"68729 [] \n",
"68730 [#NYCStrong] \n",
"68731 [#GreatSuccess] \n",
"68732 [] \n",
"\n",
" LINKED_USER datetime \\\n",
"0 [] 2017-11-01 13:29:00 \n",
"1 [] 2017-11-01 13:29:00 \n",
"2 [] 2017-11-01 13:29:03 \n",
"3 [] 2017-11-01 13:29:04 \n",
"4 [@hiphopphiIes] 2017-11-01 13:29:06 \n",
"5 [@WizMommma] 2017-11-01 13:29:02 \n",
"6 [] 2017-11-01 13:29:05 \n",
"7 [] 2017-11-01 13:29:07 \n",
"8 [@SeaDimon, @lsarsour] 2017-11-01 13:29:09 \n",
"9 [@Ian_khetye] 2017-11-01 13:29:10 \n",
"10 [] 2017-11-01 13:29:13 \n",
"11 [@Dimafadma] 2017-11-01 13:29:15 \n",
"12 [] 2017-11-01 13:29:16 \n",
"13 [] 2017-11-01 13:29:20 \n",
"14 [] 2017-11-01 13:29:20 \n",
"15 [] 2017-11-01 13:29:22 \n",
"16 [] 2017-11-01 13:29:23 \n",
"17 [@yungbabytate] 2017-11-01 13:29:23 \n",
"18 [@cmckenney] 2017-11-01 13:29:23 \n",
"19 [] 2017-11-01 13:29:25 \n",
"20 [] 2017-11-01 13:29:25 \n",
"21 [] 2017-11-01 13:29:26 \n",
"22 [] 2017-11-01 13:29:26 \n",
"23 [@ctrlpurp] 2017-11-01 13:29:29 \n",
"24 [@saunders_court1] 2017-11-01 13:29:30 \n",
"25 [] 2017-11-01 13:29:31 \n",
"26 [] 2017-11-01 13:29:32 \n",
"27 [@xxxtentacion] 2017-11-01 13:29:35 \n",
"28 [] 2017-11-01 13:29:35 \n",
"29 [@justinerooney_] 2017-11-01 13:29:37 \n",
"... ... ... \n",
"68703 [] 2017-11-01 07:23:04 \n",
"68704 [] 2017-11-01 07:23:05 \n",
"68705 [] 2017-11-01 07:23:06 \n",
"68706 [] 2017-11-01 07:23:07 \n",
"68707 [] 2017-11-01 07:23:18 \n",
"68708 [@mychosliaheart, @BarrettoJulia, @iamjoshuaga... 2017-11-01 07:23:18 \n",
"68709 [] 2017-11-01 07:23:24 \n",
"68710 [] 2017-11-01 07:23:24 \n",
"68711 [@8limbsbondi...] 2017-11-01 07:23:26 \n",
"68712 [] 2017-11-01 07:23:27 \n",
"68713 [@rfrandrea, @AdaaanAndyyy] 2017-11-01 07:23:30 \n",
"68714 [@peachshua1230] 2017-11-01 07:23:31 \n",
"68715 [] 2017-11-01 07:23:34 \n",
"68716 [] 2017-11-01 07:23:34 \n",
"68717 [@BeachBoy_Gab] 2017-11-01 07:23:34 \n",
"68718 [] 2017-11-01 07:23:34 \n",
"68719 [] 2017-11-01 07:23:35 \n",
"68720 [] 2017-11-01 07:23:36 \n",
"68721 [] 2017-11-01 07:23:37 \n",
"68722 [] 2017-11-01 07:23:41 \n",
"68723 [@Louis_Tomlinson, @NiallOfficial] 2017-11-01 07:23:44 \n",
"68724 [] 2017-11-01 07:23:45 \n",
"68725 [] 2017-11-01 07:23:45 \n",
"68726 [] 2017-11-01 07:23:46 \n",
"68727 [] 2017-11-01 07:23:46 \n",
"68728 [] 2017-11-01 07:23:46 \n",
"68729 [@discopiggu] 2017-11-01 07:23:46 \n",
"68730 [] 2017-11-01 07:23:50 \n",
"68731 [@BrianyH] 2017-11-01 07:23:50 \n",
"68732 [@amitbarman520] 2017-11-01 07:23:53 \n",
"\n",
" id lang person reply_to \\\n",
"0 925716304635547600 en 31507978 NaN \n",
"1 925716304664911900 en 231994649 NaN \n",
"2 925716317214089200 en 2592765104 NaN \n",
"3 925716321416949800 en 278737933 NaN \n",
"4 925716329801310200 en 824586253634981900 9.257162e+17 \n",
"5 925716313019965400 en 1581953814 9.257088e+17 \n",
"6 925716325607133200 en 1001999683 NaN \n",
"7 925716334008082400 en 745222369183043600 NaN \n",
"8 925716342401052700 en 798557155217539100 9.257147e+17 \n",
"9 925716346570240000 en 744396039126421500 9.250629e+17 \n",
"10 925716359182520300 en 721490010118205400 8.965900e+17 \n",
"11 925716367558545400 en 520536723 9.257159e+17 \n",
"12 925716371735900200 en 850852815941517300 NaN \n",
"13 925716388513230800 en 914145041588867100 NaN \n",
"14 925716388525645800 en 2425405622 NaN \n",
"15 925716396931240000 en 4614871873 NaN \n",
"16 925716401125331000 en 2831608345 NaN \n",
"17 925716401133948900 en 788571974633009200 9.255778e+17 \n",
"18 925716401125544000 en 218307802 9.257115e+17 \n",
"19 925716409489002500 en 885261166146179100 NaN \n",
"20 925716409497272300 en 831437760833609700 NaN \n",
"21 925716413699854300 en 231664542 NaN \n",
"22 925716413679009800 en 3196847035 NaN \n",
"23 925716426278735900 en 918492858352635900 9.257161e+17 \n",
"24 925716430473039900 en 3471187337 9.257163e+17 \n",
"25 925716434667184100 en 780060488600199200 NaN \n",
"26 925716438853345300 en 388380690 NaN \n",
||
"27 925716451457163300 en 899320696869974000 NaN \n",
|
||
"28 925716451461357600 en 784790670 NaN \n",
|
||
"29 925716459828936700 en 3051266655 NaN \n",
|
||
"... ... ... ... ... \n",
|
||
"68703 925624214522036200 en 1672876458 NaN \n",
|
||
"68704 925624218682777600 en 382473866 NaN \n",
|
||
"68705 925624222889766900 en 893145405457911800 NaN \n",
|
||
"68706 925624227088121900 en 240378516 NaN \n",
|
||
"68707 925624273237983200 en 784620573209002000 NaN \n",
|
||
"68708 925624273212805100 en 170998187 9.254136e+17 \n",
|
||
"68709 925624298395533300 en 1348667816 NaN \n",
|
||
"68710 925624298378801200 en 722481645765300200 NaN \n",
|
||
"68711 925624306779897900 en 2443251500 NaN \n",
|
||
"68712 925624310974136300 en 2406186390 NaN \n",
|
||
"68713 925624323557146600 en 1039448149 9.256150e+17 \n",
|
||
"68714 925624327755591700 en 845085544589672400 9.256009e+17 \n",
|
||
"68715 925624340342812700 en 924752524871131100 NaN \n",
|
||
"68716 925624340355280900 en 419493819 NaN \n",
|
||
"68717 925624340346937300 en 2457745952 9.256219e+17 \n",
|
||
"68718 925624340338507800 en 1955767531 NaN \n",
|
||
"68719 925624344524361700 en 796490344581898200 NaN \n",
|
||
"68720 925624348710285300 en 907808317124177900 NaN \n",
|
||
"68721 925624352929910800 en 262162415 NaN \n",
|
||
"68722 925624369715515400 en 574882525 NaN \n",
|
||
"68723 925624382269124600 en 556175173 9.254038e+17 \n",
|
||
"68724 925624386455031800 en 1610265588 NaN \n",
|
||
"68725 925624386454937600 en 4760724450 NaN \n",
|
||
"68726 925624390657572900 en 882858115636514800 NaN \n",
|
||
"68727 925624390682849300 en 188129628 NaN \n",
|
||
"68728 925624390666129400 en 2473135939 NaN \n",
|
||
"68729 925624390670106600 en 2373584209 9.256241e+17 \n",
|
||
"68730 925624407459971100 en 181689756 NaN \n",
|
||
"68731 925624407460057100 en 60160788 9.254610e+17 \n",
|
||
"68732 925624420022063100 en 3792290725 9.256215e+17 \n",
|
||
"\n",
|
||
" text \n",
|
||
"0 fashionbombdaily's photo <EMOJI><EMOJI>🏼 \n",
|
||
"1 It’s scary how on point my horoscope be <EMOJI... \n",
|
||
"2 Woooaaaahhh <EMOJI> \n",
|
||
"3 <EMOJI> vivalcli: Portraits by Zhao Guojing an... \n",
|
||
"4 <USER> i wanna know too<EMOJI><EMOJI> \n",
|
||
"5 <USER> veda was yoda too <EMOJI><EMOJI> \n",
|
||
"6 I’m less stressed about turning 30 now <EMOJI>... \n",
|
||
"7 Full charged. <EMOJI> \n",
|
||
"8 <USER> That’s part of the problem, (they) <USE... \n",
|
||
"9 <USER> got me emotional there<EMOJI><EMOJI><EM... \n",
|
||
"10 back to the yellow <EMOJI> \n",
|
||
"11 <USER> Happy month to you and your loved ones ... \n",
|
||
"12 Maxx and Patricia. Family hangs at For Good He... \n",
|
||
"13 I need to STOP beating myself up with my thoug... \n",
|
||
"14 Cutest Son <EMOJI>Roll no. 31 <EMOJI> <HASHTAG> \n",
|
||
"15 By the summer I should have everything up and ... \n",
|
||
"16 I know my English is not that good but that do... \n",
|
||
"17 <USER> I <EMOJI> u mama \n",
|
||
"18 <USER> That picture was NOT taken this morning... \n",
|
||
"19 Welcome to <HASHTAG> <EMOJI> \n",
|
||
"20 Awkward <EMOJI><EMOJI> \n",
|
||
"21 back at it with supernatural <EMOJI><EMOJI> \n",
|
||
"22 One of the best things I've learned was to sto... \n",
|
||
"23 <USER> Can I taste?<EMOJI><EMOJI><EMOJI> \n",
|
||
"24 <USER> we miss you ☹️<EMOJI> \n",
|
||
"25 Actually my bias in WJSN are Eunseo & Bona... \n",
|
||
"26 I so cannot be bothered with the rest of the d... \n",
|
||
"27 <USER> 2lit4life<EMOJI> \n",
|
||
"28 I’m not stop saying that!<EMOJI><EMOJI> \n",
|
||
"29 <USER> HAPPY BIHDAY <EMOJI><EMOJI><EMOJI><EMOJ... \n",
|
||
"... ... \n",
|
||
"68703 <HASHTAG> this one falls under the weird crazy... \n",
|
||
"68704 I'm not allowed to have chocolates yet, then I... \n",
|
||
"68705 Manchester United manager Mourinho slams 'spec... \n",
|
||
"68706 <EMOJI> en Bushwhick \n",
|
||
"68707 : <HASHTAG> <EMOJI> <EMOJI><EMOJI> For Taxi <E... \n",
|
||
"68708 <USER> <USER> <USER> Look, Mammeh and Daddeh! ... \n",
|
||
"68709 Life is so good with you <EMOJI> \n",
|
||
"68710 Happy Halloween! <EMOJI>🏽<EMOJI><EMOJI> <HASHT... \n",
|
||
"68711 Some work on the ropes in today’s boxing class... \n",
|
||
"68712 Scotty and Kristen’s halloween costumes <EMOJI> \n",
|
||
"68713 <USER> <USER> May pre-month celebration sis <E... \n",
|
||
"68714 <USER> Awww <EMOJI> Ajsksjdjd im smiling like ... \n",
|
||
"68715 I hate when I send a text or snap n I'm so anx... \n",
|
||
"68716 The answer is no I have no plans and I never l... \n",
|
||
"68717 <USER> LMAOOO I'm so proud <EMOJI> \n",
|
||
"68718 My cousin/little sister is leaving to San Fran... \n",
|
||
"68719 Can't be alone w my thoughts tonight so just g... \n",
|
||
"68720 <EMOJI><EMOJI><EMOJI><EMOJI><EMOJI> ambot!!! \n",
|
||
"68721 <EMOJI>🏽<EMOJI>🏽 1st of the month!!Happy 1st o... \n",
|
||
"68722 <HASHTAG> ... stay in bed <EMOJI> \n",
|
||
"68723 <USER> <USER> THIS IS GOLD. GOLD. <EMOJI><EMOJ... \n",
|
||
"68724 <EMOJI> thank you for the kind compliment \n",
|
||
"68725 Enjoyed the silence <EMOJI> \n",
|
||
"68726 OS: Spiderman Homecoming <EMOJI> \n",
|
||
"68727 Go to hell <EMOJI>🏽♀️ \n",
|
||
"68728 Thank you Yomi! <EMOJI> \n",
|
||
"68729 <USER> Lol. Just enjoy the stars. Music Kidhar... \n",
|
||
"68730 Thoughts and prayers for NY<EMOJI>🏻 <HASHTAG> \n",
|
||
"68731 <USER> I searched COCK, PENIS, SHLONG, WINKY, ... \n",
|
||
"68732 <USER> Thank you so much<EMOJI><EMOJI><EMOJI><... \n",
|
||
"\n",
|
||
"[68733 rows x 9 columns]"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "twitter_data = pd.read_json(json_files[0], encoding=\"utf-8\")\n",
    "twitter_data"
   ]
  },
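  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* note: only the first file in `json_files` is loaded above. A minimal sketch (assuming `json_files` is the file list built earlier; `twitter_data_all` is a hypothetical name) for combining all parts:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# sketch (not executed here): concatenate all dataset parts into one dataframe\n",
    "# twitter_data_all = pd.concat([pd.read_json(f, encoding=\"utf-8\") for f in json_files], ignore_index=True)"
   ]
  },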
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* extract the emoji lists and the plain tweet text from the data frame"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "emojis = twitter_data['EMOJI']\n",
    "plain_text = twitter_data['text']"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* make our plain text more \"plain\":\n",
    "    * remove the placeholder keywords `<EMOJI>`, `<USER>` and `<HASHTAG>`\n",
    "    * remove remaining non-informative emojis, like skin tone modifiers and gender signs"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "# blacklist of modifier emojis: the five skin tone modifiers and the two gender signs:\n",
    "emoji_blacklist = set([\n",
    "    chr(0x1F3FB),\n",
    "    chr(0x1F3FC),\n",
    "    chr(0x1F3FD),\n",
    "    chr(0x1F3FE),\n",
    "    chr(0x1F3FF),\n",
    "    chr(0x2642),\n",
    "    chr(0x2640)\n",
    "])"
   ]
  },
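  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* sanity check: the blacklisted characters are the five Fitzpatrick skin tone modifiers and the male/female signs. Their official unicode names can be printed like this:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import unicodedata\n",
    "\n",
    "# print codepoint and unicode name of every blacklisted character\n",
    "for c in sorted(emoji_blacklist):\n",
    "    print(hex(ord(c)), unicodedata.name(c))"
   ]
  },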
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "# filter the placeholder keywords and the blacklisted modifier emojis out:\n",
    "plain_text = plain_text.str.replace(\"(<EMOJI>|<USER>|<HASHTAG>)\",\"\").str.replace(\"[\" + \"\".join(list(emoji_blacklist)) + \"]\",\"\")"
   ]
  },
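  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* a small demo of the two replace steps on a made-up example tweet (hypothetical input, not from the dataset):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# the keywords vanish, and so does the appended skin tone modifier\n",
    "demo = pd.Series([\"<USER> love this <EMOJI>\" + chr(0x1F3FB) + \" <HASHTAG>\"])\n",
    "demo.str.replace(\"(<EMOJI>|<USER>|<HASHTAG>)\",\"\").str.replace(\"[\" + \"\".join(list(emoji_blacklist)) + \"]\",\"\")"
   ]
  },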
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* convert all emojis to a sentiment vector"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "labels = emoji2sent([latest(e) for e in emojis])\n"
   ]
  },
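  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* (`latest` is one of the selection helpers defined at the top of the notebook.) As a quick sanity check, a single emoji can be mapped to its sentiment vector and back to the closest emoji:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# round trip for one emoji: emoji -> sentiment vector -> closest emoji\n",
    "v = emoji_to_sentiment_vector('😂')\n",
    "print(v, '->', sentiment_vector_to_emoji(v))"
   ]
  },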
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "68733"
      ]
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "len(labels)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "wrong_labels = np.isnan(np.linalg.norm(labels, axis=1))"
   ]
  },
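  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* this works because emojis missing from the sentiment lexicon yield `NaN` vectors, and the norm of a row containing `NaN` is itself `NaN`. A minimal sketch with toy values:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# toy example: the second row has no valid sentiment vector\n",
    "demo_labels = np.array([[0.5, 0.3, 0.2], [np.nan, np.nan, np.nan]])\n",
    "np.isnan(np.linalg.norm(demo_labels, axis=1))  # -> array([False,  True])"
   ]
  },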
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* remove all samples we have no label for"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "labels = labels[np.invert(wrong_labels)]\n",
    "plain_text = plain_text[np.invert(wrong_labels)]\n",
    "emojis = emojis[np.invert(wrong_labels)]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "33368 33368 33368\n"
     ]
    }
   ],
   "source": [
    "print(len(labels), len(emojis), len(plain_text))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* Apply stemming and lemmatization (if needed)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "from nltk.stem.snowball import SnowballStemmer\n",
    "from nltk.stem import WordNetLemmatizer\n",
    "from nltk import pos_tag\n",
    "from nltk import word_tokenize\n",
    "from nltk.corpus import wordnet"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "def get_wordnet_pos(treebank_tag):\n",
    "\n",
    "    if treebank_tag.startswith('J'):\n",
    "        return wordnet.ADJ\n",
    "    elif treebank_tag.startswith('V'):\n",
    "        return wordnet.VERB\n",
    "    elif treebank_tag.startswith('N'):\n",
    "        return wordnet.NOUN\n",
    "    elif treebank_tag.startswith('R'):\n",
    "        return wordnet.ADV\n",
    "    else:\n",
    "        return wordnet.NOUN"
   ]
  },
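  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* usage sketch: `pos_tag` returns uppercase Penn Treebank tags, which is exactly what the `startswith` checks above expect:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# tag a toy sentence and map every treebank tag to its wordnet counterpart\n",
    "tagged = pos_tag(word_tokenize(\"the cat sat quickly\"))\n",
    "print(tagged)\n",
    "print([get_wordnet_pos(tag) for _, tag in tagged])"
   ]
  },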
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "stemmer = SnowballStemmer(\"english\")\n",
    "for key in plain_text.keys():\n",
    "    stemmed_sent = []\n",
    "    for word in plain_text[key].split(\" \"):\n",
    "        word_stemmed = stemmer.stem(word)\n",
    "        stemmed_sent.append(word_stemmed)\n",
    "    stemmed_sent = (\" \").join(stemmed_sent)\n",
    "    plain_text[key] = stemmed_sent"
   ]
  },
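  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* the same stemming can be written as a pandas `apply` (a sketch; running it on the already stemmed series would stem twice, so it is only demonstrated on a toy sentence):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def stem_sentence(sentence):\n",
    "    # stem every whitespace separated token and join the results again\n",
    "    return \" \".join(stemmer.stem(word) for word in sentence.split(\" \"))\n",
    "\n",
    "# equivalent to the loop above: plain_text = plain_text.apply(stem_sentence)\n",
    "stem_sentence(\"having wonderful feelings today\")"
   ]
  },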
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
|
||
"2 woooaaaahhh\n",
|
||
"4 i wan na know too\n",
|
||
"6 i 'm le stress about turn 30 now i think i'v r...\n",
|
||
"9 got me emot there\n",
|
||
"14 cutest son roll no . 31\n",
|
||
"15 by the summer i should have everyth up and run...\n",
|
||
"18 that pictur wa not taken this morning !\n",
|
||
"26 i so can not be bother with the rest of the da...\n",
|
||
"27 2lit4lif\n",
|
||
"35 hate fall asleep befor i put my phone on the c...\n",
|
||
"36 unexpect saw two of my crush today . this day ...\n",
|
||
"40 elvi whi o whi ? our girl wa such a love stori...\n",
|
||
"42 you'r late i ate them all\n",
|
||
"43 me toooo\n",
|
||
"47 the pressur is just too much\n",
|
||
"51 i broke grammar\n",
|
||
"52 have not desir to go to work today\n",
|
||
"53 omg do n't it scari all i know is that i do no...\n",
|
||
"56 achoo mr. fuck nigga you , you done caught cau...\n",
|
||
"58 i can never catch a dang break !\n",
|
||
"59 pas my p on two hour of sleep\n",
|
||
"60 i 'm realli not amus\n",
|
||
"65 i can help you\n",
|
||
"71 whew i slept good af last night\n",
|
||
"74 this would be epic . pizza and play perfect gi...\n",
|
||
"76 hey , it 1st novemb\n",
|
||
"80 u is to press bitch for me to have been speak ...\n",
|
||
"88 lmfao thought it wa just me be bitter\n",
|
||
"89 yupp yuppp . super prettttyyy , my heart cant ...\n",
|
||
"90 bakit halo halong seri binanggit mo be ? none ...\n",
|
||
" ... \n",
|
||
"68675 go back to dark hair tomorrow , mhmm yasss\n",
|
||
"68677 i miss them so much\n",
|
||
"68678 i wan na feel your gut too\n",
|
||
"68683 everi time\n",
|
||
"68687 i neither own nor watch tv . now go watch cnn\n",
|
||
"68688 revolutionari love\n",
|
||
"68694 ear worm is run in the famili after sing an aw...\n",
|
||
"68696 ill never look at you the same . yeah you got ...\n",
|
||
"68699 it our 3 year anniversari today to celebrate ,...\n",
|
||
"68700 person that scare me\n",
|
||
"68701 damn girl . can u look ani hotter than this ? ...\n",
|
||
"68703 this one fall under the weird crazi one .\n",
|
||
"68704 i 'm not allow to have chocol yet , then i uni...\n",
|
||
"68705 manchest unit manag mourinho slam specialists'...\n",
|
||
"68708 look , mammeh and daddeh ! cuuutee..\n",
|
||
"68709 life is so good with you\n",
|
||
"68710 happi halloween !\n",
|
||
"68712 scotti and kristen halloween costum\n",
|
||
"68713 may pre-month celebr si\n",
|
||
"68717 lmaooo i 'm so proud\n",
|
||
"68720 ambot ! ! !\n",
|
||
"68721 1st of the month ! ! happi 1st of novemb *53 d...\n",
|
||
"68722 ... stay in bed\n",
|
||
"68723 this is gold . gold .\n",
|
||
"68724 thank you for the kind compliment\n",
|
||
"68725 enjoy the silenc\n",
|
||
"68728 thank you yomi !\n",
|
||
"68729 lol . just enjoy the star . music kidhar aur b...\n",
|
||
"68730 thought and prayer for ny\n",
|
||
"68732 thank you so muchhav a happi wednesday and a g...\n",
|
||
"Name: text, Length: 33368, dtype: object\n"
     ]
    }
   ],
   "source": [
    "lemmatizer = WordNetLemmatizer()\n",
    "for key in plain_text.keys():\n",
    "    lemmatized_sent = []\n",
    "    sent_pos = pos_tag(word_tokenize(plain_text[key]))\n",
    "    for word in sent_pos:\n",
    "        # keep the treebank tag's original (upper) case, otherwise every word is treated as a noun:\n",
    "        wordnet_pos = get_wordnet_pos(word[1])\n",
    "        word_lemmatized = lemmatizer.lemmatize(word[0], pos=wordnet_pos)\n",
    "        lemmatized_sent.append(word_lemmatized)\n",
    "    lemmatized_sent = (\" \").join(lemmatized_sent)\n",
    "    plain_text[key] = lemmatized_sent\n",
    "print(plain_text)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* generate idf-like weights for the emojis:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [],
   "source": [
    "# first, count in how many tweets each emoji occurs:\n",
    "emoji_count = {}\n",
    "\n",
    "for e_list in emojis:\n",
    "    for e in set(e_list):\n",
    "        if e not in emoji_count:\n",
    "            emoji_count[e] = 0\n",
    "        emoji_count[e] += 1\n",
    "\n",
    "emoji_sum = sum([emoji_count[e] for e in emoji_count])\n",
    "\n",
    "emoji_weights = {}\n",
    "for e in emoji_count:\n",
    "    # idf-like weight: the rarer an emoji is, the higher its weight\n",
    "    emoji_weights[e] = np.log((emoji_sum / emoji_count[e]))\n",
    "\n",
    "weights_sum = sum([emoji_weights[x] for x in emoji_weights])\n",
    "\n",
    "# normalize:\n",
    "for e in emoji_weights:\n",
    "    emoji_weights[e] = emoji_weights[e] / weights_sum\n",
    "\n",
    "emoji_weights['X'] = 0 # dummy values\n",
    "emoji_count['X'] = 0"
   ]
  },
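  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* a tiny worked example of the weighting above (toy counts, not the real data): with 8 + 2 = 10 counted occurrences, the rare emoji gets `log(10/2) ≈ 1.61`, the frequent one `log(10/8) ≈ 0.22`, and both are then normalized to sum to one:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "toy_count = {'😂': 8, '😭': 2}\n",
    "toy_sum = sum(toy_count.values())\n",
    "toy_weights = {e: np.log(toy_sum / toy_count[e]) for e in toy_count}\n",
    "toy_norm = sum(toy_weights.values())\n",
    "{e: w / toy_norm for e, w in toy_weights.items()}"
   ]
  },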
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* most used emojis in the dataset"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [],
   "source": [
    "import operator"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[('😂', 10182),\n",
       " ('😭', 3893),\n",
       " ('😍', 2866),\n",
       " ('😩', 1647),\n",
       " ('😊', 1450),\n",
       " ('😘', 1151),\n",
       " ('🙏', 1089),\n",
       " ('🙌', 1003),\n",
       " ('😉', 752),\n",
       " ('😁', 697),\n",
       " ('😅', 651),\n",
       " ('😎', 606),\n",
       " ('😢', 544),\n",
       " ('😒', 539),\n",
       " ('😏', 478),\n",
       " ('😌', 434),\n",
       " ('😔', 415),\n",
       " ('😋', 397),\n",
       " ('😀', 392),\n",
       " ('😤', 368)]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": [
       "['😂',\n",
       " '😭',\n",
       " '😍',\n",
       " '😩',\n",
       " '😊',\n",
       " '😘',\n",
       " '🙏',\n",
       " '🙌',\n",
       " '😉',\n",
       " '😁',\n",
       " '😅',\n",
       " '😎',\n",
       " '😢',\n",
       " '😒',\n",
       " '😏',\n",
       " '😌',\n",
       " '😔',\n",
       " '😋',\n",
       " '😀',\n",
       " '😤']"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "sorted_emoji_count = list(reversed(sorted(emoji_count.items(), key=operator.itemgetter(1))))\n",
    "display(sorted_emoji_count[:20])\n",
    "\n",
    "top_emojis = [x[0] for x in sorted_emoji_count[:20]]\n",
    "display(top_emojis)"
   ]
  },
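  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* the same counting can be done more compactly with `collections.Counter` (a sketch; `itertools` is already imported at the top of the notebook):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from collections import Counter\n",
    "\n",
    "# each emoji counts only once per tweet, just like in the counting loop above\n",
    "Counter(itertools.chain.from_iterable(set(e_list) for e_list in emojis)).most_common(20)"
   ]
  },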
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* only keep samples labeled with one of the most used emojis:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "26197 26197 26197\n"
     ]
    }
   ],
   "source": [
    "in_top = [sentiment_vector_to_emoji(x) in top_emojis for x in labels]\n",
    "labels = labels[in_top]\n",
    "plain_text = plain_text[in_top]\n",
    "emojis = emojis[in_top]\n",
    "print(len(labels), len(emojis), len(plain_text))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* generate train and test sets:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [],
   "source": [
    "X1, Xt1, y1, yt1 = train_test_split(plain_text, labels, test_size=0.1, random_state=4222)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "#y1_weights = np.array([(sum([emoji_weights[e] for e in e_list]) / len(e_list)) if len(e_list) > 0 else 0 for e_list in sent2emoji(y1)])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [],
   "source": [
    "vectorizer = TfidfVectorizer(stop_words='english')\n",
    "vec_train = vectorizer.fit_transform(X1)\n",
    "vec_test = vectorizer.transform(Xt1)"
   ]
  },
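  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* quick look at the fitted vectorizer: the vocabulary size is the input dimension of the network trained below:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "print(len(vectorizer.vocabulary_), \"terms in the tf-idf vocabulary\")\n",
    "print(vec_train.shape, vec_test.shape)"
   ]
  },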
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* train the model. This can take a very long time..."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Using TensorFlow backend.\n"
     ]
    }
   ],
   "source": [
    "from sklearn.neural_network import MLPClassifier as MLP\n",
    "from sklearn.multiclass import OneVsRestClassifier as OVRC\n",
    "from sklearn.tree import DecisionTreeClassifier as DTC\n",
    "\n",
    "from keras.models import Sequential\n",
    "from keras.layers import Dense"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [],
   "source": [
    "def train(max_size = 10000, layers=[(1024, 'relu'),(y1[0].shape[0],'softmax')], random_state=4222, ovrc=False, n_iter=5):\n",
    "    \n",
    "    model = Sequential()\n",
    "    \n",
    "    # build mlp layers:\n",
    "    keras_layers = []\n",
    "    first_layer = True\n",
    "    for layer in layers:\n",
    "        if first_layer:\n",
    "            model.add(Dense(units=layer[0], activation=layer[1], input_dim=vectorizer.transform([\" \"])[0]._shape[1]))\n",
    "            first_layer = False\n",
    "        else:\n",
    "            model.add(Dense(units=layer[0], activation=layer[1]))\n",
    "    \n",
    "    #mlp = MLPClassifier(layers=sknn_layers, random_state=random_state, verbose=True, n_iter=n_iter, batch_size=100)\n",
    "    \n",
    "    model.compile(loss='mean_squared_error',\n",
    "                  optimizer='adam')\n",
    "    \n",
    "    clf = OVRC(model) if ovrc else model\n",
    "\n",
    "    clf.fit(vec_train[:max_size].A, y1[:max_size], validation_split=0.2, epochs=n_iter)#, sample_weight=y1_weights[:max_size])\n",
    "    \n",
    "    return clf"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 18861 samples, validate on 4716 samples\n",
      "Epoch 1/3\n",
      "18861/18861 [==============================] - 1106s 59ms/step - loss: 0.0185 - val_loss: 0.0152\n",
      "Epoch 2/3\n",
      "18861/18861 [==============================] - 1104s 59ms/step - loss: 0.0107 - val_loss: 0.0163\n",
      "Epoch 3/3\n",
      "18861/18861 [==============================] - 1106s 59ms/step - loss: 0.0065 - val_loss: 0.0166\n"
     ]
    }
   ],
   "source": [
    "clf = train(max_size=100000,layers=[(10000, 'relu'),(5000, 'relu'),(2500, 'relu'),(y1[0].shape[0],None)], n_iter=3)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* make a prediction and store it in a csv file:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [],
   "source": [
    "pred = clf.predict(vectorizer.transform(Xt1))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.011668838\n",
      "0.03387511671001757\n"
     ]
    }
   ],
   "source": [
    "print(np.linalg.norm(np.var(pred, axis=0)))\n",
    "print(np.linalg.norm(np.var(labels, axis=0)))\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [],
   "source": [
    "# build a dataframe to visualize test results:\n",
    "testlist = pd.DataFrame({'text': Xt1, \n",
    "                        'teacher': sent2emoji(yt1),\n",
    "                        'teacher_sentiment': yt1.tolist(),\n",
    "                        'predict': sent2emoji(pred, custom_target_emojis=top_emojis),\n",
    "                        'predicted_sentiment': pred.tolist()})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
|
||
"<div>\n",
|
||
"<style scoped>\n",
|
||
" .dataframe tbody tr th:only-of-type {\n",
|
||
" vertical-align: middle;\n",
|
||
" }\n",
|
||
"\n",
|
||
" .dataframe tbody tr th {\n",
|
||
" vertical-align: top;\n",
|
||
" }\n",
|
||
"\n",
|
||
" .dataframe thead th {\n",
|
||
" text-align: right;\n",
|
||
" }\n",
|
||
"</style>\n",
|
||
"<table border=\"1\" class=\"dataframe\">\n",
|
||
" <thead>\n",
|
||
" <tr style=\"text-align: right;\">\n",
|
||
" <th></th>\n",
|
||
" <th>text</th>\n",
|
||
" <th>teacher</th>\n",
|
||
" <th>teacher_sentiment</th>\n",
|
||
" <th>predict</th>\n",
|
||
" <th>predicted_sentiment</th>\n",
|
||
" </tr>\n",
|
||
" </thead>\n",
|
||
" <tbody>\n",
|
||
" <tr>\n",
|
||
" <th>35671</th>\n",
|
||
" <td>i feel like i care so much more in everi situat</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.4447824954986572, 0.30056363344192505, 0.27...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>25683</th>\n",
|
||
" <td>i did not meat to add that 2 there ... hav see...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😁</td>\n",
|
||
" <td>[0.5660845637321472, 0.1737498641014099, 0.284...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>8985</th>\n",
|
||
" <td>never…</td>\n",
|
||
" <td>😊</td>\n",
|
||
" <td>[0.7040175768989329, 0.059322033898305086, 0.2...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.4871470034122467, 0.26607102155685425, 0.27...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>5410</th>\n",
|
||
" <td>lmao on me ! ! ! wtf wa he suppos to say</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😢</td>\n",
|
||
" <td>[0.4061833620071411, 0.3226468861103058, 0.273...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>62611</th>\n",
|
||
" <td>this dude alway help me get through my school ...</td>\n",
|
||
" <td>😊</td>\n",
|
||
" <td>[0.7040175768989329, 0.059322033898305086, 0.2...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.4549962878227234, 0.21886931359767914, 0.33...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>48197</th>\n",
|
||
" <td>happi b'day sir</td>\n",
|
||
" <td>😊</td>\n",
|
||
" <td>[0.7040175768989329, 0.059322033898305086, 0.2...</td>\n",
|
||
" <td>😀</td>\n",
|
||
" <td>[0.6561306715011597, 0.11821962147951126, 0.27...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>23654</th>\n",
|
||
" <td>you need some good old fashion swedish jesus</td>\n",
|
||
" <td>🙏</td>\n",
|
||
" <td>[0.4983755685510071, 0.08057179987004548, 0.42...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.4600130021572113, 0.28595462441444397, 0.27...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>58207</th>\n",
|
||
" <td>these late shift are make me not have a social...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.47186147186147187, 0.2922077922077922, 0.23...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.49543458223342896, 0.25571855902671814, 0.2...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>374</th>\n",
|
||
" <td>dc this weekend</td>\n",
|
||
" <td>😍</td>\n",
|
||
" <td>[0.7296744771190439, 0.05173769460607014, 0.21...</td>\n",
|
||
" <td>😌</td>\n",
|
||
" <td>[0.6040589213371277, 0.15823380649089813, 0.26...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>26310</th>\n",
|
||
" <td>paul lad you 'll make e blush</td>\n",
|
||
" <td>😊</td>\n",
|
||
" <td>[0.7040175768989329, 0.059322033898305086, 0.2...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.452500581741333, 0.2882971167564392, 0.2790...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>30892</th>\n",
|
||
" <td>did you have a fun halloween ?</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.4644194543361664, 0.2708289325237274, 0.277...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>11868</th>\n",
|
||
" <td>hi handsom</td>\n",
|
||
" <td>😍</td>\n",
|
||
" <td>[0.7296744771190439, 0.05173769460607014, 0.21...</td>\n",
|
||
" <td>😊</td>\n",
|
||
" <td>[0.7254493236541748, 0.12355809658765793, 0.22...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>46219</th>\n",
|
||
" <td>i 'm not okay with this , i 'm su snapchat</td>\n",
|
||
" <td>😭</td>\n",
|
||
" <td>[0.34310532030401736, 0.4364820846905538, 0.22...</td>\n",
|
||
" <td>😔</td>\n",
|
||
" <td>[0.3264158070087433, 0.48023173213005066, 0.23...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>13583</th>\n",
|
||
" <td>my parent be so mad i be buy new stuff & amp ;...</td>\n",
|
||
" <td>😭</td>\n",
|
||
" <td>[0.34310532030401736, 0.4364820846905538, 0.22...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.4271591007709503, 0.29361462593078613, 0.29...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>43843</th>\n",
|
||
" <td>one of the few song that calm me down esp on f...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.44168680906295776, 0.2790682315826416, 0.29...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>63589</th>\n",
|
||
" <td>iphon x bouta be the last phone we ever buy</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.41863512992858887, 0.3106093108654022, 0.28...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>53695</th>\n",
|
||
" <td>visit my main man today ❤ i miss u papa</td>\n",
|
||
" <td>😭</td>\n",
|
||
" <td>[0.34310532030401736, 0.4364820846905538, 0.22...</td>\n",
|
||
" <td>😁</td>\n",
|
||
" <td>[0.5650997757911682, 0.19236208498477936, 0.27...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>67529</th>\n",
|
||
" <td>donut</td>\n",
|
||
" <td>😍</td>\n",
|
||
" <td>[0.7296744771190439, 0.05173769460607014, 0.21...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.45511549711227417, 0.28582143783569336, 0.2...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>25493</th>\n",
|
||
" <td>ha anyon heard this by ? who the fuck knew he ...</td>\n",
|
||
" <td>😘</td>\n",
|
||
" <td>[0.7546600877192983, 0.05290570175438596, 0.19...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.4276219606399536, 0.30413898825645447, 0.28...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>19486</th>\n",
|
||
" <td>wow superrbb</td>\n",
|
||
" <td>😍</td>\n",
|
||
" <td>[0.7296744771190439, 0.05173769460607014, 0.21...</td>\n",
|
||
" <td>😊</td>\n",
|
||
" <td>[0.7149834036827087, 0.10459273308515549, 0.24...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>48449</th>\n",
|
||
" <td>of cours they do n't . their perfect model of ...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.5363025665283203, 0.22163532674312592, 0.27...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>4504</th>\n",
|
||
" <td>plea pick me . pick me . pick me . please .</td>\n",
|
||
" <td>😍</td>\n",
|
||
" <td>[0.7296744771190439, 0.05173769460607014, 0.21...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.4641677737236023, 0.18824045360088348, 0.37...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>40285</th>\n",
|
||
" <td>shiid no crack is wack</td>\n",
|
||
" <td>😭</td>\n",
|
||
" <td>[0.34310532030401736, 0.4364820846905538, 0.22...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.44292521476745605, 0.28201037645339966, 0.2...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>56741</th>\n",
|
||
" <td>pj still sleep like a newborn</td>\n",
|
||
" <td>😩</td>\n",
|
||
" <td>[0.22289823008849557, 0.5912610619469026, 0.18...</td>\n",
|
||
" <td>😢</td>\n",
|
||
" <td>[0.40168094635009766, 0.3777309060096741, 0.24...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>22948</th>\n",
|
||
" <td>an opinion doe n't mean you make sen first off...</td>\n",
|
||
" <td>😌</td>\n",
|
||
" <td>[0.6240601503759399, 0.13984962406015036, 0.23...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.4365620017051697, 0.2830066680908203, 0.294...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>68426</th>\n",
|
||
" <td>missyou too</td>\n",
|
||
" <td>😘</td>\n",
|
||
" <td>[0.7546600877192983, 0.05290570175438596, 0.19...</td>\n",
|
||
" <td>😭</td>\n",
|
||
" <td>[0.3452186584472656, 0.4593580365180969, 0.223...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>13431</th>\n",
|
||
" <td>i swear she did</td>\n",
|
||
" <td>😩</td>\n",
|
||
" <td>[0.22289823008849557, 0.5912610619469026, 0.18...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.4436468482017517, 0.2736954987049103, 0.294...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>66287</th>\n",
|
||
" <td>it true , he wa the mutt ( big-d ) nut .</td>\n",
|
||
" <td>😉</td>\n",
|
||
" <td>[0.5634451019066403, 0.0992767915844839, 0.337...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.47334975004196167, 0.2881445586681366, 0.26...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>41980</th>\n",
|
||
" <td>is happen so happi</td>\n",
|
||
" <td>😭</td>\n",
|
||
" <td>[0.34310532030401736, 0.4364820846905538, 0.22...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.4635038673877716, 0.30251604318618774, 0.26...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>34632</th>\n",
|
||
" <td>saw that the first one said sose you tmmrw and np</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.43297499418258667, 0.31000325083732605, 0.2...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>...</th>\n",
|
||
" <td>...</td>\n",
|
||
" <td>...</td>\n",
|
||
" <td>...</td>\n",
|
||
" <td>...</td>\n",
|
||
" <td>...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>47218</th>\n",
|
||
" <td>keep it i do n't want it</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.470976859331131, 0.2997904419898987, 0.2489...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>35087</th>\n",
|
||
" <td>6 year ago today we said our final good bye , ...</td>\n",
|
||
" <td>😢</td>\n",
|
||
" <td>[0.39118825100133514, 0.38451268357810414, 0.2...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.4478594660758972, 0.30181050300598145, 0.27...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>61252</th>\n",
|
||
" <td>amen</td>\n",
|
||
" <td>🙏</td>\n",
|
||
" <td>[0.4983755685510071, 0.08057179987004548, 0.42...</td>\n",
|
||
" <td>😁</td>\n",
|
||
" <td>[0.5783949494361877, 0.15405726432800293, 0.30...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>39960</th>\n",
|
||
" <td>fact</td>\n",
|
||
" <td>😎</td>\n",
|
||
" <td>[0.5981432360742706, 0.10477453580901856, 0.29...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.4938848316669464, 0.24237176775932312, 0.25...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>365</th>\n",
|
||
" <td>nw final found someon hi loydi</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.47186147186147187, 0.2922077922077922, 0.23...</td>\n",
|
||
" <td>😌</td>\n",
|
||
" <td>[0.6203110218048096, 0.17863908410072327, 0.23...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>50665</th>\n",
|
||
" <td>i need ur shoulder to cri on</td>\n",
|
||
" <td>😢</td>\n",
|
||
" <td>[0.39118825100133514, 0.38451268357810414, 0.2...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.4619033932685852, 0.2977892756462097, 0.267...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>21007</th>\n",
|
||
" <td>awww you 'll get me teari eye gurl !</td>\n",
|
||
" <td>😘</td>\n",
|
||
" <td>[0.7546600877192983, 0.05290570175438596, 0.19...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.5029815435409546, 0.2711288630962372, 0.253...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>18819</th>\n",
|
||
" <td>〰happi hump day to all my ladi is def sweet like</td>\n",
|
||
" <td>😘</td>\n",
|
||
" <td>[0.7546600877192983, 0.05290570175438596, 0.19...</td>\n",
|
||
" <td>🙌</td>\n",
|
||
" <td>[0.6974080801010132, 0.11416944861412048, 0.24...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>42421</th>\n",
|
||
" <td>thanks .</td>\n",
|
||
" <td>😊</td>\n",
|
||
" <td>[0.7040175768989329, 0.059322033898305086, 0.2...</td>\n",
|
||
" <td>😁</td>\n",
|
||
" <td>[0.5765037536621094, 0.18341206014156342, 0.26...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>38705</th>\n",
|
||
" <td>it okay i did n't see you until i wa in your face</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😢</td>\n",
|
||
" <td>[0.4030645787715912, 0.36543628573417664, 0.25...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>47033</th>\n",
|
||
" <td>fall in with alexissdang ⬅️</td>\n",
|
||
" <td>😍</td>\n",
|
||
" <td>[0.7296744771190439, 0.05173769460607014, 0.21...</td>\n",
|
||
" <td>😌</td>\n",
|
||
" <td>[0.6219494342803955, 0.1795988380908966, 0.226...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>38742</th>\n",
|
||
" <td>let your wild side free girl ! ! ! ! !</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😢</td>\n",
|
||
" <td>[0.4049544930458069, 0.35102248191833496, 0.25...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>67610</th>\n",
|
||
" <td>can we get season 3 already ! ?</td>\n",
|
||
" <td>😭</td>\n",
|
||
" <td>[0.34310532030401736, 0.4364820846905538, 0.22...</td>\n",
|
||
" <td>😢</td>\n",
|
||
" <td>[0.416861355304718, 0.35468244552612305, 0.245...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>61021</th>\n",
|
||
" <td>have to quot this bih rememb this nigga</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.4401417672634125, 0.2810788154602051, 0.292...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>14904</th>\n",
|
||
" <td>he so damn</td>\n",
|
||
" <td>😩</td>\n",
|
||
" <td>[0.22289823008849557, 0.5912610619469026, 0.18...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.5411785244941711, 0.2148503214120865, 0.246...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>50994</th>\n",
|
||
" <td>yasss it time for a great show maxloyal™♛ : gi...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>🙌</td>\n",
|
||
" <td>[0.7073097229003906, 0.12480126321315765, 0.23...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>6973</th>\n",
|
||
" <td>got to love a fish finger sarni</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😌</td>\n",
|
||
" <td>[0.6375303864479065, 0.14495858550071716, 0.24...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>3269</th>\n",
|
||
" <td>say dat</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.5052645802497864, 0.21280284225940704, 0.30...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>54827</th>\n",
|
||
" <td>if ur happi i 'm happi</td>\n",
|
||
" <td>😊</td>\n",
|
||
" <td>[0.7040175768989329, 0.059322033898305086, 0.2...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.5138391852378845, 0.26520460844039917, 0.25...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>19409</th>\n",
|
||
" <td>rockstar</td>\n",
|
||
" <td>😀</td>\n",
|
||
" <td>[0.6560364464692483, 0.08428246013667426, 0.25...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.520979106426239, 0.22392335534095764, 0.265...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>38703</th>\n",
|
||
" <td>god fuck me i alreadi fix it onc</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😔</td>\n",
|
||
" <td>[0.32009249925613403, 0.4819949269294739, 0.22...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>25133</th>\n",
|
||
" <td>it social accept to listen to ani christma mus...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😁</td>\n",
|
||
" <td>[0.5703040361404419, 0.17875489592552185, 0.27...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>15339</th>\n",
|
||
" <td>i thrash p in imessag now he wan na bet on 2k</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.4571400284767151, 0.2949279546737671, 0.269...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>28082</th>\n",
|
||
" <td>my fatass need some lemon pepper wing w a lil ...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😭</td>\n",
|
||
" <td>[0.36291953921318054, 0.4310339391231537, 0.22...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>44462</th>\n",
|
||
" <td>fck off tristan yes soph collect u in 15 n we ...</td>\n",
|
||
" <td>😤</td>\n",
|
||
" <td>[0.2691131498470948, 0.4801223241590214, 0.250...</td>\n",
|
||
" <td>😌</td>\n",
|
||
" <td>[0.6122021675109863, 0.15579015016555786, 0.27...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>60212</th>\n",
|
||
" <td>open the bag</td>\n",
|
||
" <td>😋</td>\n",
|
||
" <td>[0.6784741144414169, 0.04495912806539509, 0.27...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.470508873462677, 0.2711063623428345, 0.2810...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>34950</th>\n",
|
||
" <td>i ask peopl to guess my zodiac sign and this o...</td>\n",
|
||
" <td>😤</td>\n",
|
||
" <td>[0.2691131498470948, 0.4801223241590214, 0.250...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.4431285262107849, 0.27477312088012695, 0.29...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>59462</th>\n",
|
||
" <td>i regret this sm</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😢</td>\n",
|
||
" <td>[0.40375131368637085, 0.3289208710193634, 0.27...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>19465</th>\n",
|
||
" <td>fuck colleg</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.4494284391403198, 0.31076472997665405, 0.26...</td>\n",
|
||
" </tr>\n",
|
||
" <tr>\n",
|
||
" <th>8135</th>\n",
|
||
" <td>aye this wa my first time actual see it wendi ...</td>\n",
|
||
" <td>😂</td>\n",
|
||
" <td>[0.46813021474490496, 0.24716181096977158, 0.2...</td>\n",
|
||
" <td>😅</td>\n",
|
||
" <td>[0.42708122730255127, 0.31392431259155273, 0.2...</td>\n",
|
||
" </tr>\n",
|
||
" </tbody>\n",
|
||
"</table>\n",
|
||
"<p>2620 rows × 5 columns</p>\n",
|
||
"</div>"
|
||
],
|
||
"text/plain": [
|
||
" text teacher \\\n",
|
||
"35671 i feel like i care so much more in everi situat 😂 \n",
|
||
"25683 i did not meat to add that 2 there ... hav see... 😂 \n",
|
||
"8985 never… 😊 \n",
|
||
"5410 lmao on me ! ! ! wtf wa he suppos to say 😂 \n",
|
||
"62611 this dude alway help me get through my school ... 😊 \n",
|
||
"48197 happi b'day sir 😊 \n",
|
||
"23654 you need some good old fashion swedish jesus 🙏 \n",
|
||
"58207 these late shift are make me not have a social... 😅 \n",
|
||
"374 dc this weekend 😍 \n",
|
||
"26310 paul lad you 'll make e blush 😊 \n",
|
||
"30892 did you have a fun halloween ? 😂 \n",
|
||
"11868 hi handsom 😍 \n",
|
||
"46219 i 'm not okay with this , i 'm su snapchat 😭 \n",
|
||
"13583 my parent be so mad i be buy new stuff & amp ;... 😭 \n",
|
||
"43843 one of the few song that calm me down esp on f... 😂 \n",
|
||
"63589 iphon x bouta be the last phone we ever buy 😂 \n",
|
||
"53695 visit my main man today ❤ i miss u papa 😭 \n",
|
||
"67529 donut 😍 \n",
|
||
"25493 ha anyon heard this by ? who the fuck knew he ... 😘 \n",
|
||
"19486 wow superrbb 😍 \n",
|
||
"48449 of cours they do n't . their perfect model of ... 😂 \n",
|
||
"4504 plea pick me . pick me . pick me . please . 😍 \n",
|
||
"40285 shiid no crack is wack 😭 \n",
|
||
"56741 pj still sleep like a newborn 😩 \n",
|
||
"22948 an opinion doe n't mean you make sen first off... 😌 \n",
|
||
"68426 missyou too 😘 \n",
|
||
"13431 i swear she did 😩 \n",
|
||
"66287 it true , he wa the mutt ( big-d ) nut . 😉 \n",
|
||
"41980 is happen so happi 😭 \n",
|
||
"34632 saw that the first one said sose you tmmrw and np 😂 \n",
|
||
"... ... ... \n",
|
||
"47218 keep it i do n't want it 😂 \n",
|
||
"35087 6 year ago today we said our final good bye , ... 😢 \n",
|
||
"61252 amen 🙏 \n",
|
||
"39960 fact 😎 \n",
|
||
"365 nw final found someon hi loydi 😅 \n",
|
||
"50665 i need ur shoulder to cri on 😢 \n",
|
||
"21007 awww you 'll get me teari eye gurl ! 😘 \n",
|
||
"18819 〰happi hump day to all my ladi is def sweet like 😘 \n",
|
||
"42421 thanks . 😊 \n",
|
||
"38705 it okay i did n't see you until i wa in your face 😂 \n",
|
||
"47033 fall in with alexissdang ⬅️ 😍 \n",
|
||
"38742 let your wild side free girl ! ! ! ! ! 😂 \n",
|
||
"67610 can we get season 3 already ! ? 😭 \n",
|
||
"61021 have to quot this bih rememb this nigga 😂 \n",
|
||
"14904 he so damn 😩 \n",
|
||
"50994 yasss it time for a great show maxloyal™♛ : gi... 😂 \n",
|
||
"6973 got to love a fish finger sarni 😂 \n",
|
||
"3269 say dat 😂 \n",
|
||
"54827 if ur happi i 'm happi 😊 \n",
|
||
"19409 rockstar 😀 \n",
|
||
"38703 god fuck me i alreadi fix it onc 😂 \n",
|
||
"25133 it social accept to listen to ani christma mus... 😂 \n",
|
||
"15339 i thrash p in imessag now he wan na bet on 2k 😂 \n",
|
||
"28082 my fatass need some lemon pepper wing w a lil ... 😂 \n",
|
||
"44462 fck off tristan yes soph collect u in 15 n we ... 😤 \n",
|
||
"60212 open the bag 😋 \n",
|
||
"34950 i ask peopl to guess my zodiac sign and this o... 😤 \n",
|
||
"59462 i regret this sm 😂 \n",
|
||
"19465 fuck colleg 😂 \n",
|
||
"8135 aye this wa my first time actual see it wendi ... 😂 \n",
|
||
"\n",
|
||
" teacher_sentiment predict \\\n",
|
||
"35671 [0.46813021474490496, 0.24716181096977158, 0.2... 😅 \n",
|
||
"25683 [0.46813021474490496, 0.24716181096977158, 0.2... 😁 \n",
|
||
"8985 [0.7040175768989329, 0.059322033898305086, 0.2... 😂 \n",
|
||
"5410 [0.46813021474490496, 0.24716181096977158, 0.2... 😢 \n",
|
||
"62611 [0.7040175768989329, 0.059322033898305086, 0.2... 😂 \n",
|
||
"48197 [0.7040175768989329, 0.059322033898305086, 0.2... 😀 \n",
|
||
"23654 [0.4983755685510071, 0.08057179987004548, 0.42... 😅 \n",
|
||
"58207 [0.47186147186147187, 0.2922077922077922, 0.23... 😂 \n",
|
||
"374 [0.7296744771190439, 0.05173769460607014, 0.21... 😌 \n",
|
||
"26310 [0.7040175768989329, 0.059322033898305086, 0.2... 😂 \n",
|
||
"30892 [0.46813021474490496, 0.24716181096977158, 0.2... 😂 \n",
|
||
"11868 [0.7296744771190439, 0.05173769460607014, 0.21... 😊 \n",
|
||
"46219 [0.34310532030401736, 0.4364820846905538, 0.22... 😔 \n",
|
||
"13583 [0.34310532030401736, 0.4364820846905538, 0.22... 😂 \n",
|
||
"43843 [0.46813021474490496, 0.24716181096977158, 0.2... 😂 \n",
|
||
"63589 [0.46813021474490496, 0.24716181096977158, 0.2... 😅 \n",
|
||
"53695 [0.34310532030401736, 0.4364820846905538, 0.22... 😁 \n",
|
||
"67529 [0.7296744771190439, 0.05173769460607014, 0.21... 😂 \n",
|
||
"25493 [0.7546600877192983, 0.05290570175438596, 0.19... 😅 \n",
|
||
"19486 [0.7296744771190439, 0.05173769460607014, 0.21... 😊 \n",
|
||
"48449 [0.46813021474490496, 0.24716181096977158, 0.2... 😂 \n",
|
||
"4504 [0.7296744771190439, 0.05173769460607014, 0.21... 😂 \n",
|
||
"40285 [0.34310532030401736, 0.4364820846905538, 0.22... 😂 \n",
|
||
"56741 [0.22289823008849557, 0.5912610619469026, 0.18... 😢 \n",
|
||
"22948 [0.6240601503759399, 0.13984962406015036, 0.23... 😂 \n",
|
||
"68426 [0.7546600877192983, 0.05290570175438596, 0.19... 😭 \n",
|
||
"13431 [0.22289823008849557, 0.5912610619469026, 0.18... 😂 \n",
|
||
"66287 [0.5634451019066403, 0.0992767915844839, 0.337... 😅 \n",
|
||
"41980 [0.34310532030401736, 0.4364820846905538, 0.22... 😅 \n",
|
||
"34632 [0.46813021474490496, 0.24716181096977158, 0.2... 😅 \n",
|
||
"... ... ... \n",
|
||
"47218 [0.46813021474490496, 0.24716181096977158, 0.2... 😅 \n",
|
||
"35087 [0.39118825100133514, 0.38451268357810414, 0.2... 😅 \n",
|
||
"61252 [0.4983755685510071, 0.08057179987004548, 0.42... 😁 \n",
|
||
"39960 [0.5981432360742706, 0.10477453580901856, 0.29... 😂 \n",
|
||
"365 [0.47186147186147187, 0.2922077922077922, 0.23... 😌 \n",
|
||
"50665 [0.39118825100133514, 0.38451268357810414, 0.2... 😅 \n",
|
||
"21007 [0.7546600877192983, 0.05290570175438596, 0.19... 😅 \n",
|
||
"18819 [0.7546600877192983, 0.05290570175438596, 0.19... 🙌 \n",
|
||
"42421 [0.7040175768989329, 0.059322033898305086, 0.2... 😁 \n",
|
||
"38705 [0.46813021474490496, 0.24716181096977158, 0.2... 😢 \n",
|
||
"47033 [0.7296744771190439, 0.05173769460607014, 0.21... 😌 \n",
|
||
"38742 [0.46813021474490496, 0.24716181096977158, 0.2... 😢 \n",
|
||
"67610 [0.34310532030401736, 0.4364820846905538, 0.22... 😢 \n",
|
||
"61021 [0.46813021474490496, 0.24716181096977158, 0.2... 😂 \n",
|
||
"14904 [0.22289823008849557, 0.5912610619469026, 0.18... 😂 \n",
|
||
"50994 [0.46813021474490496, 0.24716181096977158, 0.2... 🙌 \n",
|
||
"6973 [0.46813021474490496, 0.24716181096977158, 0.2... 😌 \n",
|
||
"3269 [0.46813021474490496, 0.24716181096977158, 0.2... 😂 \n",
|
||
"54827 [0.7040175768989329, 0.059322033898305086, 0.2... 😅 \n",
|
||
"19409 [0.6560364464692483, 0.08428246013667426, 0.25... 😂 \n",
|
||
"38703 [0.46813021474490496, 0.24716181096977158, 0.2... 😔 \n",
|
||
"25133 [0.46813021474490496, 0.24716181096977158, 0.2... 😁 \n",
|
||
"15339 [0.46813021474490496, 0.24716181096977158, 0.2... 😅 \n",
|
||
"28082 [0.46813021474490496, 0.24716181096977158, 0.2... 😭 \n",
|
||
"44462 [0.2691131498470948, 0.4801223241590214, 0.250... 😌 \n",
|
||
"60212 [0.6784741144414169, 0.04495912806539509, 0.27... 😂 \n",
|
||
"34950 [0.2691131498470948, 0.4801223241590214, 0.250... 😂 \n",
|
||
"59462 [0.46813021474490496, 0.24716181096977158, 0.2... 😢 \n",
|
||
"19465 [0.46813021474490496, 0.24716181096977158, 0.2... 😅 \n",
|
||
"8135 [0.46813021474490496, 0.24716181096977158, 0.2... 😅 \n",
|
||
"\n",
|
||
" predicted_sentiment \n",
|
||
"35671 [0.4447824954986572, 0.30056363344192505, 0.27... \n",
|
||
"25683 [0.5660845637321472, 0.1737498641014099, 0.284... \n",
|
||
"8985 [0.4871470034122467, 0.26607102155685425, 0.27... \n",
|
||
"5410 [0.4061833620071411, 0.3226468861103058, 0.273... \n",
|
||
"62611 [0.4549962878227234, 0.21886931359767914, 0.33... \n",
|
||
"48197 [0.6561306715011597, 0.11821962147951126, 0.27... \n",
|
||
"23654 [0.4600130021572113, 0.28595462441444397, 0.27... \n",
|
||
"58207 [0.49543458223342896, 0.25571855902671814, 0.2... \n",
|
||
"374 [0.6040589213371277, 0.15823380649089813, 0.26... \n",
|
||
"26310 [0.452500581741333, 0.2882971167564392, 0.2790... \n",
|
||
"30892 [0.4644194543361664, 0.2708289325237274, 0.277... \n",
|
||
"11868 [0.7254493236541748, 0.12355809658765793, 0.22... \n",
|
||
"46219 [0.3264158070087433, 0.48023173213005066, 0.23... \n",
|
||
"13583 [0.4271591007709503, 0.29361462593078613, 0.29... \n",
|
||
"43843 [0.44168680906295776, 0.2790682315826416, 0.29... \n",
|
||
"63589 [0.41863512992858887, 0.3106093108654022, 0.28... \n",
|
||
"53695 [0.5650997757911682, 0.19236208498477936, 0.27... \n",
|
||
"67529 [0.45511549711227417, 0.28582143783569336, 0.2... \n",
|
||
"25493 [0.4276219606399536, 0.30413898825645447, 0.28... \n",
|
||
"19486 [0.7149834036827087, 0.10459273308515549, 0.24... \n",
|
||
"48449 [0.5363025665283203, 0.22163532674312592, 0.27... \n",
|
||
"4504 [0.4641677737236023, 0.18824045360088348, 0.37... \n",
|
||
"40285 [0.44292521476745605, 0.28201037645339966, 0.2... \n",
|
||
"56741 [0.40168094635009766, 0.3777309060096741, 0.24... \n",
|
||
"22948 [0.4365620017051697, 0.2830066680908203, 0.294... \n",
|
||
"68426 [0.3452186584472656, 0.4593580365180969, 0.223... \n",
|
||
"13431 [0.4436468482017517, 0.2736954987049103, 0.294... \n",
|
||
"66287 [0.47334975004196167, 0.2881445586681366, 0.26... \n",
|
||
"41980 [0.4635038673877716, 0.30251604318618774, 0.26... \n",
|
||
"34632 [0.43297499418258667, 0.31000325083732605, 0.2... \n",
|
||
"... ... \n",
|
||
"47218 [0.470976859331131, 0.2997904419898987, 0.2489... \n",
|
||
"35087 [0.4478594660758972, 0.30181050300598145, 0.27... \n",
|
||
"61252 [0.5783949494361877, 0.15405726432800293, 0.30... \n",
|
||
"39960 [0.4938848316669464, 0.24237176775932312, 0.25... \n",
|
||
"365 [0.6203110218048096, 0.17863908410072327, 0.23... \n",
|
||
"50665 [0.4619033932685852, 0.2977892756462097, 0.267... \n",
|
||
"21007 [0.5029815435409546, 0.2711288630962372, 0.253... \n",
|
||
"18819 [0.6974080801010132, 0.11416944861412048, 0.24... \n",
|
||
"42421 [0.5765037536621094, 0.18341206014156342, 0.26... \n",
|
||
"38705 [0.4030645787715912, 0.36543628573417664, 0.25... \n",
|
||
"47033 [0.6219494342803955, 0.1795988380908966, 0.226... \n",
|
||
"38742 [0.4049544930458069, 0.35102248191833496, 0.25... \n",
|
||
"67610 [0.416861355304718, 0.35468244552612305, 0.245... \n",
|
||
"61021 [0.4401417672634125, 0.2810788154602051, 0.292... \n",
|
||
"14904 [0.5411785244941711, 0.2148503214120865, 0.246... \n",
|
||
"50994 [0.7073097229003906, 0.12480126321315765, 0.23... \n",
|
||
"6973 [0.6375303864479065, 0.14495858550071716, 0.24... \n",
|
||
"3269 [0.5052645802497864, 0.21280284225940704, 0.30... \n",
|
||
"54827 [0.5138391852378845, 0.26520460844039917, 0.25... \n",
|
||
"19409 [0.520979106426239, 0.22392335534095764, 0.265... \n",
|
||
"38703 [0.32009249925613403, 0.4819949269294739, 0.22... \n",
|
||
"25133 [0.5703040361404419, 0.17875489592552185, 0.27... \n",
|
||
"15339 [0.4571400284767151, 0.2949279546737671, 0.269... \n",
|
||
"28082 [0.36291953921318054, 0.4310339391231537, 0.22... \n",
|
||
"44462 [0.6122021675109863, 0.15579015016555786, 0.27... \n",
|
||
"60212 [0.470508873462677, 0.2711063623428345, 0.2810... \n",
|
||
"34950 [0.4431285262107849, 0.27477312088012695, 0.29... \n",
|
||
"59462 [0.40375131368637085, 0.3289208710193634, 0.27... \n",
|
||
"19465 [0.4494284391403198, 0.31076472997665405, 0.26... \n",
|
||
"8135 [0.42708122730255127, 0.31392431259155273, 0.2... \n",
|
||
"\n",
|
||
"[2620 rows x 5 columns]"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "display(testlist)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* fraction of exactly correctly labeled sentences:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.1851145038167939\n"
     ]
    }
   ],
   "source": [
    "print (sum([1 if sample[1]['teacher'] == sample[1]['predict'] else 0 for sample in testlist.iterrows()]) / testlist.shape[0])"
   ]
  },
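  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* for comparison, a sketch of the majority baseline: the accuracy of always predicting the most frequent teacher emoji, against which the score above should be judged:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# accuracy of a classifier that always predicts the most frequent emoji in the test set\n",
    "majority_emoji = testlist['teacher'].value_counts().index[0]\n",
    "print(majority_emoji, (testlist['teacher'] == majority_emoji).mean())"
   ]
  },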
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* mean squared error:"
   ]
  },
|
||
{
|
||
"cell_type": "code",
|
||
"execution_count": 35,
|
||
"metadata": {},
|
||
"outputs": [
|
||
{
|
||
"data": {
|
||
"text/plain": [
|
||
"array([0.02218282, 0.02594105, 0.00323429])"
|
||
]
|
||
},
|
||
"metadata": {},
|
||
"output_type": "display_data"
|
||
}
|
||
],
|
||
"source": [
|
||
"teacher_sentiments = np.array([sample[1]['teacher_sentiment'] for sample in testlist.iterrows()])\n",
|
||
"predicted_sentiments = np.array([sample[1]['predicted_sentiment'] for sample in testlist.iterrows()])\n",
|
||
"\n",
|
||
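    "# squared errors averaged over all test samples, separately for each sentiment dimension\n",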
"mean_squared_error = ((teacher_sentiments - predicted_sentiments)**2).mean(axis=0)\n",
    "display(mean_squared_error)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
"* by an overall variance of:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Variance teacher: [0.02183094 0.02513847 0.00285735]\n",
      "Variance prediction: [0.00850173 0.00793481 0.00095984]\n"
     ]
    }
   ],
   "source": [
    "print(\"Variance teacher: \", np.var(teacher_sentiments, axis=0))\n",
    "print(\"Variance prediction: \", np.var(predicted_sentiments, axis=0))"
   ]
},
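  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* note: the predicted sentiments vary noticeably less than the teacher sentiments in every dimension, i.e. the classifier tends to predict conservatively, close to the mean sentiment"
   ]
  },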
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {},
   "outputs": [],
   "source": [
    "testlist.to_csv('test.csv')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
"* save classifier:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {},
   "outputs": [],
   "source": [
    "import pickle\n",
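    "# save the keras model in its own format and pickle the fitted vectorizer separately\n",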
"clf.save(\"clf2.keras\")\n",
    "pickle.dump( vectorizer, open( \"vec2.pickle\", \"wb\" ) )"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "----\n",
    "## testing area\n",
    "\n",
    "**for just testing, start from here!**\n",
    "\n",
    "download the dumped classifier and vectorizer from [here](https://the-cake-is-a-lie.net/nextcloud/index.php/s/NjMXamfwQsyrefG)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* loading classifier and vectorizer"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "from IPython.display import clear_output, Markdown, Math\n",
    "import ipywidgets as widgets\n",
"import sys\n",
|
||
"sys.path.append(\"..\")\n",
    "\n",
    "from Tools.Emoji_Distance import sentiment_vector_to_emoji\n",
    "from Tools.Emoji_Distance import emoji_to_sentiment_vector\n",
    "\n",
    "def emoji2sent(emoji_arr):\n",
    "    return np.array([emoji_to_sentiment_vector(e) for e in emoji_arr])\n",
    "\n",
    "def sent2emoji(sent_arr, custom_target_emojis=None):\n",
    "    return [sentiment_vector_to_emoji(s, custom_target_emojis=custom_target_emojis) for s in sent_arr]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
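    "# load the classifier and vectorizer that were dumped above (or downloaded via the link)\n",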
"import keras\n",
    "import pickle\n",
    "clf = keras.models.load_model(\"clf.keras\")\n",
    "vectorizer = pickle.load( open( \"vec.pickle\", \"rb\" ) )"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* define lookup emojis here:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "lookup_emojis = [#'😂',\n",
    "                 '😭',\n",
    "                 '😍',\n",
    "                 '😩',\n",
    "                 '😊',\n",
    "                 '😘',\n",
    "                 '🙏',\n",
    "                 '🙌',\n",
    "                 '😉',\n",
    "                 '😁',\n",
    "                 '😅',\n",
    "                 '😎',\n",
    "                 '😢',\n",
    "                 '😒',\n",
    "                 '😏',\n",
    "                 '😌',\n",
    "                 '😔',\n",
    "                 '😋',\n",
    "                 '😀',\n",
    "                 '😤']"
   ]
},
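  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* a minimal sketch of a direct prediction without the widget below (assumes `clf`, `vectorizer` and `lookup_emojis` from the cells above; the example sentence and variable names are illustrative):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# vectorize a raw sentence, predict its sentiment vector and map it to the closest lookup emoji\n",
    "sample_sentence = \"I love this!\"\n",
    "sample_pred = clf.predict(vectorizer.transform([sample_sentence]))\n",
    "print(sent2emoji(sample_pred, lookup_emojis)[0], sample_pred[0])"
   ]
  },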
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "* a simple output widget for testing:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "755aa31644db4628a3be1ff3b621fa28",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "Text(value='')"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "4601eccc07074e71983c9005d1d329b1",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "VBox(children=(Button(description='get emoji', icon='check', style=ButtonStyle(), tooltip='Click me'), Output(…"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "out = widgets.Output()\n",
    "\n",
    "t = widgets.Text()\n",
    "b = widgets.Button(\n",
    "    description='get emoji',\n",
    "    disabled=False,\n",
    "    button_style='', # 'success', 'info', 'warning', 'danger' or ''\n",
    "    tooltip='Click me',\n",
    "    icon='check'\n",
    ")\n",
"\n",
|
||
"\n",
|
||
"\n",
|
||
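    "# on button click: vectorize the text field content, predict a sentiment vector and show the closest emoji\n",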
"def handle_submit(sender):\n",
|
||
" with out:\n",
|
||
" clear_output()\n",
|
||
" with out:\n",
|
||
" pred = clf.predict(vectorizer.transform([t.value]))\n",
|
||
" \n",
|
||
" display(Markdown(\"# Predicted Emoji \" + str(sent2emoji(pred, lookup_emojis)[0])))\n",
|
||
" display(Markdown(\"# Sentiment Vector: $$ \\pmatrix{\" + str(pred[0,0]) +\n",
|
||
" \"\\\\\\\\\" + str(pred[0,1]) + \"\\\\\\\\\" + str(pred[0,2]) + \"}$$\"))\n",
|
||
"\n",
|
||
"b.on_click(handle_submit)\n",
|
||
" \n",
|
||
"display(t)\n",
|
||
"display(widgets.VBox([b, out])) "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "ename": "NameError",
     "evalue": "name 'mlb' is not defined",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-5-cb3b65f1446b>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mnumpy\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0my_trans\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmlb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minverse_transform\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0myt1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4\u001b[0m \u001b[0mpred_trans\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmlb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minverse_transform\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0myt1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mNameError\u001b[0m: name 'mlb' is not defined"
     ]
    }
   ],
   "source": [
"import numpy as np\n",
|
||
"\n",
|
||
"y_trans = mlb.inverse_transform(yt1)\n",
|
||
"pred_trans = mlb.inverse_transform(yt1)\n",
|
||
"\n",
|
||
"# evaluate accuracy\n",
|
||
"pos = 0\n",
|
||
"neg = 0\n",
|
||
"all = 0\n",
|
||
"for entry in range(len(y_trans)):\n",
|
||
" if len(np.intersect1d(y_trans[entry], pred_trans[entry])) > 0:\n",
|
||
" pos += 1\n",
|
||
" else:\n",
|
||
" neg += 1\n",
|
||
" all += 1\n",
|
||
"print(pos/all)\n",
|
||
"print(neg)"
   ]
  }
],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}