diff --git a/Project/simple_approach/simple_twitter_learning.ipynb b/Project/simple_approach/simple_twitter_learning.ipynb index c0599e5..cf52f2b 100644 --- a/Project/simple_approach/simple_twitter_learning.ipynb +++ b/Project/simple_approach/simple_twitter_learning.ipynb @@ -1215,7 +1215,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 7, "metadata": {}, "outputs": [], "source": [ @@ -1234,7 +1234,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 8, "metadata": {}, "outputs": [], "source": [ @@ -1252,7 +1252,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 9, "metadata": {}, "outputs": [], "source": [ @@ -1405,7 +1405,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "* generating train and test set:" + "* most used emojis in Dataset" ] }, { @@ -1413,13 +1413,89 @@ "execution_count": 17, "metadata": {}, "outputs": [], + "source": [ + "import operator" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[('๐Ÿ˜‚', 10182), ('๐Ÿ˜ญ', 3893), ('๐Ÿ˜', 2866), ('๐Ÿ˜ฉ', 1647), ('๐Ÿ˜Š', 1450)]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "['๐Ÿ˜‚', '๐Ÿ˜ญ', '๐Ÿ˜', '๐Ÿ˜ฉ', '๐Ÿ˜Š']" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "sorted_emoji_count = list(reversed(sorted(emoji_count.items(), key=operator.itemgetter(1))))\n", + "display(sorted_emoji_count[:5])\n", + "\n", + "top_emojis = [x[0] for x in sorted_emoji_count[:5]]\n", + "display(top_emojis)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "* only learn the most used ones:" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "17815 17815 17815\n" + ] + } + ], + "source": [ + "in_top = [sentiment_vector_to_emoji(x) in top_emojis for x in labels]\n", + "labels = labels[in_top]\n", + "plain_text = plain_text[in_top]\n", + "emojis = emojis[in_top]\n", + "print(len(labels), len(emojis), len(plain_text))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "* generating train and test set:" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], "source": [ "X1, Xt1, y1, yt1 = train_test_split(plain_text, labels, test_size=0.1, random_state=4222)" ] }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 21, "metadata": {}, "outputs": [], "source": [ @@ -1428,7 +1504,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 22, "metadata": {}, "outputs": [], "source": [ @@ -1446,9 +1522,17 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 23, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using TensorFlow backend.\n" + ] + } + ], "source": [ "from sklearn.neural_network import MLPClassifier as MLP\n", "from sklearn.multiclass import OneVsRestClassifier as OVRC\n", @@ -1460,7 +1544,7 @@ }, { "cell_type": "code", - "execution_count": 53, + "execution_count": 25, "metadata": {}, "outputs": [], "source": [ @@ -1486,23 +1570,79 @@ " \n", " clf = OVRC(model) if ovrc else model\n", "\n", - " clf.fit(vec_train[:max_size].A, y1[:max_size], validation_split=0.2, epochs=1)#, sample_weight=y1_weights[:max_size])\n", + " clf.fit(vec_train[:max_size].A, y1[:max_size], 
validation_split=0.2, epochs=n_iter)#, sample_weight=y1_weights[:max_size])\n", " \n", " return clf" ] }, { "cell_type": "code", - "execution_count": 54, + "execution_count": 27, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Train on 24024 samples, validate on 6007 samples\n", - "Epoch 1/1\n", - " 8576/24024 [=========>....................] - ETA: 10:31 - loss: 1.0637 - acc: 0.7273" + "Train on 800 samples, validate on 200 samples\n", + "Epoch 1/100\n", + "800/800 [==============================] - 1s 690us/step - loss: 1.0883 - acc: 0.6925 - val_loss: 1.0760 - val_acc: 0.7400\n", + "Epoch 2/100\n", + "800/800 [==============================] - 0s 527us/step - loss: 1.0732 - acc: 0.7150 - val_loss: 1.0628 - val_acc: 0.7400\n", + "Epoch 3/100\n", + "800/800 [==============================] - 0s 528us/step - loss: 1.0651 - acc: 0.7150 - val_loss: 1.0556 - val_acc: 0.7400\n", + "Epoch 4/100\n", + "800/800 [==============================] - 0s 534us/step - loss: 1.0610 - acc: 0.7150 - val_loss: 1.0515 - val_acc: 0.7400\n", + "Epoch 5/100\n", + "800/800 [==============================] - 0s 534us/step - loss: 1.0589 - acc: 0.7150 - val_loss: 1.0493 - val_acc: 0.7400\n", + "Epoch 6/100\n", + "800/800 [==============================] - 0s 511us/step - loss: 1.0579 - acc: 0.7150 - val_loss: 1.0481 - val_acc: 0.7400\n", + "Epoch 7/100\n", + "800/800 [==============================] - 0s 546us/step - loss: 1.0573 - acc: 0.7150 - val_loss: 1.0473 - val_acc: 0.7400\n", + "Epoch 8/100\n", + "800/800 [==============================] - 0s 545us/step - loss: 1.0571 - acc: 0.7150 - val_loss: 1.0469 - val_acc: 0.7400\n", + "Epoch 9/100\n", + "800/800 [==============================] - 0s 522us/step - loss: 1.0570 - acc: 0.7150 - val_loss: 1.0467 - val_acc: 0.7400\n", + "Epoch 10/100\n", + "800/800 [==============================] - 0s 529us/step - loss: 1.0569 - acc: 0.7150 - val_loss: 1.0466 - val_acc: 0.7400\n", + "Epoch 11/100\n", + "800/800 [==============================] - 0s 551us/step - loss: 1.0568 - acc: 0.7150 - val_loss: 1.0465 - val_acc: 0.7400\n", + "Epoch 12/100\n", + "800/800 [==============================] - 0s 529us/step - loss: 1.0568 - acc: 0.7150 - val_loss: 1.0464 - val_acc: 0.7400\n", + "Epoch 13/100\n", + "800/800 [==============================] - 0s 546us/step - loss: 1.0568 - acc: 0.7150 - val_loss: 1.0464 - val_acc: 0.7400\n", + "Epoch 14/100\n", + "800/800 [==============================] - 0s 609us/step - loss: 1.0567 - acc: 0.7150 - val_loss: 1.0464 - val_acc: 0.7400\n", + "Epoch 15/100\n", + "800/800 [==============================] - 0s 610us/step - loss: 1.0567 - acc: 0.7150 - val_loss: 1.0463 - val_acc: 0.7400\n", + "Epoch 16/100\n", + "800/800 [==============================] - 0s 540us/step - loss: 1.0567 - acc: 0.7150 - val_loss: 1.0464 - val_acc: 0.7400\n", + "Epoch 17/100\n", + "800/800 [==============================] - 0s 518us/step - loss: 1.0567 - acc: 0.7150 - val_loss: 1.0463 - val_acc: 0.7400\n", + "Epoch 18/100\n", + "800/800 [==============================] - 0s 547us/step - loss: 1.0567 - acc: 0.7150 - val_loss: 1.0463 - val_acc: 0.7400\n", + "Epoch 19/100\n", + "800/800 [==============================] - 0s 543us/step - loss: 1.0566 - acc: 0.7150 - val_loss: 1.0463 - val_acc: 0.7400\n", + "Epoch 20/100\n", + "800/800 [==============================] - 0s 550us/step - loss: 1.0566 - acc: 0.7150 - val_loss: 1.0463 - val_acc: 0.7400\n", + "Epoch 21/100\n", + "800/800 [==============================] - 0s 551us/step - 
loss: 1.0566 - acc: 0.7150 - val_loss: 1.0463 - val_acc: 0.7400\n", + "Epoch 22/100\n", + "800/800 [==============================] - 0s 552us/step - loss: 1.0566 - acc: 0.7150 - val_loss: 1.0463 - val_acc: 0.7400\n", + "Epoch 23/100\n", + "800/800 [==============================] - 0s 620us/step - loss: 1.0566 - acc: 0.7150 - val_loss: 1.0463 - val_acc: 0.7400\n", + "Epoch 24/100\n", + "800/800 [==============================] - 0s 587us/step - loss: 1.0566 - acc: 0.7150 - val_loss: 1.0463 - val_acc: 0.7400\n", + "Epoch 25/100\n", + "800/800 [==============================] - 0s 608us/step - loss: 1.0565 - acc: 0.7150 - val_loss: 1.0464 - val_acc: 0.7400\n", + "Epoch 26/100\n", + "800/800 [==============================] - 0s 538us/step - loss: 1.0565 - acc: 0.7150 - val_loss: 1.0463 - val_acc: 0.7400\n", + "Epoch 27/100\n", + "800/800 [==============================] - 0s 545us/step - loss: 1.0565 - acc: 0.7150 - val_loss: 1.0464 - val_acc: 0.7400\n", + "Epoch 28/100\n", + "800/800 [==============================] - 0s 567us/step - loss: 1.0565 - acc: 0.7150 - val_loss: 1.0464 - val_acc: 0.7400\n", + "Epoch 29/100\n", + "256/800 [========>.....................] - ETA: 0s - loss: 1.0609 - acc: 0.6992" ] }, { @@ -1512,105 +1652,19 @@ "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mclf\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmax_size\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m100000\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlayers\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m6200\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'relu'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my1\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'softmax'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;32m\u001b[0m in \u001b[0;36mtrain\u001b[0;34m(max_size, layers, random_state, ovrc, n_iter)\u001b[0m\n\u001b[1;32m 21\u001b[0m \u001b[0mclf\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mOVRC\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0movrc\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 22\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 23\u001b[0;31m \u001b[0mclf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvec_train\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mmax_size\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mA\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my1\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mmax_size\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalidation_split\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m0.2\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mepochs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;31m#, sample_weight=y1_weights[:max_size])\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 24\u001b[0m 
\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 25\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mclf\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m~/.local/lib/python3.6/site-packages/keras/models.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, **kwargs)\u001b[0m\n\u001b[1;32m 1000\u001b[0m \u001b[0minitial_epoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0minitial_epoch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1001\u001b[0m \u001b[0msteps_per_epoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msteps_per_epoch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1002\u001b[0;31m validation_steps=validation_steps)\n\u001b[0m\u001b[1;32m 1003\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1004\u001b[0m def evaluate(self, x=None, y=None,\n", - "\u001b[0;32m~/.local/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, **kwargs)\u001b[0m\n\u001b[1;32m 1703\u001b[0m \u001b[0minitial_epoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0minitial_epoch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1704\u001b[0m \u001b[0msteps_per_epoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msteps_per_epoch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1705\u001b[0;31m validation_steps=validation_steps)\n\u001b[0m\u001b[1;32m 1706\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1707\u001b[0m def evaluate(self, x=None, y=None,\n", - "\u001b[0;32m~/.local/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36m_fit_loop\u001b[0;34m(self, f, ins, out_labels, batch_size, epochs, verbose, callbacks, val_f, val_ins, shuffle, callback_metrics, initial_epoch, steps_per_epoch, validation_steps)\u001b[0m\n\u001b[1;32m 1234\u001b[0m \u001b[0mins_batch\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mins_batch\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtoarray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1235\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1236\u001b[0;31m \u001b[0mouts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mins_batch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1237\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mouts\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlist\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1238\u001b[0m \u001b[0mouts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mouts\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m~/.local/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, inputs)\u001b[0m\n\u001b[1;32m 2480\u001b[0m \u001b[0msession\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mget_session\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2481\u001b[0m 
updated = session.run(fetches=fetches, feed_dict=feed_dict,\n\u001b[0;32m-> 2482\u001b[0;31m **self.session_kwargs)\n\u001b[0m\u001b[1;32m 2483\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mupdated\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moutputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2484\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m~/.local/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 898\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 899\u001b[0m result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 900\u001b[0;31m run_metadata_ptr)\n\u001b[0m\u001b[1;32m 901\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 902\u001b[0m \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m~/.local/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 1133\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mfinal_fetches\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mfinal_targets\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mhandle\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mfeed_dict_tensor\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1134\u001b[0m results = self._do_run(handle, final_targets, final_fetches,\n\u001b[0;32m-> 1135\u001b[0;31m feed_dict_tensor, options, run_metadata)\n\u001b[0m\u001b[1;32m 1136\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1137\u001b[0m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m~/.local/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_do_run\u001b[0;34m(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 1314\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mhandle\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1315\u001b[0m return self._do_call(_run_fn, feeds, fetches, targets, options,\n\u001b[0;32m-> 1316\u001b[0;31m run_metadata)\n\u001b[0m\u001b[1;32m 1317\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1318\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_do_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0m_prun_fn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeeds\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m~/.local/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_do_call\u001b[0;34m(self, fn, *args)\u001b[0m\n\u001b[1;32m 1320\u001b[0m \u001b[0;32mdef\u001b[0m 
\u001b[0m_do_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1321\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1322\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1323\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0merrors\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mOpError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1324\u001b[0m \u001b[0mmessage\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcompat\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mas_text\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmessage\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m~/.local/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run_fn\u001b[0;34m(feed_dict, fetch_list, target_list, options, run_metadata)\u001b[0m\n\u001b[1;32m 1305\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_extend_graph\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1306\u001b[0m return self._call_tf_sessionrun(\n\u001b[0;32m-> 1307\u001b[0;31m options, feed_dict, fetch_list, target_list, run_metadata)\n\u001b[0m\u001b[1;32m 1308\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1309\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_prun_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m~/.local/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_call_tf_sessionrun\u001b[0;34m(self, options, feed_dict, fetch_list, target_list, run_metadata)\u001b[0m\n\u001b[1;32m 1407\u001b[0m return tf_session.TF_SessionRun_wrapper(\n\u001b[1;32m 1408\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moptions\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtarget_list\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1409\u001b[0;31m run_metadata)\n\u001b[0m\u001b[1;32m 1410\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1411\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0merrors\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mraise_exception_on_not_ok_status\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mstatus\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mclf\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmax_size\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1000\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlayers\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m200\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0;34m'relu'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m200\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'relu'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m200\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'linear'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my1\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'softmax'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mn_iter\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m100\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m\u001b[0m in \u001b[0;36mtrain\u001b[0;34m(max_size, layers, random_state, ovrc, n_iter)\u001b[0m\n\u001b[1;32m 21\u001b[0m \u001b[0mclf\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mOVRC\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmodel\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0movrc\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 22\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 23\u001b[0;31m \u001b[0mclf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvec_train\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mmax_size\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mA\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my1\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mmax_size\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalidation_split\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m0.2\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mepochs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mn_iter\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;31m#, sample_weight=y1_weights[:max_size])\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 24\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 25\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mclf\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.local/lib/python3.6/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, **kwargs)\u001b[0m\n\u001b[1;32m 1040\u001b[0m \u001b[0minitial_epoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0minitial_epoch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1041\u001b[0m \u001b[0msteps_per_epoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msteps_per_epoch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1042\u001b[0;31m validation_steps=validation_steps)\n\u001b[0m\u001b[1;32m 1043\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1044\u001b[0m def evaluate(self, x=None, y=None,\n", + "\u001b[0;32m~/.local/lib/python3.6/site-packages/keras/engine/training_arrays.py\u001b[0m in \u001b[0;36mfit_loop\u001b[0;34m(model, f, ins, out_labels, batch_size, epochs, verbose, callbacks, val_f, val_ins, shuffle, callback_metrics, initial_epoch, steps_per_epoch, validation_steps)\u001b[0m\n\u001b[1;32m 197\u001b[0m \u001b[0mins_batch\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mins_batch\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtoarray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 198\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 199\u001b[0;31m \u001b[0mouts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mins_batch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 200\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mouts\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlist\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 201\u001b[0m \u001b[0mouts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mouts\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.local/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, inputs)\u001b[0m\n\u001b[1;32m 2659\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_legacy_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2660\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 2661\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2662\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2663\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mpy_any\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mis_tensor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mx\u001b[0m \u001b[0;32min\u001b[0m \u001b[0minputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.local/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py\u001b[0m in \u001b[0;36m_call\u001b[0;34m(self, inputs)\u001b[0m\n\u001b[1;32m 2629\u001b[0m \u001b[0msymbol_vals\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2630\u001b[0m session)\n\u001b[0;32m-> 2631\u001b[0;31m \u001b[0mfetched\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_callable_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0marray_vals\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2632\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mfetched\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moutputs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2633\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m~/.local/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *args)\u001b[0m\n\u001b[1;32m 1449\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_created_with_new_api\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1450\u001b[0m return tf_session.TF_SessionRunCallable(\n\u001b[0;32m-> 1451\u001b[0;31m self._session._session, 
self._handle, args, status, None)\n\u001b[0m\u001b[1;32m 1452\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1453\u001b[0m return tf_session.TF_DeprecatedSessionRunCallable(\n", "\u001b[0;31mKeyboardInterrupt\u001b[0m: " ] } ], "source": [ - "clf = train(max_size=10000,layers=[(6200, 'relu'),(y1[0].shape[0],'softmax')])" - ] - }, - { - "cell_type": "code", - "execution_count": 43, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "61236 Look at you!! Smart and beautiful \n", - "66498 Missing home \n", - "3920 Lmfao \n", - "10415 thats true.. \n", - "61218 wrong \n", - "49421 Lmao no o, it's almost 8pm. Cheating time \n", - "44693 noted \n", - "21987 Lol the stupid question mark boxes ruined my t...\n", - "6458 She look just like u \n", - "18291 Popeyes chicken with a KFC biscuit though \n", - "21859 Still so unreal that my man an i have our own ...\n", - "103 Thatโ€™s what I said when I seen it thatโ€™s craz...\n", - "68215 dnt really knw what jst happened \n", - "66229 im so tired but not after seeing him \n", - "58987 As if I don't know \n", - "1272 Hahaha was it big? Iโ€™m not afraid of sp...\n", - "57222 Me seeing pics of my abusive ex with a kitten:...\n", - "43792 hey how is it going in Lucca? Wish I wa...\n", - "2041 Feeling so positive for weigh in \n", - "41344 Yeah I knocked the fuck out \n", - "5932 Would rather watch and the boys thanks \n", - "12743 Ralph Angel pissy drunk to drown out the hurt ...\n", - "31462 & sis he donโ€™t fuck with you !!! ๐Ÿ—ฃ \n", - "59079 I just need to know!!\n", - "57842 did all the butt stuff with and ...\n", - "8144 Sometimes my life is interesting \n", - "60991 Can I just say, I rlly fckin love ur pr...\n", - "54675 yeahwhat was she doing?\n", - "50329 Hahaha so give her befor a Starb...\n", - "55947 Damn! Right before he brought pe...\n", - " ... \n", - "9155 Hahaha true!! I didn't notice that ! \n", - "44871 and 'poof' .. He'll be great \n", - "42359 Going to a salon to have my hair cut tonight a...\n", - "7957 Donโ€™t try to fwm I will ignore the shit out of...\n", - "42418 Class when your car gives up on you the day be...\n", - "11077 Hi โ™กUr the reason that I smile everyday...\n", - "20842 See you then \n", - "46515 I wrote all my cards today \n", - "29521 Oml!! That kitty n...\n", - "24751 me and my mom are watching pickler and ...\n", - "67690 Lmao havent eaten kapana in months\n", - "51642 We do out best, don't we? \n", - "39359 nahhhhh, thatโ€™s what fans are for \n", - "965 Fuck you! Iโ€™m so jealous \n", - "60335 Good ur getting th...\n", - "4447 I don't think so but they are v good Th...\n", - "62531 can I just say y'all coming @ me for that post...\n", - "14965 I know I need you too \n", - "6004 Got me a Benihana date \n", - "11002 My only captain\n", - "8944 Come join our small digi team as to ...\n", - "11353 What a catch that was!A sprint with a dive! WO...\n", - "62892 Bit like the gunners then no def...\n", - "6238 He's a horrible horrible dad \n", - "16551 โ€œInfinityโ€ \n", - "38812 they be killing me\n", - "46147 Y'all will roast my ass. 
Akhant \n", - "65314 If this was America supporters would...\n", - "37843 That double \n", - "2921 Fucking up a correction, GG chalks \n", - "Name: text, Length: 30031, dtype: object" - ] - }, - "execution_count": 43, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "X1" + "clf = train(max_size=1000,layers=[(200, 'relu'),(200, 'relu'),(200, 'linear'),(y1[0].shape[0],'softmax')], n_iter=100)" ] }, { @@ -1622,7 +1676,7 @@ }, { "cell_type": "code", - "execution_count": 44, + "execution_count": 26, "metadata": {}, "outputs": [], "source": [ @@ -1631,33 +1685,26 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": 36, "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "array([[0.45662233, 0.2710931 , 0.27228457],\n", - " [0.4544683 , 0.27005136, 0.2754803 ],\n", - " [0.45572498, 0.2714926 , 0.27278242],\n", - " ...,\n", - " [0.4522461 , 0.27333662, 0.2744173 ],\n", - " [0.45729154, 0.26964816, 0.27306038],\n", - " [0.45031306, 0.2748751 , 0.2748118 ]], dtype=float32)" - ] - }, - "execution_count": 45, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "8.1356275e-06\n", + "0.02568319204819897\n" + ] } ], "source": [ - "pred" + "print(np.linalg.norm(np.var(pred, axis=0)))\n", + "print(np.linalg.norm(np.var(labels)))\n" ] }, { "cell_type": "code", - "execution_count": 49, + "execution_count": 28, "metadata": {}, "outputs": [], "source": [ @@ -1671,7 +1718,7 @@ }, { "cell_type": "code", - "execution_count": 50, + "execution_count": 36, "metadata": {}, "outputs": [ { @@ -1695,253 +1742,253 @@ " \n", " \n", " \n", - " predict\n", - " predicted_sentiment\n", + " text\n", " teacher\n", " teacher_sentiment\n", - " text\n", + " predict\n", + " predicted_sentiment\n", " \n", " \n", " \n", " \n", " 7618\n", - " ๐Ÿ˜ฌ\n", - " [0.45662233233451843, 0.27109310030937195, 0.2...\n", + " There's fucking snow outside\n", " ๐Ÿ˜’\n", " [0.21660649819494585, 0.5913357400722021, 0.19...\n", - " There's fucking snow outside\n", + " ๐Ÿ˜ฌ\n", + " [0.46037721633911133, 0.2677566707134247, 0.27...\n", " \n", " \n", " 6910\n", - " ๐Ÿ˜ฑ\n", - " [0.454468309879303, 0.27005136013031006, 0.275...\n", + " You look so animated bro! *Rimshot*, Get it? ...\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " <USER> You look so animated bro! 
*Rimshot*, Ge...\n", + " ๐Ÿ˜ฌ\n", + " [0.4636985957622528, 0.26727861166000366, 0.26...\n", " \n", " \n", " 35783\n", - " ๐Ÿ˜ฌ\n", - " [0.4557249844074249, 0.271492600440979, 0.2727...\n", + " Jon ordered it from a restaurant\n", " ๐Ÿ˜ญ\n", " [0.34310532030401736, 0.4364820846905538, 0.22...\n", - " <USER> Jon ordered it from a restaurant\n", + " ๐Ÿ˜ฌ\n", + " [0.4635818898677826, 0.2673886716365814, 0.269...\n", " \n", " \n", " 15623\n", - " ๐Ÿ˜ฌ\n", - " [0.4551065266132355, 0.27315083146095276, 0.27...\n", + " I just want to move back in with my mom\n", " ๐Ÿ˜…\n", " [0.47186147186147187, 0.2922077922077922, 0.23...\n", - " I just want to move back in with my mom\n", + " ๐Ÿ˜ฌ\n", + " [0.46267321705818176, 0.2673114538192749, 0.27...\n", " \n", " \n", " 12023\n", - " ๐Ÿ˜ฑ\n", - " [0.44956669211387634, 0.2718639373779297, 0.27...\n", + " guys lets vote!!!\n", " ๐Ÿ˜ญ\n", " [0.34310532030401736, 0.4364820846905538, 0.22...\n", - " guys lets vote!!!\n", + " ๐Ÿ˜ฌ\n", + " [0.4621191620826721, 0.26737213134765625, 0.27...\n", " \n", " \n", " 15763\n", - " ๐Ÿ˜ฑ\n", - " [0.4546020030975342, 0.27004513144493103, 0.27...\n", + " Bruhhhh this man need a show ASAP!!!\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " Bruhhhh this man need a show ASAP!!!\n", + " ๐Ÿ˜ฌ\n", + " [0.46237292885780334, 0.2675153613090515, 0.27...\n", " \n", " \n", " 57240\n", - " ๐Ÿ˜ฑ\n", - " [0.4514758884906769, 0.2734237313270569, 0.275...\n", + " AND THE RICH SIT IN A LOW PLACE\n", " ๐Ÿ˜Ž\n", " [0.5981432360742706, 0.10477453580901856, 0.29...\n", - " AND THE RICH SIT IN A LOW PLACE\n", + " ๐Ÿ˜ฌ\n", + " [0.4622046649456024, 0.2677682340145111, 0.270...\n", " \n", " \n", " 2418\n", - " ๐Ÿ˜ฌ\n", - " [0.4555293321609497, 0.2700120508670807, 0.274...\n", + " Time to go to bed\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " Time to go to bed\n", + " ๐Ÿ˜ฌ\n", + " [0.4639696180820465, 0.26709380745887756, 0.26...\n", " \n", " \n", " 66384\n", - " ๐Ÿ˜ฌ\n", - " [0.4541899561882019, 0.27133306860923767, 0.27...\n", + " May sound like a hillbilly, but girls that can...\n", " ๐Ÿ˜\n", " [0.7296744771190439, 0.05173769460607014, 0.21...\n", - " May sound like a hillbilly, but girls that can...\n", + " ๐Ÿ˜ฌ\n", + " [0.4639129638671875, 0.26741719245910645, 0.26...\n", " \n", " \n", " 44639\n", - " ๐Ÿ˜ฌ\n", - " [0.45672306418418884, 0.2679496109485626, 0.27...\n", + " IM DYING\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " <USER> <USER> <USER> IM DYING\n", + " ๐Ÿ˜ฌ\n", + " [0.46023082733154297, 0.27063173055648804, 0.2...\n", " \n", " \n", " 54463\n", - " ๐Ÿ˜ฑ\n", - " [0.45094504952430725, 0.2715355157852173, 0.27...\n", + " Black forest hot chocolate is coming back to C...\n", " ๐Ÿ™Œ\n", " [0.6613545816733067, 0.10092961487383798, 0.23...\n", - " Black forest hot chocolate is coming back to C...\n", + " ๐Ÿ˜ฌ\n", + " [0.46065258979797363, 0.2685313820838928, 0.27...\n", " \n", " \n", " 38407\n", - " ๐Ÿ˜ฑ\n", - " [0.45006927847862244, 0.2722932994365692, 0.27...\n", + " 5 days till my 21st.\n", " ๐Ÿ˜Ž\n", " [0.5981432360742706, 0.10477453580901856, 0.29...\n", - " 5 days till my 21st.\n", + " ๐Ÿ˜ฌ\n", + " [0.4624767005443573, 0.2678660750389099, 0.269...\n", " \n", " \n", " 31342\n", - " ๐Ÿ˜ฌ\n", - " [0.45655250549316406, 0.26978597044944763, 0.2...\n", + " I'm just going to leave my Twitter acct open ...\n", " ๐Ÿ˜„\n", " [0.5586552217453505, 0.13662374821173104, 0.30...\n", - " <USER> I'm just going to leave my Twitter acct...\n", + " ๐Ÿ˜ฌ\n", + " 
[0.4628538191318512, 0.2673165500164032, 0.269...\n", " \n", " \n", " 41255\n", - " ๐Ÿ˜ฑ\n", - " [0.45397621393203735, 0.27026963233947754, 0.2...\n", + " Oh god im so happy\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " Oh god im so happy\n", + " ๐Ÿ˜ฌ\n", + " [0.4628150463104248, 0.26765838265419006, 0.26...\n", " \n", " \n", " 19686\n", - " ๐Ÿ˜ฑ\n", - " [0.4501204788684845, 0.27288398146629333, 0.27...\n", + " In my head\n", " ๐Ÿ˜ญ\n", " [0.34310532030401736, 0.4364820846905538, 0.22...\n", - " In my head\n", + " ๐Ÿ˜ฌ\n", + " [0.4624774158000946, 0.26805710792541504, 0.26...\n", " \n", " \n", " 67193\n", - " ๐Ÿ˜ฑ\n", - " [0.45182353258132935, 0.2724774479866028, 0.27...\n", + " Jo, chilllllllll. This kid is mentally beati...\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " <USER> Jo, chilllllllll. This kid is mentally...\n", + " ๐Ÿ˜ฌ\n", + " [0.4607327878475189, 0.26832473278045654, 0.27...\n", " \n", " \n", " 40650\n", - " ๐Ÿ˜ฑ\n", - " [0.45002350211143494, 0.2710316777229309, 0.27...\n", + " Iโ€™m ready sis\n", " ๐Ÿ˜‡\n", " [0.6666666666666666, 0.06666666666666667, 0.26...\n", - " Iโ€™m ready sis\n", + " ๐Ÿ˜ฌ\n", + " [0.4610658884048462, 0.26784956455230713, 0.27...\n", " \n", " \n", " 64416\n", - " ๐Ÿ˜ฑ\n", - " [0.45193251967430115, 0.2698008418083191, 0.27...\n", + " Sometimes we need to know how to respect our p...\n", " ๐Ÿ˜Š\n", " [0.7040175768989329, 0.059322033898305086, 0.2...\n", - " Sometimes we need to know how to respect our p...\n", + " ๐Ÿ˜ฌ\n", + " [0.46121352910995483, 0.27051451802253723, 0.2...\n", " \n", " \n", " 19525\n", - " ๐Ÿ˜ฑ\n", - " [0.45155930519104004, 0.27194666862487793, 0.2...\n", + " Right?\n", " ๐Ÿ™†\n", " [0.5964912280701754, 0.08771929824561403, 0.31...\n", - " Right?\n", + " ๐Ÿ˜ฌ\n", + " [0.46745917201042175, 0.265864759683609, 0.266...\n", " \n", " \n", " 39024\n", - " ๐Ÿ˜ฑ\n", - " [0.4556638300418854, 0.26780301332473755, 0.27...\n", + " priceless love how got back at you\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " <USER> priceless love how <USER> got back at ...\n", + " ๐Ÿ˜ฌ\n", + " [0.4606868028640747, 0.267996221780777, 0.2713...\n", " \n", " \n", " 35342\n", - " ๐Ÿ˜ฑ\n", - " [0.452972412109375, 0.27151480317115784, 0.275...\n", + " Right\n", " ๐Ÿ˜ซ\n", " [0.3404710920770878, 0.4860813704496788, 0.173...\n", - " <USER> Right\n", + " ๐Ÿ˜ฌ\n", + " [0.46745917201042175, 0.265864759683609, 0.266...\n", " \n", " \n", " 9715\n", - " ๐Ÿ˜ฑ\n", - " [0.4522416293621063, 0.2719023823738098, 0.275...\n", + " Opo pang i will!\n", " ๐Ÿ˜‡\n", " [0.6666666666666666, 0.06666666666666667, 0.26...\n", - " Opo pang i will!\n", + " ๐Ÿ˜ฌ\n", + " [0.45438939332962036, 0.2717699408531189, 0.27...\n", " \n", " \n", " 40490\n", - " ๐Ÿ˜ฌ\n", - " [0.4545004665851593, 0.27230802178382874, 0.27...\n", + " WOW whomever was tasked with vetting applica...\n", " ๐Ÿ˜ฎ\n", " [0.45555555555555555, 0.17777777777777778, 0.3...\n", - " <USER> WOW whomever was tasked with vetting a...\n", + " ๐Ÿ˜ฌ\n", + " [0.46492910385131836, 0.2654315233230591, 0.26...\n", " \n", " \n", " 22064\n", - " ๐Ÿ˜ฌ\n", - " [0.4601033926010132, 0.2683771550655365, 0.271...\n", + " Thank you !\n", " ๐Ÿ˜\n", " [0.7296744771190439, 0.05173769460607014, 0.21...\n", - " Thank you <USER> !\n", + " ๐Ÿ˜ฌ\n", + " [0.46712905168533325, 0.26472848653793335, 0.2...\n", " \n", " \n", " 53563\n", - " ๐Ÿ˜ฑ\n", - " [0.45222631096839905, 0.2727421224117279, 0.27...\n", + " Dat daurin Dan kwali from behind is the real ...\n", " ๐Ÿ˜„\n", " 
[0.5586552217453505, 0.13662374821173104, 0.30...\n", - " Dat daurin Dan kwali from behind is the real ...\n", + " ๐Ÿ˜ฌ\n", + " [0.4643474817276001, 0.2679416835308075, 0.267...\n", " \n", " \n", " 67332\n", - " ๐Ÿ˜ฌ\n", - " [0.45455402135849, 0.27052536606788635, 0.2749...\n", + " I miss my baby so much โ™ฅ๏ธ\n", " ๐Ÿ˜ช\n", " [0.3506224066390041, 0.4315352697095436, 0.217...\n", - " I miss my baby so much โ™ฅ๏ธ\n", + " ๐Ÿ˜ฌ\n", + " [0.4613974094390869, 0.2683691382408142, 0.270...\n", " \n", " \n", " 15404\n", - " ๐Ÿ˜ฑ\n", - " [0.45141908526420593, 0.2713443338871002, 0.27...\n", + " alright i'm gonna go take a shower, let me kno...\n", " ๐Ÿ˜•\n", " [0.20294117647058824, 0.6029411764705882, 0.19...\n", - " alright i'm gonna go take a shower, let me kno...\n", + " ๐Ÿ˜ฌ\n", + " [0.46027225255966187, 0.2696966826915741, 0.27...\n", " \n", " \n", " 30130\n", - " ๐Ÿ˜ฌ\n", - " [0.4556472599506378, 0.2695685029029846, 0.274...\n", + " and neither are buying her music\n", " ๐Ÿ˜ณ\n", " [0.34515366430260047, 0.32742316784869974, 0.3...\n", - " <USER> and neither are buying her music\n", + " ๐Ÿ˜ฌ\n", + " [0.46355295181274414, 0.26728588342666626, 0.2...\n", " \n", " \n", " 37242\n", - " ๐Ÿ˜ฌ\n", - " [0.4568144977092743, 0.27149033546447754, 0.27...\n", + " I saw that too as well! I feel to have it dur...\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " <USER> I saw that too as well! I feel to have ...\n", + " ๐Ÿ˜ฌ\n", + " [0.46367916464805603, 0.26611489057540894, 0.2...\n", " \n", " \n", " 30533\n", - " ๐Ÿ˜ฌ\n", - " [0.45350709557533264, 0.27462711930274963, 0.2...\n", + " I had a mental break down but Iโ€™m back like hey\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " I had a mental break down but Iโ€™m back like hey\n", + " ๐Ÿ˜ฌ\n", + " [0.4640607535839081, 0.26755788922309875, 0.26...\n", " \n", " \n", " ...\n", @@ -1953,243 +2000,243 @@ " \n", " \n", " 16955\n", - " ๐Ÿ˜ฑ\n", - " [0.45130616426467896, 0.2745077311992645, 0.27...\n", + " Bless up Esquivel finally update aeries\n", " ๐Ÿ™\n", " [0.4983755685510071, 0.08057179987004548, 0.42...\n", - " Bless up Esquivel finally update aeries\n", + " ๐Ÿ˜ฌ\n", + " [0.46489447355270386, 0.26702189445495605, 0.2...\n", " \n", " \n", " 36805\n", - " ๐Ÿ˜ฌ\n", - " [0.4585663378238678, 0.26974013447761536, 0.27...\n", + " Look just like my Mizuno I had in โ€˜88\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " <USER> <USER> <USER> Look just like my Mizuno ...\n", + " ๐Ÿ˜ฌ\n", + " [0.4630420207977295, 0.26803845167160034, 0.26...\n", " \n", " \n", " 56326\n", - " ๐Ÿ˜ฑ\n", - " [0.45519477128982544, 0.2686183452606201, 0.27...\n", + " This felt like it happened yesterday.\n", " ๐Ÿ˜ข\n", " [0.39118825100133514, 0.38451268357810414, 0.2...\n", - " This felt like it happened yesterday. 
<HASHTAG>\n", + " ๐Ÿ˜ฌ\n", + " [0.4617263972759247, 0.26921841502189636, 0.26...\n", " \n", " \n", " 40306\n", - " ๐Ÿ˜ฑ\n", - " [0.449861079454422, 0.2748417556285858, 0.2752...\n", + " Our Guest photo this week highlights the beaut...\n", " ๐Ÿ˜\n", " [0.7296744771190439, 0.05173769460607014, 0.21...\n", - " Our Guest photo this week highlights the beaut...\n", + " ๐Ÿ˜ฌ\n", + " [0.46200209856033325, 0.26775091886520386, 0.2...\n", " \n", " \n", " 51333\n", - " ๐Ÿ˜ฑ\n", - " [0.4518674910068512, 0.2723572850227356, 0.275...\n", + " I need to see what โ€œthe stranger thingsโ€ is ab...\n", " ๐Ÿ˜Œ\n", " [0.6240601503759399, 0.13984962406015036, 0.23...\n", - " I need to see what โ€œthe stranger thingsโ€ is ab...\n", + " ๐Ÿ˜ฌ\n", + " [0.46260306239128113, 0.26784563064575195, 0.2...\n", " \n", " \n", " 42339\n", - " ๐Ÿ˜ฌ\n", - " [0.4573005437850952, 0.2696344256401062, 0.273...\n", + " Plz can u make me a pc I donโ€™t have enough mo...\n", " ๐Ÿ˜”\n", " [0.31784232365145226, 0.46390041493775935, 0.2...\n", - " <USER> Plz can u make me a pc I donโ€™t have eno...\n", + " ๐Ÿ˜ฌ\n", + " [0.4600381553173065, 0.26839619874954224, 0.27...\n", " \n", " \n", " 7644\n", - " ๐Ÿ˜ฑ\n", - " [0.4539974331855774, 0.2710561156272888, 0.274...\n", + " this is insulting\n", " ๐Ÿ˜ง\n", " [0.32608695652173914, 0.391304347826087, 0.282...\n", - " <USER> this is insulting\n", + " ๐Ÿ˜ฌ\n", + " [0.4657018482685089, 0.2653878927230835, 0.268...\n", " \n", " \n", " 13953\n", - " ๐Ÿ˜ฑ\n", - " [0.4520905911922455, 0.27237412333488464, 0.27...\n", + " I know this might sound crazy but I rather Don...\n", " ๐Ÿ˜\n", " [0.16296296296296298, 0.5555555555555556, 0.28...\n", - " I know this might sound crazy but I rather Don...\n", + " ๐Ÿ˜ฌ\n", + " [0.4625128209590912, 0.26891759037971497, 0.26...\n", " \n", " \n", " 26460\n", - " ๐Ÿ˜ฑ\n", - " [0.4516184329986572, 0.2706548869609833, 0.277...\n", + " Just a couple of cute pugs hanging out\n", " ๐Ÿ˜›\n", " [0.6909090909090909, 0.08181818181818182, 0.22...\n", - " Just a couple of cute pugs hanging out\n", + " ๐Ÿ˜ฌ\n", + " [0.46720531582832336, 0.26648813486099243, 0.2...\n", " \n", " \n", " 31571\n", - " ๐Ÿ˜ฑ\n", - " [0.4516292214393616, 0.2722293734550476, 0.276...\n", + " She knows nothing will happen to her.. welcom...\n", " ๐Ÿ˜’\n", " [0.21660649819494585, 0.5913357400722021, 0.19...\n", - " <USER> She knows nothing will happen to her.. 
...\n", + " ๐Ÿ˜ฌ\n", + " [0.4603578746318817, 0.2692199945449829, 0.270...\n", " \n", " \n", " 9536\n", - " ๐Ÿ˜ฑ\n", - " [0.45403042435646057, 0.27084800601005554, 0.2...\n", + " happy raisin\n", " ๐Ÿ˜ญ\n", " [0.34310532030401736, 0.4364820846905538, 0.22...\n", - " <USER> happy raisin\n", + " ๐Ÿ˜ฌ\n", + " [0.4630625247955322, 0.26809123158454895, 0.26...\n", " \n", " \n", " 19571\n", - " ๐Ÿ˜ฑ\n", - " [0.45092839002609253, 0.273369699716568, 0.275...\n", + " I'm so disappointed\n", " ๐Ÿ˜ก\n", " [0.35978835978835977, 0.533068783068783, 0.107...\n", - " I'm so disappointed\n", + " ๐Ÿ˜ฌ\n", + " [0.4639342725276947, 0.26599037647247314, 0.27...\n", " \n", " \n", " 35934\n", - " ๐Ÿ˜ฑ\n", - " [0.45402011275291443, 0.2693026065826416, 0.27...\n", + " Nailed it.\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " <USER> <USER> Nailed it.\n", + " ๐Ÿ˜ฌ\n", + " [0.46198946237564087, 0.26730334758758545, 0.2...\n", " \n", " \n", " 4605\n", - " ๐Ÿ˜ฌ\n", - " [0.45629507303237915, 0.2681595981121063, 0.27...\n", + " oh itโ€™s more than a thing!\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " <USER> oh itโ€™s more than a thing!\n", + " ๐Ÿ˜ฌ\n", + " [0.46141430735588074, 0.268981009721756, 0.269...\n", " \n", " \n", " 1913\n", - " ๐Ÿ˜ฌ\n", - " [0.45600560307502747, 0.26834917068481445, 0.2...\n", + " stop it\n", " ๐Ÿ˜ญ\n", " [0.34310532030401736, 0.4364820846905538, 0.22...\n", - " <USER> stop it\n", + " ๐Ÿ˜ฌ\n", + " [0.4630756378173828, 0.2665313482284546, 0.270...\n", " \n", " \n", " 11827\n", - " ๐Ÿ˜ฌ\n", - " [0.45951539278030396, 0.26939070224761963, 0.2...\n", + " Have an awesome โค๏ธ\n", " ๐Ÿ˜„\n", " [0.5586552217453505, 0.13662374821173104, 0.30...\n", - " Have an awesome <HASHTAG> โค๏ธ <HASHTAG>\n", + " ๐Ÿ˜ฌ\n", + " [0.46398892998695374, 0.26717185974121094, 0.2...\n", " \n", " \n", " 15181\n", - " ๐Ÿ˜ฌ\n", - " [0.4616319239139557, 0.2665380537509918, 0.271...\n", + " when I see elbows ?\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " <USER> when I see elbows ?\n", + " ๐Ÿ˜ฌ\n", + " [0.45438939332962036, 0.2717699408531189, 0.27...\n", " \n", " \n", " 32869\n", - " ๐Ÿ˜ฌ\n", - " [0.45471736788749695, 0.27229392528533936, 0.2...\n", + " Damn Jani back at it again with the BB-8 costu...\n", " ๐Ÿ˜น\n", " [0.4406779661016949, 0.2983050847457627, 0.261...\n", - " Damn Jani back at it again with the BB-8 costu...\n", + " ๐Ÿ˜ฌ\n", + " [0.4616232216358185, 0.2673903703689575, 0.270...\n", " \n", " \n", " 40784\n", - " ๐Ÿ˜ฌ\n", - " [0.45305681228637695, 0.2729623019695282, 0.27...\n", + " I think a big part of the reason Nick Wilde is...\n", " ๐Ÿ˜ป\n", " [0.6906474820143885, 0.0671462829736211, 0.242...\n", - " I think a big part of the reason Nick Wilde is...\n", + " ๐Ÿ˜ฌ\n", + " [0.46204957365989685, 0.26713815331459045, 0.2...\n", " \n", " \n", " 18611\n", - " ๐Ÿ˜ฑ\n", - " [0.4504826068878174, 0.27414649724960327, 0.27...\n", + " My 12 year old brother on Altuve- โ€œheโ€™s like t...\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " My 12 year old brother on Altuve- โ€œheโ€™s like t...\n", + " ๐Ÿ˜ฌ\n", + " [0.4634704887866974, 0.2669770121574402, 0.269...\n", " \n", " \n", " 28341\n", - " ๐Ÿ˜ฑ\n", - " [0.45293256640434265, 0.27156686782836914, 0.2...\n", + " Bruh like how she know the exact number?\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " <USER> Bruh like how she know the exact number?\n", + " ๐Ÿ˜ฌ\n", + " [0.46148979663848877, 0.26869532465934753, 0.2...\n", " \n", " 
\n", " 41368\n", - " ๐Ÿ˜ฑ\n", - " [0.4524029791355133, 0.27189141511917114, 0.27...\n", + " They starting early\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " They starting early\n", + " ๐Ÿ˜ฌ\n", + " [0.4605691730976105, 0.26995062828063965, 0.26...\n", " \n", " \n", " 35554\n", - " ๐Ÿ˜ฌ\n", - " [0.45386624336242676, 0.2720849812030792, 0.27...\n", + " Please come through\n", " ๐Ÿ˜ฉ\n", " [0.22289823008849557, 0.5912610619469026, 0.18...\n", - " Please come through <USER>\n", + " ๐Ÿ˜ฌ\n", + " [0.46297168731689453, 0.2675779163837433, 0.26...\n", " \n", " \n", " 27968\n", - " ๐Ÿ˜ฌ\n", - " [0.4601033926010132, 0.2683771550655365, 0.271...\n", + " Thank you\n", " ๐Ÿ˜\n", " [0.7296744771190439, 0.05173769460607014, 0.21...\n", - " <USER> Thank you\n", + " ๐Ÿ˜ฌ\n", + " [0.46712905168533325, 0.26472848653793335, 0.2...\n", " \n", " \n", " 36772\n", - " ๐Ÿ˜ฑ\n", - " [0.45253145694732666, 0.2714029550552368, 0.27...\n", + " Imagine if the Vikings had a top 10 QB\n", " ๐Ÿ˜ณ\n", " [0.34515366430260047, 0.32742316784869974, 0.3...\n", - " Imagine if the Vikings had a top 10 QB\n", + " ๐Ÿ˜ฌ\n", + " [0.4609829783439636, 0.26704034209251404, 0.27...\n", " \n", " \n", " 7568\n", - " ๐Ÿ˜ฑ\n", - " [0.4514819085597992, 0.27036288380622864, 0.27...\n", + " Le costume de cupcakke.......\n", " ๐Ÿ˜ญ\n", " [0.34310532030401736, 0.4364820846905538, 0.22...\n", - " Le costume de cupcakke.......\n", + " ๐Ÿ˜ฌ\n", + " [0.46219775080680847, 0.26761168241500854, 0.2...\n", " \n", " \n", " 62837\n", - " ๐Ÿ˜ฌ\n", - " [0.4553525149822235, 0.27044105529785156, 0.27...\n", + " aM FKXNSK SCREAMING ODM\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " <USER> aM FKXNSK SCREAMING ODM\n", + " ๐Ÿ˜ฌ\n", + " [0.46401336789131165, 0.2654089033603668, 0.27...\n", " \n", " \n", " 34105\n", - " ๐Ÿ˜ฑ\n", - " [0.4522460997104645, 0.27333661913871765, 0.27...\n", + " My birthday tomorrow\n", " ๐Ÿ˜‡\n", " [0.6666666666666666, 0.06666666666666667, 0.26...\n", - " My birthday tomorrow\n", + " ๐Ÿ˜ฌ\n", + " [0.4665740728378296, 0.26650139689445496, 0.26...\n", " \n", " \n", " 59069\n", - " ๐Ÿ˜ฌ\n", - " [0.45729154348373413, 0.26964816451072693, 0.2...\n", + " im just tryna change your life\n", " ๐Ÿ˜‰\n", " [0.5634451019066403, 0.0992767915844839, 0.337...\n", - " im just tryna change your life\n", + " ๐Ÿ˜ฌ\n", + " [0.4622177183628082, 0.268806129693985, 0.2689...\n", " \n", " \n", " 47088\n", - " ๐Ÿ˜ฑ\n", - " [0.4503130614757538, 0.27487510442733765, 0.27...\n", + " Do I look like Iโ€™m off 79th . Foh\n", " ๐Ÿ˜‚\n", " [0.46813021474490496, 0.24716181096977158, 0.2...\n", - " Do I look like Iโ€™m off 79th . Foh\n", + " ๐Ÿ˜ฌ\n", + " [0.46383899450302124, 0.26802247762680054, 0.2...\n", " \n", " \n", "\n", @@ -2197,194 +2244,194 @@ "" ], "text/plain": [ - " predict predicted_sentiment teacher \\\n", - "7618 ๐Ÿ˜ฌ [0.45662233233451843, 0.27109310030937195, 0.2... ๐Ÿ˜’ \n", - "6910 ๐Ÿ˜ฑ [0.454468309879303, 0.27005136013031006, 0.275... ๐Ÿ˜‚ \n", - "35783 ๐Ÿ˜ฌ [0.4557249844074249, 0.271492600440979, 0.2727... ๐Ÿ˜ญ \n", - "15623 ๐Ÿ˜ฌ [0.4551065266132355, 0.27315083146095276, 0.27... ๐Ÿ˜… \n", - "12023 ๐Ÿ˜ฑ [0.44956669211387634, 0.2718639373779297, 0.27... ๐Ÿ˜ญ \n", - "15763 ๐Ÿ˜ฑ [0.4546020030975342, 0.27004513144493103, 0.27... ๐Ÿ˜‚ \n", - "57240 ๐Ÿ˜ฑ [0.4514758884906769, 0.2734237313270569, 0.275... ๐Ÿ˜Ž \n", - "2418 ๐Ÿ˜ฌ [0.4555293321609497, 0.2700120508670807, 0.274... ๐Ÿ˜‚ \n", - "66384 ๐Ÿ˜ฌ [0.4541899561882019, 0.27133306860923767, 0.27... 
๐Ÿ˜ \n", - "44639 ๐Ÿ˜ฌ [0.45672306418418884, 0.2679496109485626, 0.27... ๐Ÿ˜‚ \n", - "54463 ๐Ÿ˜ฑ [0.45094504952430725, 0.2715355157852173, 0.27... ๐Ÿ™Œ \n", - "38407 ๐Ÿ˜ฑ [0.45006927847862244, 0.2722932994365692, 0.27... ๐Ÿ˜Ž \n", - "31342 ๐Ÿ˜ฌ [0.45655250549316406, 0.26978597044944763, 0.2... ๐Ÿ˜„ \n", - "41255 ๐Ÿ˜ฑ [0.45397621393203735, 0.27026963233947754, 0.2... ๐Ÿ˜‚ \n", - "19686 ๐Ÿ˜ฑ [0.4501204788684845, 0.27288398146629333, 0.27... ๐Ÿ˜ญ \n", - "67193 ๐Ÿ˜ฑ [0.45182353258132935, 0.2724774479866028, 0.27... ๐Ÿ˜‚ \n", - "40650 ๐Ÿ˜ฑ [0.45002350211143494, 0.2710316777229309, 0.27... ๐Ÿ˜‡ \n", - "64416 ๐Ÿ˜ฑ [0.45193251967430115, 0.2698008418083191, 0.27... ๐Ÿ˜Š \n", - "19525 ๐Ÿ˜ฑ [0.45155930519104004, 0.27194666862487793, 0.2... ๐Ÿ™† \n", - "39024 ๐Ÿ˜ฑ [0.4556638300418854, 0.26780301332473755, 0.27... ๐Ÿ˜‚ \n", - "35342 ๐Ÿ˜ฑ [0.452972412109375, 0.27151480317115784, 0.275... ๐Ÿ˜ซ \n", - "9715 ๐Ÿ˜ฑ [0.4522416293621063, 0.2719023823738098, 0.275... ๐Ÿ˜‡ \n", - "40490 ๐Ÿ˜ฌ [0.4545004665851593, 0.27230802178382874, 0.27... ๐Ÿ˜ฎ \n", - "22064 ๐Ÿ˜ฌ [0.4601033926010132, 0.2683771550655365, 0.271... ๐Ÿ˜ \n", - "53563 ๐Ÿ˜ฑ [0.45222631096839905, 0.2727421224117279, 0.27... ๐Ÿ˜„ \n", - "67332 ๐Ÿ˜ฌ [0.45455402135849, 0.27052536606788635, 0.2749... ๐Ÿ˜ช \n", - "15404 ๐Ÿ˜ฑ [0.45141908526420593, 0.2713443338871002, 0.27... ๐Ÿ˜• \n", - "30130 ๐Ÿ˜ฌ [0.4556472599506378, 0.2695685029029846, 0.274... ๐Ÿ˜ณ \n", - "37242 ๐Ÿ˜ฌ [0.4568144977092743, 0.27149033546447754, 0.27... ๐Ÿ˜‚ \n", - "30533 ๐Ÿ˜ฌ [0.45350709557533264, 0.27462711930274963, 0.2... ๐Ÿ˜‚ \n", - "... ... ... ... \n", - "16955 ๐Ÿ˜ฑ [0.45130616426467896, 0.2745077311992645, 0.27... ๐Ÿ™ \n", - "36805 ๐Ÿ˜ฌ [0.4585663378238678, 0.26974013447761536, 0.27... ๐Ÿ˜‚ \n", - "56326 ๐Ÿ˜ฑ [0.45519477128982544, 0.2686183452606201, 0.27... ๐Ÿ˜ข \n", - "40306 ๐Ÿ˜ฑ [0.449861079454422, 0.2748417556285858, 0.2752... ๐Ÿ˜ \n", - "51333 ๐Ÿ˜ฑ [0.4518674910068512, 0.2723572850227356, 0.275... ๐Ÿ˜Œ \n", - "42339 ๐Ÿ˜ฌ [0.4573005437850952, 0.2696344256401062, 0.273... ๐Ÿ˜” \n", - "7644 ๐Ÿ˜ฑ [0.4539974331855774, 0.2710561156272888, 0.274... ๐Ÿ˜ง \n", - "13953 ๐Ÿ˜ฑ [0.4520905911922455, 0.27237412333488464, 0.27... ๐Ÿ˜ \n", - "26460 ๐Ÿ˜ฑ [0.4516184329986572, 0.2706548869609833, 0.277... ๐Ÿ˜› \n", - "31571 ๐Ÿ˜ฑ [0.4516292214393616, 0.2722293734550476, 0.276... ๐Ÿ˜’ \n", - "9536 ๐Ÿ˜ฑ [0.45403042435646057, 0.27084800601005554, 0.2... ๐Ÿ˜ญ \n", - "19571 ๐Ÿ˜ฑ [0.45092839002609253, 0.273369699716568, 0.275... ๐Ÿ˜ก \n", - "35934 ๐Ÿ˜ฑ [0.45402011275291443, 0.2693026065826416, 0.27... ๐Ÿ˜‚ \n", - "4605 ๐Ÿ˜ฌ [0.45629507303237915, 0.2681595981121063, 0.27... ๐Ÿ˜‚ \n", - "1913 ๐Ÿ˜ฌ [0.45600560307502747, 0.26834917068481445, 0.2... ๐Ÿ˜ญ \n", - "11827 ๐Ÿ˜ฌ [0.45951539278030396, 0.26939070224761963, 0.2... ๐Ÿ˜„ \n", - "15181 ๐Ÿ˜ฌ [0.4616319239139557, 0.2665380537509918, 0.271... ๐Ÿ˜‚ \n", - "32869 ๐Ÿ˜ฌ [0.45471736788749695, 0.27229392528533936, 0.2... ๐Ÿ˜น \n", - "40784 ๐Ÿ˜ฌ [0.45305681228637695, 0.2729623019695282, 0.27... ๐Ÿ˜ป \n", - "18611 ๐Ÿ˜ฑ [0.4504826068878174, 0.27414649724960327, 0.27... ๐Ÿ˜‚ \n", - "28341 ๐Ÿ˜ฑ [0.45293256640434265, 0.27156686782836914, 0.2... ๐Ÿ˜‚ \n", - "41368 ๐Ÿ˜ฑ [0.4524029791355133, 0.27189141511917114, 0.27... ๐Ÿ˜‚ \n", - "35554 ๐Ÿ˜ฌ [0.45386624336242676, 0.2720849812030792, 0.27... ๐Ÿ˜ฉ \n", - "27968 ๐Ÿ˜ฌ [0.4601033926010132, 0.2683771550655365, 0.271... ๐Ÿ˜ \n", - "36772 ๐Ÿ˜ฑ [0.45253145694732666, 0.2714029550552368, 0.27... ๐Ÿ˜ณ \n", - "7568 ๐Ÿ˜ฑ [0.4514819085597992, 0.27036288380622864, 0.27... 
๐Ÿ˜ญ \n", - "62837 ๐Ÿ˜ฌ [0.4553525149822235, 0.27044105529785156, 0.27... ๐Ÿ˜‚ \n", - "34105 ๐Ÿ˜ฑ [0.4522460997104645, 0.27333661913871765, 0.27... ๐Ÿ˜‡ \n", - "59069 ๐Ÿ˜ฌ [0.45729154348373413, 0.26964816451072693, 0.2... ๐Ÿ˜‰ \n", - "47088 ๐Ÿ˜ฑ [0.4503130614757538, 0.27487510442733765, 0.27... ๐Ÿ˜‚ \n", + " text teacher \\\n", + "7618 There's fucking snow outside ๐Ÿ˜’ \n", + "6910 You look so animated bro! *Rimshot*, Get it? ... ๐Ÿ˜‚ \n", + "35783 Jon ordered it from a restaurant ๐Ÿ˜ญ \n", + "15623 I just want to move back in with my mom ๐Ÿ˜… \n", + "12023 guys lets vote!!! ๐Ÿ˜ญ \n", + "15763 Bruhhhh this man need a show ASAP!!! ๐Ÿ˜‚ \n", + "57240 AND THE RICH SIT IN A LOW PLACE ๐Ÿ˜Ž \n", + "2418 Time to go to bed ๐Ÿ˜‚ \n", + "66384 May sound like a hillbilly, but girls that can... ๐Ÿ˜ \n", + "44639 IM DYING ๐Ÿ˜‚ \n", + "54463 Black forest hot chocolate is coming back to C... ๐Ÿ™Œ \n", + "38407 5 days till my 21st. ๐Ÿ˜Ž \n", + "31342 I'm just going to leave my Twitter acct open ... ๐Ÿ˜„ \n", + "41255 Oh god im so happy ๐Ÿ˜‚ \n", + "19686 In my head ๐Ÿ˜ญ \n", + "67193 Jo, chilllllllll. This kid is mentally beati... ๐Ÿ˜‚ \n", + "40650 Iโ€™m ready sis ๐Ÿ˜‡ \n", + "64416 Sometimes we need to know how to respect our p... ๐Ÿ˜Š \n", + "19525 Right? ๐Ÿ™† \n", + "39024 priceless love how got back at you ๐Ÿ˜‚ \n", + "35342 Right ๐Ÿ˜ซ \n", + "9715 Opo pang i will! ๐Ÿ˜‡ \n", + "40490 WOW whomever was tasked with vetting applica... ๐Ÿ˜ฎ \n", + "22064 Thank you ! ๐Ÿ˜ \n", + "53563 Dat daurin Dan kwali from behind is the real ... ๐Ÿ˜„ \n", + "67332 I miss my baby so much โ™ฅ๏ธ ๐Ÿ˜ช \n", + "15404 alright i'm gonna go take a shower, let me kno... ๐Ÿ˜• \n", + "30130 and neither are buying her music ๐Ÿ˜ณ \n", + "37242 I saw that too as well! I feel to have it dur... ๐Ÿ˜‚ \n", + "30533 I had a mental break down but Iโ€™m back like hey ๐Ÿ˜‚ \n", + "... ... ... \n", + "16955 Bless up Esquivel finally update aeries ๐Ÿ™ \n", + "36805 Look just like my Mizuno I had in โ€˜88 ๐Ÿ˜‚ \n", + "56326 This felt like it happened yesterday. ๐Ÿ˜ข \n", + "40306 Our Guest photo this week highlights the beaut... ๐Ÿ˜ \n", + "51333 I need to see what โ€œthe stranger thingsโ€ is ab... ๐Ÿ˜Œ \n", + "42339 Plz can u make me a pc I donโ€™t have enough mo... ๐Ÿ˜” \n", + "7644 this is insulting ๐Ÿ˜ง \n", + "13953 I know this might sound crazy but I rather Don... ๐Ÿ˜ \n", + "26460 Just a couple of cute pugs hanging out ๐Ÿ˜› \n", + "31571 She knows nothing will happen to her.. welcom... ๐Ÿ˜’ \n", + "9536 happy raisin ๐Ÿ˜ญ \n", + "19571 I'm so disappointed ๐Ÿ˜ก \n", + "35934 Nailed it. ๐Ÿ˜‚ \n", + "4605 oh itโ€™s more than a thing! ๐Ÿ˜‚ \n", + "1913 stop it ๐Ÿ˜ญ \n", + "11827 Have an awesome โค๏ธ ๐Ÿ˜„ \n", + "15181 when I see elbows ? ๐Ÿ˜‚ \n", + "32869 Damn Jani back at it again with the BB-8 costu... ๐Ÿ˜น \n", + "40784 I think a big part of the reason Nick Wilde is... ๐Ÿ˜ป \n", + "18611 My 12 year old brother on Altuve- โ€œheโ€™s like t... ๐Ÿ˜‚ \n", + "28341 Bruh like how she know the exact number? ๐Ÿ˜‚ \n", + "41368 They starting early ๐Ÿ˜‚ \n", + "35554 Please come through ๐Ÿ˜ฉ \n", + "27968 Thank you ๐Ÿ˜ \n", + "36772 Imagine if the Vikings had a top 10 QB ๐Ÿ˜ณ \n", + "7568 Le costume de cupcakke....... ๐Ÿ˜ญ \n", + "62837 aM FKXNSK SCREAMING ODM ๐Ÿ˜‚ \n", + "34105 My birthday tomorrow ๐Ÿ˜‡ \n", + "59069 im just tryna change your life ๐Ÿ˜‰ \n", + "47088 Do I look like Iโ€™m off 79th . Foh ๐Ÿ˜‚ \n", "\n", - " teacher_sentiment \\\n", - "7618 [0.21660649819494585, 0.5913357400722021, 0.19... 
\n", - "6910 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "35783 [0.34310532030401736, 0.4364820846905538, 0.22... \n", - "15623 [0.47186147186147187, 0.2922077922077922, 0.23... \n", - "12023 [0.34310532030401736, 0.4364820846905538, 0.22... \n", - "15763 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "57240 [0.5981432360742706, 0.10477453580901856, 0.29... \n", - "2418 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "66384 [0.7296744771190439, 0.05173769460607014, 0.21... \n", - "44639 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "54463 [0.6613545816733067, 0.10092961487383798, 0.23... \n", - "38407 [0.5981432360742706, 0.10477453580901856, 0.29... \n", - "31342 [0.5586552217453505, 0.13662374821173104, 0.30... \n", - "41255 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "19686 [0.34310532030401736, 0.4364820846905538, 0.22... \n", - "67193 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "40650 [0.6666666666666666, 0.06666666666666667, 0.26... \n", - "64416 [0.7040175768989329, 0.059322033898305086, 0.2... \n", - "19525 [0.5964912280701754, 0.08771929824561403, 0.31... \n", - "39024 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "35342 [0.3404710920770878, 0.4860813704496788, 0.173... \n", - "9715 [0.6666666666666666, 0.06666666666666667, 0.26... \n", - "40490 [0.45555555555555555, 0.17777777777777778, 0.3... \n", - "22064 [0.7296744771190439, 0.05173769460607014, 0.21... \n", - "53563 [0.5586552217453505, 0.13662374821173104, 0.30... \n", - "67332 [0.3506224066390041, 0.4315352697095436, 0.217... \n", - "15404 [0.20294117647058824, 0.6029411764705882, 0.19... \n", - "30130 [0.34515366430260047, 0.32742316784869974, 0.3... \n", - "37242 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "30533 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "... ... \n", - "16955 [0.4983755685510071, 0.08057179987004548, 0.42... \n", - "36805 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "56326 [0.39118825100133514, 0.38451268357810414, 0.2... \n", - "40306 [0.7296744771190439, 0.05173769460607014, 0.21... \n", - "51333 [0.6240601503759399, 0.13984962406015036, 0.23... \n", - "42339 [0.31784232365145226, 0.46390041493775935, 0.2... \n", - "7644 [0.32608695652173914, 0.391304347826087, 0.282... \n", - "13953 [0.16296296296296298, 0.5555555555555556, 0.28... \n", - "26460 [0.6909090909090909, 0.08181818181818182, 0.22... \n", - "31571 [0.21660649819494585, 0.5913357400722021, 0.19... \n", - "9536 [0.34310532030401736, 0.4364820846905538, 0.22... \n", - "19571 [0.35978835978835977, 0.533068783068783, 0.107... \n", - "35934 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "4605 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "1913 [0.34310532030401736, 0.4364820846905538, 0.22... \n", - "11827 [0.5586552217453505, 0.13662374821173104, 0.30... \n", - "15181 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "32869 [0.4406779661016949, 0.2983050847457627, 0.261... \n", - "40784 [0.6906474820143885, 0.0671462829736211, 0.242... \n", - "18611 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "28341 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "41368 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "35554 [0.22289823008849557, 0.5912610619469026, 0.18... \n", - "27968 [0.7296744771190439, 0.05173769460607014, 0.21... \n", - "36772 [0.34515366430260047, 0.32742316784869974, 0.3... \n", - "7568 [0.34310532030401736, 0.4364820846905538, 0.22... 
\n", - "62837 [0.46813021474490496, 0.24716181096977158, 0.2... \n", - "34105 [0.6666666666666666, 0.06666666666666667, 0.26... \n", - "59069 [0.5634451019066403, 0.0992767915844839, 0.337... \n", - "47088 [0.46813021474490496, 0.24716181096977158, 0.2... \n", + " teacher_sentiment predict \\\n", + "7618 [0.21660649819494585, 0.5913357400722021, 0.19... ๐Ÿ˜ฌ \n", + "6910 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "35783 [0.34310532030401736, 0.4364820846905538, 0.22... ๐Ÿ˜ฌ \n", + "15623 [0.47186147186147187, 0.2922077922077922, 0.23... ๐Ÿ˜ฌ \n", + "12023 [0.34310532030401736, 0.4364820846905538, 0.22... ๐Ÿ˜ฌ \n", + "15763 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "57240 [0.5981432360742706, 0.10477453580901856, 0.29... ๐Ÿ˜ฌ \n", + "2418 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "66384 [0.7296744771190439, 0.05173769460607014, 0.21... ๐Ÿ˜ฌ \n", + "44639 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "54463 [0.6613545816733067, 0.10092961487383798, 0.23... ๐Ÿ˜ฌ \n", + "38407 [0.5981432360742706, 0.10477453580901856, 0.29... ๐Ÿ˜ฌ \n", + "31342 [0.5586552217453505, 0.13662374821173104, 0.30... ๐Ÿ˜ฌ \n", + "41255 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "19686 [0.34310532030401736, 0.4364820846905538, 0.22... ๐Ÿ˜ฌ \n", + "67193 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "40650 [0.6666666666666666, 0.06666666666666667, 0.26... ๐Ÿ˜ฌ \n", + "64416 [0.7040175768989329, 0.059322033898305086, 0.2... ๐Ÿ˜ฌ \n", + "19525 [0.5964912280701754, 0.08771929824561403, 0.31... ๐Ÿ˜ฌ \n", + "39024 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "35342 [0.3404710920770878, 0.4860813704496788, 0.173... ๐Ÿ˜ฌ \n", + "9715 [0.6666666666666666, 0.06666666666666667, 0.26... ๐Ÿ˜ฌ \n", + "40490 [0.45555555555555555, 0.17777777777777778, 0.3... ๐Ÿ˜ฌ \n", + "22064 [0.7296744771190439, 0.05173769460607014, 0.21... ๐Ÿ˜ฌ \n", + "53563 [0.5586552217453505, 0.13662374821173104, 0.30... ๐Ÿ˜ฌ \n", + "67332 [0.3506224066390041, 0.4315352697095436, 0.217... ๐Ÿ˜ฌ \n", + "15404 [0.20294117647058824, 0.6029411764705882, 0.19... ๐Ÿ˜ฌ \n", + "30130 [0.34515366430260047, 0.32742316784869974, 0.3... ๐Ÿ˜ฌ \n", + "37242 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "30533 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "... ... ... \n", + "16955 [0.4983755685510071, 0.08057179987004548, 0.42... ๐Ÿ˜ฌ \n", + "36805 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "56326 [0.39118825100133514, 0.38451268357810414, 0.2... ๐Ÿ˜ฌ \n", + "40306 [0.7296744771190439, 0.05173769460607014, 0.21... ๐Ÿ˜ฌ \n", + "51333 [0.6240601503759399, 0.13984962406015036, 0.23... ๐Ÿ˜ฌ \n", + "42339 [0.31784232365145226, 0.46390041493775935, 0.2... ๐Ÿ˜ฌ \n", + "7644 [0.32608695652173914, 0.391304347826087, 0.282... ๐Ÿ˜ฌ \n", + "13953 [0.16296296296296298, 0.5555555555555556, 0.28... ๐Ÿ˜ฌ \n", + "26460 [0.6909090909090909, 0.08181818181818182, 0.22... ๐Ÿ˜ฌ \n", + "31571 [0.21660649819494585, 0.5913357400722021, 0.19... ๐Ÿ˜ฌ \n", + "9536 [0.34310532030401736, 0.4364820846905538, 0.22... ๐Ÿ˜ฌ \n", + "19571 [0.35978835978835977, 0.533068783068783, 0.107... ๐Ÿ˜ฌ \n", + "35934 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "4605 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "1913 [0.34310532030401736, 0.4364820846905538, 0.22... ๐Ÿ˜ฌ \n", + "11827 [0.5586552217453505, 0.13662374821173104, 0.30... ๐Ÿ˜ฌ \n", + "15181 [0.46813021474490496, 0.24716181096977158, 0.2... 
๐Ÿ˜ฌ \n", + "32869 [0.4406779661016949, 0.2983050847457627, 0.261... ๐Ÿ˜ฌ \n", + "40784 [0.6906474820143885, 0.0671462829736211, 0.242... ๐Ÿ˜ฌ \n", + "18611 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "28341 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "41368 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "35554 [0.22289823008849557, 0.5912610619469026, 0.18... ๐Ÿ˜ฌ \n", + "27968 [0.7296744771190439, 0.05173769460607014, 0.21... ๐Ÿ˜ฌ \n", + "36772 [0.34515366430260047, 0.32742316784869974, 0.3... ๐Ÿ˜ฌ \n", + "7568 [0.34310532030401736, 0.4364820846905538, 0.22... ๐Ÿ˜ฌ \n", + "62837 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", + "34105 [0.6666666666666666, 0.06666666666666667, 0.26... ๐Ÿ˜ฌ \n", + "59069 [0.5634451019066403, 0.0992767915844839, 0.337... ๐Ÿ˜ฌ \n", + "47088 [0.46813021474490496, 0.24716181096977158, 0.2... ๐Ÿ˜ฌ \n", "\n", - " text \n", - "7618 There's fucking snow outside \n", - "6910 You look so animated bro! *Rimshot*, Ge... \n", - "35783 Jon ordered it from a restaurant \n", - "15623 I just want to move back in with my mom \n", - "12023 guys lets vote!!! \n", - "15763 Bruhhhh this man need a show ASAP!!! \n", - "57240 AND THE RICH SIT IN A LOW PLACE \n", - "2418 Time to go to bed \n", - "66384 May sound like a hillbilly, but girls that can... \n", - "44639 IM DYING \n", - "54463 Black forest hot chocolate is coming back to C... \n", - "38407 5 days till my 21st. \n", - "31342 I'm just going to leave my Twitter acct... \n", - "41255 Oh god im so happy \n", - "19686 In my head \n", - "67193 Jo, chilllllllll. This kid is mentally... \n", - "40650 Iโ€™m ready sis \n", - "64416 Sometimes we need to know how to respect our p... \n", - "19525 Right? \n", - "39024 priceless love how got back at ... \n", - "35342 Right \n", - "9715 Opo pang i will! \n", - "40490 WOW whomever was tasked with vetting a... \n", - "22064 Thank you ! \n", - "53563 Dat daurin Dan kwali from behind is the real ... \n", - "67332 I miss my baby so much โ™ฅ๏ธ \n", - "15404 alright i'm gonna go take a shower, let me kno... \n", - "30130 and neither are buying her music \n", - "37242 I saw that too as well! I feel to have ... \n", - "30533 I had a mental break down but Iโ€™m back like hey \n", + " predicted_sentiment \n", + "7618 [0.46037721633911133, 0.2677566707134247, 0.27... \n", + "6910 [0.4636985957622528, 0.26727861166000366, 0.26... \n", + "35783 [0.4635818898677826, 0.2673886716365814, 0.269... \n", + "15623 [0.46267321705818176, 0.2673114538192749, 0.27... \n", + "12023 [0.4621191620826721, 0.26737213134765625, 0.27... \n", + "15763 [0.46237292885780334, 0.2675153613090515, 0.27... \n", + "57240 [0.4622046649456024, 0.2677682340145111, 0.270... \n", + "2418 [0.4639696180820465, 0.26709380745887756, 0.26... \n", + "66384 [0.4639129638671875, 0.26741719245910645, 0.26... \n", + "44639 [0.46023082733154297, 0.27063173055648804, 0.2... \n", + "54463 [0.46065258979797363, 0.2685313820838928, 0.27... \n", + "38407 [0.4624767005443573, 0.2678660750389099, 0.269... \n", + "31342 [0.4628538191318512, 0.2673165500164032, 0.269... \n", + "41255 [0.4628150463104248, 0.26765838265419006, 0.26... \n", + "19686 [0.4624774158000946, 0.26805710792541504, 0.26... \n", + "67193 [0.4607327878475189, 0.26832473278045654, 0.27... \n", + "40650 [0.4610658884048462, 0.26784956455230713, 0.27... \n", + "64416 [0.46121352910995483, 0.27051451802253723, 0.2... \n", + "19525 [0.46745917201042175, 0.265864759683609, 0.266... 
\n", + "39024 [0.4606868028640747, 0.267996221780777, 0.2713... \n", + "35342 [0.46745917201042175, 0.265864759683609, 0.266... \n", + "9715 [0.45438939332962036, 0.2717699408531189, 0.27... \n", + "40490 [0.46492910385131836, 0.2654315233230591, 0.26... \n", + "22064 [0.46712905168533325, 0.26472848653793335, 0.2... \n", + "53563 [0.4643474817276001, 0.2679416835308075, 0.267... \n", + "67332 [0.4613974094390869, 0.2683691382408142, 0.270... \n", + "15404 [0.46027225255966187, 0.2696966826915741, 0.27... \n", + "30130 [0.46355295181274414, 0.26728588342666626, 0.2... \n", + "37242 [0.46367916464805603, 0.26611489057540894, 0.2... \n", + "30533 [0.4640607535839081, 0.26755788922309875, 0.26... \n", "... ... \n", - "16955 Bless up Esquivel finally update aeries \n", - "36805 Look just like my Mizuno ... \n", - "56326 This felt like it happened yesterday. \n", - "40306 Our Guest photo this week highlights the beaut... \n", - "51333 I need to see what โ€œthe stranger thingsโ€ is ab... \n", - "42339 Plz can u make me a pc I donโ€™t have eno... \n", - "7644 this is insulting \n", - "13953 I know this might sound crazy but I rather Don... \n", - "26460 Just a couple of cute pugs hanging out \n", - "31571 She knows nothing will happen to her.. ... \n", - "9536 happy raisin \n", - "19571 I'm so disappointed \n", - "35934 Nailed it. \n", - "4605 oh itโ€™s more than a thing! \n", - "1913 stop it \n", - "11827 Have an awesome โค๏ธ \n", - "15181 when I see elbows ? \n", - "32869 Damn Jani back at it again with the BB-8 costu... \n", - "40784 I think a big part of the reason Nick Wilde is... \n", - "18611 My 12 year old brother on Altuve- โ€œheโ€™s like t... \n", - "28341 Bruh like how she know the exact number? \n", - "41368 They starting early \n", - "35554 Please come through \n", - "27968 Thank you \n", - "36772 Imagine if the Vikings had a top 10 QB \n", - "7568 Le costume de cupcakke....... \n", - "62837 aM FKXNSK SCREAMING ODM \n", - "34105 My birthday tomorrow \n", - "59069 im just tryna change your life \n", - "47088 Do I look like Iโ€™m off 79th . Foh \n", + "16955 [0.46489447355270386, 0.26702189445495605, 0.2... \n", + "36805 [0.4630420207977295, 0.26803845167160034, 0.26... \n", + "56326 [0.4617263972759247, 0.26921841502189636, 0.26... \n", + "40306 [0.46200209856033325, 0.26775091886520386, 0.2... \n", + "51333 [0.46260306239128113, 0.26784563064575195, 0.2... \n", + "42339 [0.4600381553173065, 0.26839619874954224, 0.27... \n", + "7644 [0.4657018482685089, 0.2653878927230835, 0.268... \n", + "13953 [0.4625128209590912, 0.26891759037971497, 0.26... \n", + "26460 [0.46720531582832336, 0.26648813486099243, 0.2... \n", + "31571 [0.4603578746318817, 0.2692199945449829, 0.270... \n", + "9536 [0.4630625247955322, 0.26809123158454895, 0.26... \n", + "19571 [0.4639342725276947, 0.26599037647247314, 0.27... \n", + "35934 [0.46198946237564087, 0.26730334758758545, 0.2... \n", + "4605 [0.46141430735588074, 0.268981009721756, 0.269... \n", + "1913 [0.4630756378173828, 0.2665313482284546, 0.270... \n", + "11827 [0.46398892998695374, 0.26717185974121094, 0.2... \n", + "15181 [0.45438939332962036, 0.2717699408531189, 0.27... \n", + "32869 [0.4616232216358185, 0.2673903703689575, 0.270... \n", + "40784 [0.46204957365989685, 0.26713815331459045, 0.2... \n", + "18611 [0.4634704887866974, 0.2669770121574402, 0.269... \n", + "28341 [0.46148979663848877, 0.26869532465934753, 0.2... \n", + "41368 [0.4605691730976105, 0.26995062828063965, 0.26... 
\n", + "35554 [0.46297168731689453, 0.2675779163837433, 0.26... \n", + "27968 [0.46712905168533325, 0.26472848653793335, 0.2... \n", + "36772 [0.4609829783439636, 0.26704034209251404, 0.27... \n", + "7568 [0.46219775080680847, 0.26761168241500854, 0.2... \n", + "62837 [0.46401336789131165, 0.2654089033603668, 0.27... \n", + "34105 [0.4665740728378296, 0.26650139689445496, 0.26... \n", + "59069 [0.4622177183628082, 0.268806129693985, 0.2689... \n", + "47088 [0.46383899450302124, 0.26802247762680054, 0.2... \n", "\n", "[3337 rows x 5 columns]" ]