master-thesis/RecipeAnalysis/AdjacencyMatrix.ipynb

{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Adjacency Matrix"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"from scipy.sparse import csr_matrix, lil_matrix, coo_matrix"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"ename": "SyntaxError",
"evalue": "invalid syntax (<ipython-input-2-b94eb869690f>, line 63)",
"output_type": "error",
"traceback": [
"\u001b[0;36m File \u001b[0;32m\"<ipython-input-2-b94eb869690f>\"\u001b[0;36m, line \u001b[0;32m63\u001b[0m\n\u001b[0;31m if label in self._label_document_count[label] += 1\u001b[0m\n\u001b[0m ^\u001b[0m\n\u001b[0;31mSyntaxError\u001b[0m\u001b[0;31m:\u001b[0m invalid syntax\n"
]
}
],
"source": [
"class adj_matrix(object):\n",
" def __init__(self, symmetric_indices=False):\n",
" \n",
" self._sym = symmetric_indices\n",
" if not symmetric_indices:\n",
" self._x_labels = []\n",
" self._y_labels = []\n",
"\n",
" self._x_label_index={}\n",
" self._y_label_index={}\n",
" \n",
" else:\n",
" self._labels = []\n",
" self._label_index={}\n",
" \n",
" self._x = []\n",
" self._y = []\n",
" self._data = []\n",
" \n",
" self._mat = None\n",
" self._csr = None\n",
"\n",
" # for a TF-IDF like approach we need also a counter how frequently ingredients\n",
" # and actions appear in documents. \n",
"\n",
" self._current_document_labels = set()\n",
" self._label_document_count = {}\n",
" \n",
" self._document_count = 0\n",
" \n",
" # building type dependend functions:\n",
" self._build_funcs()\n",
" \n",
" def _get_ix(self, label):\n",
" i = self._x_label_index.get(label)\n",
" if i is None:\n",
" i = len(self._x_labels)\n",
" self._x_labels.append(label)\n",
" self._x_label_index[label] = i\n",
" return i\n",
" \n",
" def _get_iy(self, label):\n",
" i = self._y_label_index.get(label)\n",
" if i is None:\n",
" i = len(self._y_labels)\n",
" self._y_labels.append(label)\n",
" self._y_label_index[label] = i\n",
" return i\n",
" \n",
" def _get_i(self, label):\n",
" i = self._label_index.get(label)\n",
" if i is None:\n",
" i = len(self._labels)\n",
" self._labels.append(label)\n",
" self._label_index[label] = i\n",
" return i\n",
"\n",
" def _end_document(self):\n",
" self._document_count += 1\n",
"\n",
" # adding all seen labels to our counter:\n",
" for label in self._current_document_labels:\n",
" self._label_document_count[label] += 1\n",
" else:\n",
" self._label_document_count[label] = 1\n",
" \n",
" self._current_document_labels = set()\n",
" \n",
" def apply_threshold(self, min_count=5):\n",
" csr = self.get_csr()\n",
"\n",
" new_x = []\n",
" new_y = []\n",
" new_data = []\n",
"\n",
" for i in range(len(self._data)):\n",
" if csr[self._x[i],self._y[i]] >= min_count:\n",
" new_x.append(self._x[i])\n",
" new_y.append(self._y[i])\n",
" new_data.append(self._data[i])\n",
" \n",
" self._x = new_x\n",
" self._y = new_y\n",
" self._data = new_data\n",
"\n",
" \n",
" def next_document(self):\n",
" self._end_document()\n",
"\n",
" \n",
" def add_entry(self, x, y, data):\n",
" \n",
" if self._sym:\n",
" ix = self._get_i(x)\n",
" iy = self._get_i(y)\n",
" \n",
" else:\n",
" ix = self._get_ix(x)\n",
" iy = self._get_iy(y)\n",
" \n",
" self._x.append(ix)\n",
" self._y.append(iy)\n",
" self._data.append(data)\n",
"\n",
" self._current_document_labels.add(x)\n",
" self._current_document_labels.add(y)\n",
" \n",
" def compile(self):\n",
" self._csr = None\n",
" self._csr = self.get_csr()\n",
" if self._sym:\n",
" self._np_labels = np.array(self._labels)\n",
" else:\n",
" self._np_x_labels = np.array(self._x_labels)\n",
" self._np_y_labels = np.array(self._y_labels)\n",
" \n",
" \n",
" def compile_to_mat(self):\n",
" if self._sym:\n",
" sx = len(self._labels)\n",
" sy = len(self._labels)\n",
" else:\n",
" sx = len(self._x_labels)\n",
" sy = len(self._y_labels)\n",
" \n",
" self._mat = coo_matrix((self._data, (self._x, self._y)), shape=(sx,sy))\n",
" return self._mat\n",
" \n",
" def get_csr(self):\n",
" if self._csr is None:\n",
" return self.compile_to_mat().tocsr()\n",
" return self._csr\n",
" \n",
" def get_labels(self):\n",
" if self._sym:\n",
" return self._labels\n",
" return self._x_labels, self._y_labels\n",
" \n",
" def _build_funcs(self):\n",
" \n",
" def get_sym_adjacent(key):\n",
" assert self._csr is not None\n",
" \n",
" c = self._csr\n",
" \n",
" index = self._label_index[key]\n",
" i1 = c[index,:].nonzero()[1]\n",
" i2 = c[:,index].nonzero()[0]\n",
"\n",
" i = np.concatenate((i1,i2))\n",
"\n",
" names = self._np_labels[i]\n",
"\n",
" counts = np.concatenate((c[index, i1].toarray().flatten(), c[i2, index].toarray().flatten()))\n",
"\n",
" s = np.argsort(-counts)\n",
"\n",
" return names[s], counts[s]\n",
" \n",
" def get_forward_adjacent(key):\n",
" assert self._csr is not None\n",
" \n",
" c = self._csr\n",
" \n",
" index = self._x_label_index[key]\n",
" i = c[index,:].nonzero()[1]\n",
"\n",
" names = self._np_y_labels[i]\n",
"\n",
" counts = c[index, i].toarray().flatten()\n",
"\n",
" s = np.argsort(-counts)\n",
"\n",
" return names[s], counts[s]\n",
" \n",
" def get_backward_adjacent(key):\n",
" assert self._csr is not None\n",
" \n",
" c = self._csr\n",
" \n",
" index = self._y_label_index[key]\n",
" i = c[:,index].nonzero()[0]\n",
"\n",
" \n",
" names = self._np_x_labels[i]\n",
"\n",
" counts = c[i, index].toarray().flatten()\n",
"\n",
" s = np.argsort(-counts)\n",
"\n",
" return names[s], counts[s]\n",
" \n",
" # sum functions:\n",
" def sym_sum(key):\n",
" return np.sum(self.get_adjacent(key)[1])\n",
"\n",
" def fw_sum(key):\n",
" return np.sum(self.get_forward_adjacent(key)[1])\n",
"\n",
" def bw_sum(key):\n",
" return np.sum(self.get_backward_adjacent(key)[1])\n",
" \n",
" # normalization stuff:\n",
" def fw_normalization_factor(key, quotient_func):\n",
" assert self._csr is not None\n",
" c = self._csr\n",
" \n",
" ia = self._x_label_index[key]\n",
"\n",
" occurances = c[ia,:].nonzero()[1]\n",
"\n",
" return 1. / quotient_func(c[ia,occurances].toarray())\n",
"\n",
" def bw_normalization_factor(key, quotient_func):\n",
" assert self._csr is not None\n",
" \n",
" c = self._csr\n",
" \n",
" ib = m._y_label_index[key]\n",
"\n",
" occurances = c[:,ib].nonzero()[0]\n",
"\n",
" return 1. / quotient_func(c[occurances,ib].toarray())\n",
"\n",
" def sym_normalization_factor(key, quotient_func):\n",
" assert self._csr is not None\n",
" \n",
" c = self._csr\n",
" \n",
" ii = m._label_index[key]\n",
"\n",
" fw_occurances = c[ii,:].nonzero()[1]\n",
" bw_occurances = c[:,ii].nonzero()[0]\n",
"\n",
" return 1. / quotient_func(np.concatenate(\n",
" [c[ii,fw_occurances].toarray().flatten(),\n",
" c[bw_occurances,ii].toarray().flatten()]\n",
" ))\n",
" \n",
" def sym_p_a_given_b(key_a, key_b, quot_func = np.max):\n",
" assert self._csr is not None\n",
" \n",
" c = self._csr\n",
" \n",
" ia = m._label_index[key_a]\n",
" ib = m._label_index[key_b]\n",
"\n",
" v = c[ia,ib] + c[ib,ia]\n",
"\n",
" return v * self.sym_normalization_factor(key_b, quot_func)\n",
"\n",
" def fw_p_a_given_b(key_a, key_b, quot_func = np.max):\n",
" assert self._csr is not None\n",
" \n",
" c = self._csr\n",
" \n",
" ia = m._x_label_index[key_a]\n",
" ib = m._y_label_index[key_b]\n",
"\n",
" v = c[ia,ib]\n",
"\n",
" return v * self.bw_normalization_factor(key_b, quot_func)\n",
"\n",
" def bw_p_a_given_b(key_a, key_b, quot_func = np.max):\n",
" assert self._csr is not None\n",
" \n",
" c = self._csr\n",
" \n",
" ia = m._y_label_index[key_a]\n",
" ib = m._x_label_index[key_b]\n",
"\n",
" v = c[ib,ia]\n",
"\n",
" return v * self.fw_normalization_factor(key_b, quot_func)\n",
"\n",
" \n",
" if self._sym:\n",
" self.get_adjacent = get_sym_adjacent\n",
" self.get_sum = sym_sum\n",
" self.get_sym_normalization_factor = sym_normalization_factor\n",
" self.p_a_given_b = sym_p_a_given_b\n",
" \n",
" else:\n",
" self.get_forward_adjacent = get_forward_adjacent\n",
" self.get_backward_adjacent = get_backward_adjacent\n",
" \n",
" self.get_fw_sum = fw_sum\n",
" self.get_bw_sum = bw_sum\n",
" \n",
" self.get_fw_normalization_factor = fw_normalization_factor\n",
" self.get_bw_normalization_factor = bw_normalization_factor\n",
"\n",
" self.fw_p_a_given_b = fw_p_a_given_b\n",
" self.bw_p_a_given_b = bw_p_a_given_b\n"
]
},
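{
"cell_type": "markdown",
"metadata": {},
"source": [
"A minimal usage sketch (added for illustration; the toy labels such as heat, oil and onion are hypothetical, not taken from the recipe corpus): build a non-symmetric action-to-ingredient matrix and a symmetric co-occurrence matrix, compile them, and query adjacencies and the normalized co-occurrence scores."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# usage sketch with hypothetical toy data, only to illustrate the API above\n",
"\n",
"# non-symmetric case: rows are x labels (e.g. actions), columns are y labels (e.g. ingredients)\n",
"m = adj_matrix(symmetric_indices=False)\n",
"\n",
"m.add_entry(\"heat\", \"oil\", 1)\n",
"m.add_entry(\"chop\", \"onion\", 1)\n",
"m.next_document()\n",
"\n",
"m.add_entry(\"heat\", \"oil\", 1)\n",
"m.add_entry(\"heat\", \"pan\", 1)\n",
"m.next_document()\n",
"\n",
"m.compile()\n",
"\n",
"print(m.get_forward_adjacent(\"heat\"))   # y labels reachable from \"heat\", sorted by count\n",
"print(m.get_backward_adjacent(\"oil\"))   # x labels pointing to \"oil\"\n",
"print(m.fw_p_a_given_b(\"heat\", \"oil\"))  # count scaled by the normalization factor of \"oil\"\n",
"print(m._label_document_count)          # internal document frequencies for a TF-IDF-like weighting\n",
"\n",
"# symmetric case: a single shared label index for both axes\n",
"ms = adj_matrix(symmetric_indices=True)\n",
"\n",
"ms.add_entry(\"onion\", \"oil\", 1)\n",
"ms.add_entry(\"onion\", \"salt\", 1)\n",
"ms.next_document()\n",
"\n",
"ms.compile()\n",
"\n",
"print(ms.get_adjacent(\"onion\"))\n",
"print(ms.p_a_given_b(\"oil\", \"onion\"))"
]
},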
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"file_extension": ".py",
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.5"
}
},
"nbformat": 4,
"nbformat_minor": 4
}