updated german dictionary
parent ccefd63539
commit ff5cbafe2b
@@ -1188,24 +1188,24 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": 30,
"source": [
"get_database(\"de\")['sol']"
"get_database(\"de\")['ore']"
],
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"{'word': 'Sol',\n",
" 'senses': ['Chemie: eine kolloidale Suspension von festen Partikeln oder Makromolekülen in einer flüssigen Phase'],\n",
"{'word': 'Ore',\n",
" 'senses': ['Münze, die in Dänemark, Norwegen und Schweden verwendet wird'],\n",
" 'synonyms': [],\n",
" 'antonyms': ['Gel'],\n",
" 'num_translations': 6}"
" 'antonyms': [],\n",
" 'num_translations': 10}"
]
},
"metadata": {},
"execution_count": 9
"execution_count": 30
}
],
"metadata": {}
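Note: the get_database helper called above is not defined in this diff; presumably it loads the per-language JSON file (de.json, written at the end of the notebook below). A minimal sketch under that assumption, not the repository's actual implementation:

import json
from functools import lru_cache

@lru_cache(maxsize=None)
def get_database(lang: str) -> dict:
    # Hypothetical loader: read the per-language dictionary dumped by the
    # notebook below (e.g. de.json), keyed by a normalized form of the word.
    with open(f"{lang}.json", encoding="utf-8") as f:
        return json.load(f)

# get_database("de")["ore"] would then return the entry shown above, e.g.
# {'word': 'Ore', 'senses': [...], 'synonyms': [], 'antonyms': [], 'num_translations': 10}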
@@ -3,8 +3,6 @@
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"import re\n",
@@ -19,13 +17,13 @@
"import wikitextparser as wtp\n",
"import numpy as np\n",
"import unidecode"
]
],
"outputs": [],
"metadata": {}
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"def extract_data(wikitext: str, title: str):\n",
"\n",
@@ -113,23 +111,13 @@
"\n",
" \n",
" return data\n"
]
],
"outputs": [],
"metadata": {}
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"tags": []
},
"outputs": [
{
"output_type": "stream",
"name": "stderr",
"text": [
"989351it [04:32, 3636.28it/s]\n"
]
}
],
"source": [
"bzfile_path = \"dewiktionary-20210201-pages-articles.xml.bz2\"\n",
"bz = BZ2File(bzfile_path)\n",
@@ -144,6 +132,9 @@
" word = word.replace(\"ä\", \"ae\")\n",
" word = word.replace(\"ü\", \"ue\")\n",
" word = word.replace(\"ö\", \"oe\")\n",
" word = word.replace(\"Ä\", \"Ae\")\n",
" word = word.replace(\"Ü\", \"Ue\")\n",
" word = word.replace(\"Ö\", \"Oe\")\n",
" word = word.replace(\"ß\", \"ss\")\n",
"\n",
" \n",
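Note: the replace calls added in this hunk (uppercase Ä/Ü/Ö) complete the transliteration of German special characters so every dictionary key becomes plain ASCII. A compact equivalent of the chained replacements, for illustration only:

UMLAUT_MAP = {"ä": "ae", "ö": "oe", "ü": "ue",
              "Ä": "Ae", "Ö": "Oe", "Ü": "Ue", "ß": "ss"}

def asciify_german(word: str) -> str:
    # Same effect as the chained word.replace(...) calls in the cell above.
    for src, dst in UMLAUT_MAP.items():
        word = word.replace(src, dst)
    return word

# asciify_german("Größe") -> "Groesse"; asciify_german("Übermut") -> "Uebermut"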
@@ -163,22 +154,30 @@
"\n",
" i += 1\n",
"\n"
]
],
"outputs": [
{
"output_type": "stream",
"name": "stderr",
"text": [
"989351it [07:14, 2274.95it/s]\n"
]
}
],
"metadata": {
"tags": []
}
},
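Note: the loop that walks the 989,351 dump entries sits outside the hunks shown here. As a rough sketch of how such a compressed Wiktionary dump is typically streamed (the notebook's actual loop may differ), assuming an element-wise iterparse over the BZ2File handle:

import xml.etree.ElementTree as ET
from bz2 import BZ2File
import tqdm

bz = BZ2File("dewiktionary-20210201-pages-articles.xml.bz2")
for _, elem in tqdm.tqdm(ET.iterparse(bz)):
    # Namespace-agnostic match on </page> end events.
    if elem.tag.endswith("page"):
        title = elem.findtext("./{*}title", default="")
        wikitext = elem.findtext(".//{*}text", default="")
        # extract_data(wikitext, title) from the cell above would be applied here.
        elem.clear()  # keep memory bounded while streaming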
{
"cell_type": "markdown",
"source": [
"## filter database from links and only keep \"relevant\" items"
],
"cell_type": "markdown",
"metadata": {}
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"tags": []
},
"outputs": [],
"execution_count": 4,
"source": [
"def clean_text(s:str):\n",
"\n",
@@ -234,14 +233,133 @@
"\n",
" return text\n",
"\n"
]
],
"outputs": [],
"metadata": {
"tags": []
}
},
{
"cell_type": "code",
"execution_count": 12,
"source": [
"def word_wise_filtering(sentence: str):\n",
" words = sentence.split()\n",
" for i, word in enumerate(words.copy()):\n",
" if \"|\" in word:\n",
" splitted = word.split(\"|\")\n",
" if len(splitted) == 2:\n",
" words[i] = splitted[1]\n",
" return \" \".join(words)\n"
],
"outputs": [],
"metadata": {}
},
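Note: word_wise_filtering only rewrites tokens that split into exactly two parts around a single "|" (keeping the part after the pipe); tokens with more pipes pass through unchanged, which is what the test cell further below exercises. Illustrative calls (the first input is hypothetical):

word_wise_filtering("siehe Haus|Haeuser")    # -> "siehe Haeuser" (one "|", second part kept)
word_wise_filtering("hallo welten|welt|df")  # -> "hallo welten|welt|df" (two "|", left as-is)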
{
"cell_type": "code",
"execution_count": 23,
"source": [
"parse(\"{}[{:d}]{}\", \"[12]\") is None"
],
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"True"
]
},
"metadata": {},
"execution_count": 23
}
],
"metadata": {}
},
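Note: this cell probes the pattern used later in filter_entries: "{}[{:d}]{}" only matches when a bracketed integer has non-empty text on both sides, so a bare "[12]" yields None (hence the True output). A hedged illustration; the second input is an assumed example:

from parse import parse

parse("{}[{:d}]{}", "[12]")                  # -> None, as in the cell above
parse("{}[{:d}]{}", "siehe auch [2] unten")  # -> a Result object (assumed example)
# In filter_entries below, a non-None result marks a hint that still contains a
# bracketed sense reference, and that hint is skipped.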
{
"cell_type": "code",
"execution_count": 14,
"source": [
"word_wise_filtering(\"hallo welten|welt|df\")"
],
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"'hallo welten|welt|df'"
]
},
"metadata": {},
"execution_count": 14
}
],
"metadata": {}
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"tags": []
},
"source": [
"filtered_db = {}\n",
"\n",
"for key, item in tqdm.tqdm(db.items()):\n",
" senses = item['senses']\n",
" synonyms = item['synonyms']\n",
" antonyms = item['antonyms']\n",
"\n",
" def filter_entries(entries: list):\n",
" new_list = []\n",
" for entry in entries:\n",
" try:\n",
" cleaned_entry = clean_text(entry)\n",
" # sort out bad hints:\n",
" if \"{\" in cleaned_entry:\n",
" continue\n",
" if item['word'].lower() in cleaned_entry.lower():\n",
" continue\n",
" if \"familienname\" in cleaned_entry.lower():\n",
" continue\n",
" if \"ortsteil von\" in cleaned_entry.lower():\n",
" continue\n",
" if \"dorf in\" in cleaned_entry.lower():\n",
" continue\n",
" if parse(\"{}[{:d}]{}\", cleaned_entry) is not None:\n",
" continue\n",
" if entry.isupper(): # try to sort out initialisms\n",
" continue\n",
" \n",
" \n",
" cleaned_entry = word_wise_filtering (cleaned_entry)\n",
"\n",
" new_list.append(cleaned_entry)\n",
" except:\n",
" #print(\"cannot process item\", entry)\n",
" # just skipping unprocessable items\n",
" pass\n",
"\n",
" \n",
" return new_list\n",
" \n",
" item['senses'] = filter_entries(senses)\n",
" item['synonyms'] = filter_entries(synonyms)\n",
" item['antonyms'] = filter_entries(antonyms)\n",
"\n",
" # clean key from special characters:\n",
" unaccented_key = unidecode.unidecode(key)\n",
" unaccented_word = unidecode.unidecode(item['word'])\n",
"\n",
" def has_digit(s: str):\n",
" for c in s:\n",
" if c.isdigit():\n",
" return True\n",
" return False\n",
"\n",
" n_hints = len(item['senses']) + len(item['synonyms']) + len(item['antonyms'])\n",
"\n",
" if (n_hints == 1 and item['num_translations'] >= 6) or n_hints > 1:\n",
" if not has_digit(item['word']):\n",
" item['word'] = unaccented_word\n",
" filtered_db[unaccented_key] = item\n"
],
"outputs": [
{
"output_type": "stream",
@@ -305,70 +423,16 @@
]
}
],
"source": [
"filtered_db = {}\n",
"\n",
"for key, item in tqdm.tqdm(db.items()):\n",
" senses = item['senses']\n",
" synonyms = item['synonyms']\n",
" antonyms = item['antonyms']\n",
"\n",
" def filter_entries(entries: list):\n",
" new_list = []\n",
" for entry in entries:\n",
" try:\n",
" cleaned_entry = clean_text(entry)\n",
" # sort out bad hints:\n",
" if \"{\" in cleaned_entry:\n",
" continue\n",
" if item['word'].lower() in cleaned_entry.lower():\n",
" continue\n",
" if \"familienname\" in cleaned_entry.lower():\n",
" continue\n",
" if \"ortsteil von\" in cleaned_entry.lower():\n",
" continue\n",
" if \"dorf in\" in cleaned_entry.lower():\n",
" continue\n",
" if entry.isupper(): # try to sort out initialisms\n",
" continue\n",
" \n",
" cleaned_entry = cleaned_entry.replace(\"|\",\"/\")\n",
"\n",
" new_list.append(cleaned_entry)\n",
" except:\n",
" #print(\"cannot process item\", entry)\n",
" # just skipping unprocessable items\n",
" pass\n",
"\n",
" \n",
" return new_list\n",
" \n",
" item['senses'] = filter_entries(senses)\n",
" item['synonyms'] = filter_entries(synonyms)\n",
" item['antonyms'] = filter_entries(antonyms)\n",
"\n",
" # clean key from special characters:\n",
" unaccented_key = unidecode.unidecode(key)\n",
" unaccented_word = unidecode.unidecode(item['word'])\n",
"\n",
" def has_digit(s: str):\n",
" for c in s:\n",
" if c.isdigit():\n",
" return True\n",
" return False\n",
"\n",
" n_hints = len(item['senses']) + len(item['synonyms']) + len(item['antonyms'])\n",
"\n",
" if (n_hints == 1 and item['num_translations'] >= 6) or n_hints > 1:\n",
" if not has_digit(item['word']):\n",
" item['word'] = unaccented_word\n",
" filtered_db[unaccented_key] = item\n"
]
"metadata": {
"tags": []
}
},
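Note: the keep rule at the end of this cell can be read against the 'Ore' entry from the first hunk: one sense and no synonyms or antonyms give n_hints = 1, and num_translations = 10 clears the >= 6 threshold, so the entry survives filtering. Worked example:

n_hints = 1 + 0 + 0                               # senses + synonyms + antonyms of 'Ore'
keep = (n_hints == 1 and 10 >= 6) or n_hints > 1  # -> True
# The entry therefore stays in filtered_db, which is why get_database("de")['ore'] finds it.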
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"source": [
"db['muenchen']"
],
"outputs": [
{
"output_type": "execute_result",
@@ -391,14 +455,14 @@
"execution_count": 13
}
],
"source": [
"db['muenchen']"
]
"metadata": {}
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"source": [
"len(filtered_db)"
],
"outputs": [
{
"output_type": "execute_result",
@@ -411,24 +475,26 @@
"execution_count": 16
}
],
"source": [
"len(filtered_db)"
]
"metadata": {}
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {},
"outputs": [],
"source": [
"with open('de.json', 'w') as f:\n",
" json.dump(filtered_db, f, indent = 4)"
]
],
"outputs": [],
"metadata": {}
},
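Note: after the dump to de.json, a quick round-trip check (not part of the notebook, just a sanity-check sketch) could confirm the written file matches filtered_db:

with open("de.json", encoding="utf-8") as f:
    reloaded = json.load(f)
assert len(reloaded) == len(filtered_db)  # same entry count as reported by len(filtered_db)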
{
"cell_type": "code",
"execution_count": 25,
"metadata": {},
"source": [
"l = [1,2,3]\n",
"l.remove(4)\n",
"l"
],
"outputs": [
{
"output_type": "error",
@@ -442,18 +508,14 @@
]
}
],
"source": [
"l = [1,2,3]\n",
"l.remove(4)\n",
"l"
]
"metadata": {}
},
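Note: this cell errors out because list.remove raises ValueError when the value is absent. A tolerant variant, for reference:

l = [1, 2, 3]
if 4 in l:          # guard before removing
    l.remove(4)
# or swallow the error explicitly:
try:
    l.remove(4)
except ValueError:
    pass
# l stays [1, 2, 3] either way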
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"source": [],
"outputs": [],
"source": []
"metadata": {}
}
],
"metadata": {
BIN data/de.json (Stored with Git LFS)
Binary file not shown.
BIN server/de.json (Stored with Git LFS)
Binary file not shown.