saving recipe id

commit d3bcb10efa
parent eae9478e59
@@ -11,6 +11,8 @@ import numpy as np
 sys.path.insert(0, '..')
 import settings # noqa
 
+import gzip
+
 
 class ConlluSentenceIterator(object):
     def __init__(self, conllu_reader):
@@ -19,7 +21,12 @@ class ConlluSentenceIterator(object):
         self._open()
 
     def _open(self):
-        self._fileobj = open(self.conllu_reader._path, 'r')
+        if self.conllu_reader._path.endswith(".gz"):
+            self._fileobj = gzip.open(self.conllu_reader._path, 'r')
+            self._nextline = self.read_byte_line
+        else:
+            self._fileobj = open(self.conllu_reader._path, 'r')
+            self._nextline = self.read_str_line
 
     def __next__(self):
         next_sent = self.next_sentence()
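
The new _open() dispatches on the file extension: gzip-compressed corpora get a byte-oriented readline, plain files a text one. A minimal standalone sketch of that dispatch pattern, assuming a hypothetical open_maybe_gzipped helper and file name:

import gzip

def open_maybe_gzipped(path):
    # gzip.open() in its default binary mode yields bytes per line,
    # so the caller has to decode; plain open() already yields str.
    if path.endswith(".gz"):
        fileobj = gzip.open(path, 'r')
        nextline = lambda: fileobj.readline().decode("utf-8")
    else:
        fileobj = open(path, 'r')
        nextline = fileobj.readline
    return fileobj, nextline

# usage with a hypothetical corpus file:
# fileobj, nextline = open_maybe_gzipped("recipes0.conllu.gz")
# line = nextline()
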
@@ -27,10 +34,16 @@ class ConlluSentenceIterator(object):
             raise StopIteration
         return next_sent
 
+    def read_str_line(self):
+        return self._fileobj.readline()
+
+    def read_byte_line(self):
+        return self._fileobj.readline().decode("utf-8")
+
     def next_sentence(self):
         data = ""
         while True:
-            line = self._fileobj.readline()
+            line = self._nextline()
             if line == "":
                 break
             data += line
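
The decode("utf-8") in read_byte_line is needed because gzip.open() defaults to binary mode. A hedged side note, not what this commit does: opening the archive in text mode would make both branches return str directly.

import gzip

# text mode ('rt') lets gzip decode internally, so readline() returns str
# and the read_byte_line/decode indirection would not be necessary
# (file name is hypothetical):
with gzip.open("recipes0.conllu.gz", "rt", encoding="utf-8") as f:
    first_line = f.readline()
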
@@ -54,13 +67,24 @@ class ConlluDocumentIterator(object):
         self._open()
 
     def _open(self):
-        self._fileobj = open(self.conllu_reader._path, 'r')
+        if self.conllu_reader._path.endswith(".gz"):
+            self._fileobj = gzip.open(self.conllu_reader._path, 'r')
+            self._nextline = self.read_byte_line
+        else:
+            self._fileobj = open(self.conllu_reader._path, 'r')
+            self._nextline = self.read_str_line
+
+    def read_str_line(self):
+        return self._fileobj.readline()
+
+    def read_byte_line(self):
+        return self._fileobj.readline().decode("utf-8")
 
     def next_document(self):
         data = ""
         last_line_empty = False
         while True:
-            line = self._fileobj.readline()
+            line = self._nextline()
             if line == "":
                 break
             data += line
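
ConlluDocumentIterator gets the identical open/readline dispatch. The iterators only touch the reader's _path attribute, so a minimal stand-in object is enough to drive them; everything below apart from the class names taken from this diff is assumed:

class FakeConlluReader(object):
    # stand-in for the real reader object; only ._path is read by _open()
    def __init__(self, path):
        self._path = path

# hypothetical gzipped corpus:
# it = ConlluDocumentIterator(FakeConlluReader("recipes0.conllu.gz"))
# first_doc = it.next_document()
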
@@ -121,14 +121,17 @@ class ConlluSentence(object):
 
 
 class ConlluDocument(object):
-    def __init__(self):
+    def __init__(self, id=None):
         self.conllu_sentences = []
+        self.id = id
 
     def add(self, conllu_sentence: ConlluSentence):
         self.conllu_sentences.append(conllu_sentence)
 
     def __repr__(self):
         result = "# newdoc\n"
+        if self.id is not None:
+            result += "# id: " + self.id + "\n"
         for elem in self.conllu_sentences:
             result += elem.__repr__() + "\n"
 
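
With the optional id, the serialized document header gains an id comment. A sketch of what __repr__ now produces (the recipe id value is made up):

doc = ConlluDocument(id="000018c8a5")
repr(doc)
# -> '# newdoc\n# id: 000018c8a5\n'
#    followed by one block per sentence once sentences are added
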
@@ -139,7 +142,7 @@ class ConlluDocument(object):
 
 
 class ConlluGenerator(object):
-    def __init__(self, documents: list, stemmed_multi_word_tokens, stemmer=PorterStemmer()):
+    def __init__(self, documents: list, stemmed_multi_word_tokens, stemmer=PorterStemmer(), ids=None):
         self.documents = documents
         self.stemmed_multi_word_tokens = stemmed_multi_word_tokens
         self.mwe_tokenizer = StemmedMWETokenizer(
@@ -150,9 +153,12 @@ class ConlluGenerator(object):
 
         self.conllu_documents = []
 
+        self.ids = ids
+
     def tokenize_and_stem(self):
         tokenized_documents = []
 
+        i = 0
         for doc in self.documents:
             tokenized_sentences = []
             sentences = doc.split("\n")
@@ -165,6 +171,9 @@ class ConlluGenerator(object):
 
         # now create initial colln-u elemnts
         for doc in tokenized_documents:
-            conllu_doc = ConlluDocument()
+            if self.ids:
+                conllu_doc = ConlluDocument(self.ids[i])
+            else:
+                conllu_doc = ConlluDocument()
             self.id_counter = 0
             for sent in doc:
@@ -184,6 +193,7 @@ class ConlluGenerator(object):
                 self.id_counter += 1
                 conllu_doc.add(conllu_sent)
             self.conllu_documents.append(conllu_doc)
+            i += 1
 
     def pos_tagging(self):
         for conllu_document in self.conllu_documents:
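
tokenize_and_stem() now keeps a running index i so that the i-th tokenized document is paired with self.ids[i]; i += 1 runs once per document, after its sentences are added. The pairing logic in isolation, with made-up data:

ids = ["recipe_a", "recipe_b"]            # hypothetical recipe ids
tokenized_documents = [["chop the onion"], ["stir well"]]

paired = []
for i, doc in enumerate(tokenized_documents):
    doc_id = ids[i] if ids else None      # same guard as `if self.ids:`
    paired.append((doc_id, doc))

assert paired == [("recipe_a", ["chop the onion"]),
                  ("recipe_b", ["stir well"])]
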
@@ -36,7 +36,7 @@ buffered_reader_1M = JSON_br("../" + settings.one_million_recipes_file)
 
 # open savefile:
 
-def process_instructions(instructions: list):
+def process_instructions(instructions: list, document_ids=None):
 
     if len(instructions) == 0:
         return
@@ -44,7 +44,7 @@ def process_instructions(instructions: list):
     conllu_input_docs = instructions
 
     cg = ConlluGenerator(
-        conllu_input_docs, ingredients.multi_word_ingredients_stemmed)
+        conllu_input_docs, ingredients.multi_word_ingredients_stemmed, ids=document_ids)
     cg.tokenize_and_stem()
     cg.pos_tagging()
     cg.add_misc_value_by_list("food_type", "ingredient", [w.replace(" ","_") for w in ingredients.multi_word_ingredients_stemmed] + ingredients.ingredients_stemmed)
@@ -58,6 +58,7 @@ file_count = n_skipped_recipes // (recipe_buffer_size * recipe_buffers_per_file)
 
 savefile = open(f"recipes{file_count}.conllu", 'w')
 instructions = []
+ids = []
 
 for raw_recipe in buffered_reader_1M:
 
@@ -68,13 +69,15 @@ for raw_recipe in buffered_reader_1M:
     instruction = ""
     for item in raw_recipe['instructions']:
         instruction += item['text'] + '\n'
+    ids.append(raw_recipe['id'])
 
     instructions.append(instruction)
 
     if i % recipe_buffer_size == 0:
-        process_instructions(instructions)
+        process_instructions(instructions, ids)
         print(f"processed {i} recipes")
         instructions = []
+        ids = []
         buffer_count += 1
         if buffer_count % recipe_buffers_per_file == 0:
             savefile.close()
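
The driver script buffers ids alongside instructions and clears both after every flush, so the two lists stay index-aligned whenever process_instructions(instructions, ids) is called. A stripped-down sketch of that buffering pattern (field names follow the script, the records themselves are invented):

recipe_buffer_size = 2
records = [
    {"id": "r1", "instructions": [{"text": "chop"}]},
    {"id": "r2", "instructions": [{"text": "stir"}]},
    {"id": "r3", "instructions": [{"text": "bake"}]},
]

instructions, ids = [], []
for i, raw_recipe in enumerate(records, start=1):
    instruction = ""
    for item in raw_recipe['instructions']:
        instruction += item['text'] + '\n'
    ids.append(raw_recipe['id'])
    instructions.append(instruction)

    if i % recipe_buffer_size == 0:
        assert len(ids) == len(instructions)
        # process_instructions(instructions, ids) would run here
        instructions, ids = [], []

# the last, incomplete buffer ("r3") is still pending at this point,
# just as in the original script's loop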