saving recipe id
@@ -11,6 +11,8 @@ import numpy as np
 sys.path.insert(0, '..')
 import settings  # noqa
 
+import gzip
+
 
 class ConlluSentenceIterator(object):
     def __init__(self, conllu_reader):
@@ -19,18 +21,29 @@ class ConlluSentenceIterator(object):
         self._open()
     
     def _open(self):
-        self._fileobj = open(self.conllu_reader._path, 'r')
+        if self.conllu_reader._path.endswith(".gz"):
+            self._fileobj = gzip.open(self.conllu_reader._path, 'r')
+            self._nextline = self.read_byte_line
+        else:
+            self._fileobj = open(self.conllu_reader._path, 'r')
+            self._nextline = self.read_str_line
 
     def __next__(self):
         next_sent = self.next_sentence()
         if next_sent is None:
             raise StopIteration
         return next_sent
     
+    def read_str_line(self):
+        return self._fileobj.readline()
+    
+    def read_byte_line(self):
+        return self._fileobj.readline().decode("utf-8")
+
     def next_sentence(self):
         data = ""
         while True:
-            line = self._fileobj.readline()
+            line = self._nextline()
             if line == "":
                 break
             data += line
@@ -54,13 +67,24 @@ class ConlluDocumentIterator(object):
         self._open()
     
     def _open(self):
-        self._fileobj = open(self.conllu_reader._path, 'r')
+        if self.conllu_reader._path.endswith(".gz"):
+            self._fileobj = gzip.open(self.conllu_reader._path, 'r')
+            self._nextline = self.read_byte_line
+        else:
+            self._fileobj = open(self.conllu_reader._path, 'r')
+            self._nextline = self.read_str_line
         
+    def read_str_line(self):
+        return self._fileobj.readline()
+    
+    def read_byte_line(self):
+        return self._fileobj.readline().decode("utf-8")
+
     def next_document(self):
         data = ""
         last_line_empty = False
         while True:
-            line = self._fileobj.readline()
+            line = self._nextline()
             if line == "":
                 break
             data += line

@@ -121,14 +121,17 @@ class ConlluSentence(object):
 
 
 class ConlluDocument(object):
-    def __init__(self):
+    def __init__(self, id=None):
         self.conllu_sentences = []
+        self.id = id
     
     def add(self, conllu_sentence: ConlluSentence):
         self.conllu_sentences.append(conllu_sentence)
     
     def __repr__(self):
         result = "# newdoc\n"
+        if self.id is not None:
+            result += "# id: " + self.id + "\n"
         for elem in self.conllu_sentences:
             result += elem.__repr__() + "\n"
 
@@ -139,7 +142,7 @@ class ConlluDocument(object):
 
 
 class ConlluGenerator(object):
-    def __init__(self, documents: list, stemmed_multi_word_tokens, stemmer=PorterStemmer()):
+    def __init__(self, documents: list, stemmed_multi_word_tokens, stemmer=PorterStemmer(), ids=None):
         self.documents = documents
         self.stemmed_multi_word_tokens = stemmed_multi_word_tokens
         self.mwe_tokenizer = StemmedMWETokenizer(
@@ -150,9 +153,12 @@ class ConlluGenerator(object):
 
         self.conllu_documents = []
 
+        self.ids = ids
+
     def tokenize_and_stem(self):
         tokenized_documents = []
 
+        i = 0
         for doc in self.documents:
             tokenized_sentences = []
             sentences = doc.split("\n")
@@ -165,7 +171,10 @@ class ConlluGenerator(object):
 
         # now create initial colln-u elemnts
         for doc in tokenized_documents:
-            conllu_doc = ConlluDocument()
+            if self.ids:
+                conllu_doc = ConlluDocument(self.ids[i])
+            else:
+                conllu_doc = ConlluDocument()
             self.id_counter = 0
             for sent in doc:
                 conllu_sent = ConlluSentence()
@@ -184,6 +193,7 @@ class ConlluGenerator(object):
                     self.id_counter += 1
                 conllu_doc.add(conllu_sent)
             self.conllu_documents.append(conllu_doc)
+            i += 1
 
     def pos_tagging(self):
         for conllu_document in self.conllu_documents:

@@ -36,7 +36,7 @@ buffered_reader_1M = JSON_br("../" + settings.one_million_recipes_file)
 
 # open savefile:
 
-def process_instructions(instructions: list):
+def process_instructions(instructions: list, document_ids=None):
 
     if len(instructions) == 0:
         return
@@ -44,7 +44,7 @@ def process_instructions(instructions: list):
     conllu_input_docs = instructions
 
     cg = ConlluGenerator(
-        conllu_input_docs, ingredients.multi_word_ingredients_stemmed)
+        conllu_input_docs, ingredients.multi_word_ingredients_stemmed, ids=document_ids)
     cg.tokenize_and_stem()
     cg.pos_tagging()
     cg.add_misc_value_by_list("food_type", "ingredient", [w.replace(" ","_") for w in ingredients.multi_word_ingredients_stemmed] + ingredients.ingredients_stemmed)
@@ -58,6 +58,7 @@ file_count = n_skipped_recipes // (recipe_buffer_size * recipe_buffers_per_file)
 
 savefile = open(f"recipes{file_count}.conllu", 'w')
 instructions = []
+ids = []
 
 for raw_recipe in buffered_reader_1M:
 
@@ -68,13 +69,15 @@ for raw_recipe in buffered_reader_1M:
         instruction = ""
         for item in raw_recipe['instructions']:
             instruction += item['text'] + '\n'
+        ids.append(raw_recipe['id'])
 
         instructions.append(instruction)
 
         if i % recipe_buffer_size == 0:
-            process_instructions(instructions)
+            process_instructions(instructions, ids)
             print(f"processed {i} recipes")
             instructions = []
+            ids = []
             buffer_count += 1
             if buffer_count % recipe_buffers_per_file == 0:
                 savefile.close()
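The diff above threads each recipe's id from the 1M-recipes JSON through ConlluGenerator and into the header that ConlluDocument.__repr__ writes. A minimal usage sketch, assuming the classes are importable from this repository; the module path, document strings and ids below are made-up placeholders, not part of the commit:

    from conllu_generator import ConlluGenerator  # assumed module path

    docs = ["Preheat the oven .\nBake the dough .", "Chop the onions ."]
    doc_ids = ["recipe_0001", "recipe_0002"]

    # Passing ids pairs each input document with its recipe id; the id is stored
    # on the ConlluDocument and emitted as "# id: ..." right after "# newdoc".
    cg = ConlluGenerator(docs, stemmed_multi_word_tokens=[], ids=doc_ids)
    cg.tokenize_and_stem()

    for conllu_doc in cg.conllu_documents:
        print(repr(conllu_doc).splitlines()[:2])  # ['# newdoc', '# id: recipe_0001'], ...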