Merge branch 'master' of ssh://gogs@the-cake-is-a-lie.net:20022/jonas/NLP-LAB.git

commit 595d6326a2
Carsten, 2018-07-03 15:42:23 +02:00
2 changed files with 2 additions and 2 deletions


@@ -144,7 +144,7 @@
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
-      "model_id": "a4899ee1720f4db4a136a96657f3283a",
+      "model_id": "d304cda50752491da1637b292a9367e8",
       "version_major": 2,
       "version_minor": 0
      },


@@ -641,7 +641,7 @@ class trainer(object):
             named_steps[k].fit = lambda X, y: named_steps[k].train_on_batch(to_dense_if_sparse(X), y) # ← why has keras no sparse support on batch progressing!?!?!
         if batch_size is None:
-            self.pm.fit(X = self.sdm.X[:max_size], y = self.sdm.y[:max_size], validation_split=0.1, epochs=n_epochs)
+            self.pm.fit(X = self.sdm.X[:max_size], y = self.sdm.y[:max_size])
         else:
             n = len(self.sdm.X) // batch_size
             for i in range(n_epochs):
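
The hunk above drops the validation_split and epochs arguments from the non-batched fit call; the surrounding code works around Keras' missing sparse support in train_on_batch by densifying each batch through to_dense_if_sparse. The sketch below shows that pattern under the assumption of scipy sparse feature matrices; the helper body and the fit_in_batches function are illustrative, not code taken from this repository.

    import numpy as np
    import scipy.sparse as sp

    def to_dense_if_sparse(X):
        # Keras' train_on_batch expects dense arrays, so densify sparse input
        # one batch at a time instead of converting the whole matrix up front.
        return X.toarray() if sp.issparse(X) else np.asarray(X)

    def fit_in_batches(model, X, y, batch_size=128, n_epochs=1):
        # Hypothetical batch loop mirroring the trainer's else-branch: slice the
        # (possibly sparse) matrix, densify only the current slice, and train.
        n = X.shape[0] // batch_size
        for _ in range(n_epochs):
            for i in range(n):
                start, end = i * batch_size, (i + 1) * batch_size
                model.train_on_batch(to_dense_if_sparse(X[start:end]), y[start:end])

Converting per batch keeps peak memory bounded to one dense slice, which is presumably why fit is routed through train_on_batch here in the first place.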