some keras batch size fixes

Jonas Weinz 2018-07-03 15:07:45 +02:00
parent c59a58c672
commit 84aa7f288f
2 changed files with 11 additions and 9 deletions

View File

@@ -144,7 +144,7 @@
 {
 "data": {
 "application/vnd.jupyter.widget-view+json": {
-"model_id": "d018a59d95fe45f2ae7be013a49b5900",
+"model_id": "6b0243a17f70405fb3bbb83918efd12e",
 "version_major": 2,
 "version_minor": 0
 },
@@ -495,7 +495,7 @@
 " p = progress_indicator()\n",
 " \n",
 " tr = stl.trainer(sdm=sdm, pm=pm)\n",
-" tr.fit(progress_callback=p.update, batch_size=batch_size, n_epochs=n_epochs)\n",
+" tr.fit(progress_callback=p.update, batch_size=batch_size if batch_size > 0 else None, n_epochs=n_epochs)\n",
 " \n",
 "\n",
 "# linking:\n",

View File

@@ -633,12 +633,13 @@ class trainer(object):
 named_steps[s].fit = lambda self, X, y=None: self
 named_steps[s].fit_transform = named_steps[s].transform
-for k in keras_batch_fitting_layer:
-    # forcing batch fitting on keras
-    disabled_keras_fits[k]=named_steps[k].fit
-    named_steps[k].fit = lambda X, y: named_steps[k].train_on_batch(to_dense_if_sparse(X), y) # ← why has keras no sparse support on batch progressing!?!?!
+if batch_size is None:
+    for k in keras_batch_fitting_layer:
+        # forcing batch fitting on keras
+        disabled_keras_fits[k]=named_steps[k].fit
+        named_steps[k].fit = lambda X, y: named_steps[k].train_on_batch(to_dense_if_sparse(X), y) # ← why has keras no sparse support on batch progressing!?!?!
 if batch_size is None:
     self.pm.fit(X = self.sdm.X[:max_size], y = self.sdm.y[:max_size])
 else:
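The block now guarded by batch_size is None temporarily swaps each Keras step's fit for train_on_batch and densifies sparse input first, since train_on_batch will not take scipy sparse matrices (the complaint in the inline comment). A self-contained sketch of that pattern, with a made-up model and data rather than the repo's pipeline:

    import numpy as np
    import scipy.sparse as sp
    from keras.models import Sequential
    from keras.layers import Dense

    def to_dense_if_sparse(X):
        # densify only if X is a scipy sparse matrix
        return X.toarray() if sp.issparse(X) else X

    model = Sequential([Dense(8, activation="relu", input_shape=(100,)),
                        Dense(1, activation="sigmoid")])
    model.compile(optimizer="adam", loss="binary_crossentropy")

    X = sp.random(512, 100, density=0.05, format="csr")   # sparse feature matrix
    y = np.random.randint(0, 2, size=(512, 1))

    batch_size = 64
    for start in range(0, X.shape[0], batch_size):
        stop = start + batch_size
        model.train_on_batch(to_dense_if_sparse(X[start:stop]), y[start:stop])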
@@ -658,8 +659,9 @@ class trainer(object):
 named_steps[s].fit = disabled_fits[s]
 named_steps[s].fit_transform = disabled_fit_transforms[s]
-for k in keras_batch_fitting_layer:
-    named_steps[k].fit = disabled_keras_fits[k]
+if batch_size is None:
+    for k in keras_batch_fitting_layer:
+        named_steps[k].fit = disabled_keras_fits[k]
 def test(self):
     '''
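Taken together, the two hunks are the patch and restore halves of one cycle: the original fit methods are stashed in disabled_keras_fits before the override and put back afterwards, and this commit puts the same batch_size is None guard on both halves so the restore always mirrors the patch. A condensed sketch of that flow using the names from the diff, with the actual pipeline fit elided:

    disabled_keras_fits = {}
    if batch_size is None:
        for k in keras_batch_fitting_layer:
            disabled_keras_fits[k] = named_steps[k].fit        # stash the original fit
            named_steps[k].fit = (lambda X, y, _step=named_steps[k]:
                                  _step.train_on_batch(to_dense_if_sparse(X), y))

    # ... whole-dataset fit of the pipeline happens here ...

    if batch_size is None:
        for k in keras_batch_fitting_layer:
            named_steps[k].fit = disabled_keras_fits[k]        # restore the original fit

The sketch binds the step through a default argument; the lambda in the diff closes over the loop variable k, which only matters if keras_batch_fitting_layer holds more than one entry.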