
Commit 0f5649c

workaround for train_set batching during inference time
1 parent b384686 commit 0f5649c

2 files changed: +5 −1 lines changed

neuralmonkey/learning_utils.py (+4 −1)

@@ -13,7 +13,7 @@
 from termcolor import colored

 from neuralmonkey.logging import log, log_print, warn
-from neuralmonkey.dataset import Dataset
+from neuralmonkey.dataset import Dataset, BatchingScheme
 from neuralmonkey.tf_manager import TensorFlowManager
 from neuralmonkey.runners.base_runner import (
     BaseRunner, ExecutionResult, GraphExecutor, OutputSeries)

@@ -85,6 +85,9 @@ def training_loop(cfg: Namespace) -> None:
                 trainer_result = cfg.tf_manager.execute(
                     batch, feedables, cfg.trainers, train=True,
                     summaries=True)
+                # workaround: we need to use validation batching scheme
+                # during evaluation
+                batch.batching = BatchingScheme(batch_size=cfg.batch_size)
                 train_results, train_outputs, f_batch = run_on_dataset(
                     cfg.tf_manager, cfg.runners, cfg.dataset_runner, batch,
                     cfg.postprocess, write_out=False)
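For context, the following is a minimal sketch of what the workaround above does, with the logic lifted out of training_loop. It assumes, as the diff implies, that a Dataset instance exposes a mutable batching attribute and that BatchingScheme accepts a batch_size keyword; the helper evaluate_train_batch and the cfg argument are hypothetical stand-ins for the surrounding training_loop context, not a public API.

# Hypothetical sketch of the workaround, not the actual training_loop code.
from neuralmonkey.dataset import BatchingScheme
from neuralmonkey.learning_utils import run_on_dataset

def evaluate_train_batch(cfg, batch):
    """Evaluate a training batch using the validation-style batching scheme."""
    # The batch arrives carrying the trainer's batching scheme; swap in a
    # plain fixed-size scheme so that evaluation on training data is batched
    # the same way as validation data.
    batch.batching = BatchingScheme(batch_size=cfg.batch_size)
    # Mirrors the run_on_dataset call in learning_utils.py shown above.
    return run_on_dataset(
        cfg.tf_manager, cfg.runners, cfg.dataset_runner, batch,
        cfg.postprocess, write_out=False)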

tests/hier-multiattention.ini (+1)

@@ -10,6 +10,7 @@ trainer=<trainer>
 runners=[<runner_hier_noshare_nosentinel>, <runner_hier_share_nosentinel>, <runner_hier_share_sentinel>, <runner_hier_noshare_sentinel>]
 postprocess=None
 evaluation=[("target_hier_noshare_nosentinel", "target", evaluators.BLEU)]
+batch_size=1
 logging_period=1
 validation_period=5
 test_datasets=[<val_data>]
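The added batch_size=1 appears to supply the cfg.batch_size value that the workaround in learning_utils.py reads when constructing the evaluation BatchingScheme.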
