Created: June 20, 2020, 05:45
-
-
Save himkt/cccd9056487fc6683d4811dab9643644 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| ____________________________ test_dump_best_config _____________________________ | |
| def test_dump_best_config() -> None: | |
| with tempfile.TemporaryDirectory() as tmp_dir: | |
| def objective(trial: optuna.Trial) -> float: | |
| trial.suggest_uniform("DROPOUT", dropout, dropout) | |
| executor = optuna.integration.AllenNLPExecutor(trial, input_config_file, tmp_dir) | |
| return executor.run() | |
| dropout = 0.5 | |
| input_config_file = os.path.join( | |
| os.path.dirname(os.path.realpath(__file__)), "example.jsonnet" | |
| ) | |
| output_config_file = os.path.join(tmp_dir, "result.json") | |
| study = optuna.create_study(direction="maximize") | |
| > study.optimize(objective, n_trials=1) | |
| tests/integration_tests/allennlp_tests/test_allennlp.py:142: | |
| _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ | |
| optuna/study.py:286: in optimize | |
| func, n_trials, timeout, catch, callbacks, gc_after_trial, None | |
| optuna/study.py:629: in _optimize_sequential | |
| self._run_trial_and_callbacks(func, catch, callbacks, gc_after_trial) | |
| optuna/study.py:660: in _run_trial_and_callbacks | |
| trial = self._run_trial(func, catch, gc_after_trial) | |
| optuna/study.py:684: in _run_trial | |
| result = func(trial) | |
| tests/integration_tests/allennlp_tests/test_allennlp.py:133: in objective | |
| return executor.run() | |
| optuna/integration/allennlp.py:129: in run | |
| allennlp.commands.train.train_model(params, self._serialization_dir) | |
| venv/lib/python3.6/site-packages/allennlp/commands/train.py:230: in train_model | |
| dry_run=dry_run, | |
| venv/lib/python3.6/site-packages/allennlp/commands/train.py:418: in _train_worker | |
| params=params, serialization_dir=serialization_dir, local_rank=process_rank, | |
| venv/lib/python3.6/site-packages/allennlp/common/from_params.py:580: in from_params | |
| **extras, | |
| venv/lib/python3.6/site-packages/allennlp/common/from_params.py:611: in from_params | |
| return constructor_to_call(**kwargs) # type: ignore | |
| venv/lib/python3.6/site-packages/allennlp/commands/train.py:646: in from_partial_objects | |
| data_loader_ = data_loader.construct(dataset=datasets["train"]) | |
| venv/lib/python3.6/site-packages/allennlp/common/lazy.py:46: in construct | |
| return self._constructor(**kwargs) | |
| venv/lib/python3.6/site-packages/allennlp/common/from_params.py:446: in constructor | |
| return value_cls.from_params(params=deepcopy(popped_params), **constructor_extras) | |
| venv/lib/python3.6/site-packages/allennlp/common/from_params.py:580: in from_params | |
| **extras, | |
| venv/lib/python3.6/site-packages/allennlp/common/from_params.py:611: in from_params | |
| return constructor_to_call(**kwargs) # type: ignore | |
| venv/lib/python3.6/site-packages/allennlp/data/dataloader.py:151: in from_partial_objects | |
| batches_per_epoch=batches_per_epoch, | |
| _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ | |
| self = <allennlp.data.dataloader.DataLoader object at 0x7f4d142ba438> | |
| dataset = <allennlp.data.dataset_readers.dataset_reader.AllennlpDataset object at 0x7f4d14512ba8> | |
| batch_size = 32, shuffle = False, sampler = None, batch_sampler = None | |
| num_workers = 0, collate_fn = <function allennlp_collate at 0x7f4d9fc31730> | |
| pin_memory = False, drop_last = False, timeout = 0, worker_init_fn = None | |
| multiprocessing_context = None, batches_per_epoch = None | |
| def __init__( | |
| self, | |
| dataset: data.Dataset, | |
| batch_size: int = 1, | |
| shuffle: bool = False, | |
| sampler: Sampler = None, | |
| batch_sampler: BatchSampler = None, | |
| num_workers: int = 0, | |
| # NOTE: The default for collate_fn is different from the normal `None`. | |
| # We assume that if you are using this class you are using an | |
| # allennlp dataset of instances, which would require this. | |
| collate_fn=allennlp_collate, | |
| pin_memory: bool = False, | |
| drop_last: bool = False, | |
| timeout: int = 0, | |
| worker_init_fn=None, | |
| multiprocessing_context: str = None, | |
| batches_per_epoch: int = None, | |
| ): | |
| if num_workers and isinstance(dataset, AllennlpLazyDataset): | |
| warnings.warn( | |
| "Using multi-process data loading with a lazy dataset could lead to " | |
| "deadlocks with certain tokenizers. See:\n" | |
| " https://github.com/allenai/allennlp/issues/4330\n", | |
| UserWarning, | |
| ) | |
| super().__init__( | |
| dataset=dataset, | |
| batch_size=batch_size, | |
| shuffle=shuffle, | |
| sampler=sampler, | |
| batch_sampler=batch_sampler, | |
| num_workers=num_workers, | |
| collate_fn=collate_fn, | |
| pin_memory=pin_memory, | |
| drop_last=drop_last, | |
| timeout=timeout, | |
| worker_init_fn=worker_init_fn, | |
| > multiprocessing_context=multiprocessing_context, | |
| ) | |
| E TypeError: intercept_args() got an unexpected keyword argument 'multiprocessing_context' | |
| venv/lib/python3.6/site-packages/allennlp/data/dataloader.py:88: TypeError | |
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.