Created June 5, 2020 11:46
Save manisnesan/6e4b9dad7d40e6e422e27a116f1ac837 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
---------------------------------------------------------------------------
OSError                                   Traceback (most recent call last)
<ipython-input-104-696308cd97b4> in <module>
----> 1 dblk_lm.summary(cases_mini[:100])

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastai2/data/block.py in summary(self, source, bs, show_batch, **kwargs)
    152     print(f"Setting-up type transforms pipelines")
    153     dsets = self.datasets(source, verbose=True)
--> 154     print("\nBuilding one sample")
    155     for tl in dsets.train.tls:
    156         _apply_pipeline(tl.tfms, get_first(dsets.train.items))

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastai2/data/block.py in datasets(self, source, verbose)
    102         splits = (self.splitter or RandomSplitter())(items)
    103         pv(f"{len(splits)} datasets of sizes {','.join([str(len(s)) for s in splits])}", verbose)
--> 104         return Datasets(items, tfms=self._combine_type_tfms(), splits=splits, dl_type=self.dl_type, n_inp=self.n_inp, verbose=verbose)
    105
    106     def dataloaders(self, source, path='.', verbose=False, **kwargs):

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastai2/data/core.py in __init__(self, items, tfms, tls, n_inp, dl_type, **kwargs)
    278     def __init__(self, items=None, tfms=None, tls=None, n_inp=None, dl_type=None, **kwargs):
    279         super().__init__(dl_type=dl_type)
--> 280         self.tls = L(tls if tls else [TfmdLists(items, t, **kwargs) for t in L(ifnone(tfms,[None]))])
    281         self.n_inp = ifnone(n_inp, max(1, len(self.tls)-1))
    282

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastai2/data/core.py in <listcomp>(.0)
    278     def __init__(self, items=None, tfms=None, tls=None, n_inp=None, dl_type=None, **kwargs):
    279         super().__init__(dl_type=dl_type)
--> 280         self.tls = L(tls if tls else [TfmdLists(items, t, **kwargs) for t in L(ifnone(tfms,[None]))])
    281         self.n_inp = ifnone(n_inp, max(1, len(self.tls)-1))
    282

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
     45             return x
     46
---> 47         res = super().__call__(*((x,) + args), **kwargs)
     48         res._newchk = 0
     49         return res

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastai2/data/core.py in __init__(self, items, tfms, use_list, do_setup, split_idx, train_setup, splits, types, verbose)
    216         if do_setup:
    217             pv(f"Setting up {self.tfms}", verbose)
--> 218             self.setup(train_setup=train_setup)
    219
    220     def _new(self, items, split_idx=None, **kwargs):

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastai2/data/core.py in setup(self, train_setup)
    232
    233     def setup(self, train_setup=True):
--> 234         self.tfms.setup(self, train_setup)
    235         if len(self) != 0:
    236             x = super().__getitem__(0) if self.splits is None else super().__getitem__(self.splits[0])[0]

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastcore/transform.py in setup(self, items, train_setup)
    179         tfms = self.fs[:]
    180         self.fs.clear()
--> 181         for t in tfms: self.add(t,items, train_setup)
    182
    183     def add(self,t, items=None, train_setup=False):

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastcore/transform.py in add(self, t, items, train_setup)
    182
    183     def add(self,t, items=None, train_setup=False):
--> 184         t.setup(items, train_setup)
    185         self.fs.append(t)
    186

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastcore/transform.py in setup(self, items, train_setup)
     76     def setup(self, items=None, train_setup=False):
     77         train_setup = train_setup if self.train_setup is None else self.train_setup
---> 78         return self.setups(getattr(items, 'train', items) if train_setup else items)
     79
     80     def _call(self, fn, x, split_idx=None, **kwargs):

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastcore/dispatch.py in __call__(self, *args, **kwargs)
     96         if not f: return args[0]
     97         if self.inst is not None: f = MethodType(f, self.inst)
---> 98         return f(*args, **kwargs)
     99
    100     def __get__(self, inst, owner):

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastai2/text/core.py in setups(self, dsets)
    285     def setups(self, dsets):
    286         if not self.mode == 'df' or not isinstance(dsets.items, pd.DataFrame): return
--> 287         dsets.items,count = tokenize_df(dsets.items, self.text_cols, rules=self.rules, **self.kwargs)
    288         if self.counter is None: self.counter = count
    289         return dsets

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastai2/text/core.py in tokenize_df(df, text_cols, n_workers, rules, mark_fields, tok_func, res_col_name, **tok_kwargs)
    215     rules = L(ifnone(rules, defaults.text_proc_rules.copy()))
    216     texts = _join_texts(df[text_cols], mark_fields=mark_fields)
--> 217     outputs = L(parallel_tokenize(texts, tok_func, rules, n_workers=n_workers, **tok_kwargs)
    218                ).sorted().itemgot(1)
    219

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastai2/text/core.py in parallel_tokenize(items, tok_func, rules, as_gen, n_workers, **tok_kwargs)
    141 def parallel_tokenize(items, tok_func, rules, as_gen=False, n_workers=defaults.cpus, **tok_kwargs):
    142     "Calls a potential setup on `tok_func` before launching `TokenizeBatch` in parallel"
--> 143     if hasattr(tok_func, 'setup'): tok_kwargs = tok_func(**tok_kwargs).setup(items, rules)
    144     return parallel_gen(TokenizeBatch, items, as_gen=as_gen, tok_func=tok_func,
    145                         rules=rules, n_workers=n_workers, **tok_kwargs)

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastai2/text/core.py in setup(self, items, rules)
    363         for t in progress_bar(maps(*rules, items), total=len(items), leave=False):
    364             f.write(f'{t}\n')
--> 365         sp_model = self.train(raw_text_path)
    366         self.tok = SentencePieceProcessor()
    367         self.tok.Load(str(sp_model))

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/fastai2/text/core.py in train(self, raw_text_path)
    351         f"--character_coverage={self.char_coverage} --model_type={self.model_type}",
    352         f"--unk_id={len(spec_tokens)} --pad_id=-1 --bos_id=-1 --eos_id=-1 --minloglevel=2",
--> 353         f"--user_defined_symbols={','.join(spec_tokens)}"]))
    354     raw_text_path.unlink()
    355     return self.cache_dir/'spm.model'

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/sentencepiece.py in Train(arg, **kwargs)

~/miniconda3/envs/fastai2/lib/python3.7/site-packages/sentencepiece.py in _TrainFromString(arg)

OSError: Not found: unknown field name "minloglevel" in TrainerSpec.
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment