This behavior is deprecated in Hydra 1.1 and will be removed in Hydra 1.2.
See https://hydra.cc/docs/next/upgrades/1.0_to_1.1/automatic_schema_matching for migration instructions.
2022-07-07 04:27:14 | INFO | fairseq.tasks.multilingual_masked_lm | dictionary: 250001 types
Traceback (most recent call last):
  File "xlmr/ootb/xlmr.py", line 182, in <module>
    run()
  File "xlmr/ootb/xlmr.py", line 142, in run
    xlmr = get_model()
  File "xlmr/ootb/xlmr.py", line 29, in get_model
    fairseq_xlmr_large = torch.hub.load('pytorch/fairseq:main', 'xlmr.large')
  File "/opt/conda/lib/python3.7/site-packages/torch/hub.py", line 525, in load
    model = _load_local(repo_or_dir, model, *args, **kwargs)
  File "/opt/conda/lib/python3.7/site-packages/torch/hub.py", line 554, in _load_local
    model = entry(*args, **kwargs)
  File "/root/.cache/torch/hub/pytorch_fairseq_main/fairseq/models/roberta/model_xlmr.py", line 44, in from_pretrained
    **kwargs,
  File "/root/.cache/torch/hub/pytorch_fairseq_main/fairseq/hub_utils.py", line 75, in from_pretrained
    arg_overrides=kwargs,
  File "/root/.cache/torch/hub/pytorch_fairseq_main/fairseq/checkpoint_utils.py", line 473, in load_model_ensemble_and_task
    model = task.build_model(cfg.model, from_checkpoint=True)
  File "/root/.cache/torch/hub/pytorch_fairseq_main/fairseq/tasks/fairseq_task.py", line 676, in build_model
    model = models.build_model(args, self, from_checkpoint)
  File "/root/.cache/torch/hub/pytorch_fairseq_main/fairseq/models/__init__.py", line 106, in build_model
    return model.build_model(cfg, task)
  File "/root/.cache/torch/hub/pytorch_fairseq_main/fairseq/models/roberta/model.py", line 237, in build_model
    encoder = RobertaEncoder(args, task.source_dictionary)
  File "/root/.cache/torch/hub/pytorch_fairseq_main/fairseq/models/roberta/model.py", line 553, in __init__
    self.sentence_encoder = self.build_encoder(args, dictionary, embed_tokens)
  File "/root/.cache/torch/hub/pytorch_fairseq_main/fairseq/models/roberta/model.py", line 570, in build_encoder
    encoder = TransformerEncoder(args, dictionary, embed_tokens)
  File "/root/.cache/torch/hub/pytorch_fairseq_main/fairseq/models/transformer/transformer_encoder.py", line 433, in __init__
    return_fc=return_fc,
  File "/root/.cache/torch/hub/pytorch_fairseq_main/fairseq/models/transformer/transformer_encoder.py", line 96, in __init__
    [self.build_encoder_layer(cfg) for i in range(cfg.encoder.layers)]
  File "/root/.cache/torch/hub/pytorch_fairseq_main/fairseq/models/transformer/transformer_encoder.py", line 96, in <listcomp>
    [self.build_encoder_layer(cfg) for i in range(cfg.encoder.layers)]
  File "/root/.cache/torch/hub/pytorch_fairseq_main/fairseq/models/transformer/transformer_encoder.py", line 438, in build_encoder_layer
    TransformerConfig.from_namespace(args),
  File "/root/.cache/torch/hub/pytorch_fairseq_main/fairseq/models/transformer/transformer_encoder.py", line 107, in build_encoder_layer
    cfg, return_fc=self.return_fc
  File "/root/.cache/torch/hub/pytorch_fairseq_main/fairseq/modules/transformer_layer.py", line 131, in __init__
    + int(self.torch_version[2])
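The traceback is cut off before the final exception line, but the last frame points at the torch-version parsing in fairseq/modules/transformer_layer.py. A minimal sketch of the likely failure mode, assuming that code splits torch.__version__ on "." and calls int() on each component:

```python
import torch

# Assumption: fairseq's transformer_layer.py (around line 131) converts each component of
# torch.__version__ with int(). For builds whose version string carries a suffix,
# e.g. "1.12.0a0+bd13bc6" or "1.13.0+cu117", the third component is not a plain integer,
# so int() raises ValueError.
parts = torch.__version__.split(".")
print(parts)   # e.g. ['1', '12', '0a0+bd13bc6'] on a nightly / NGC build
int(parts[2])  # raises ValueError on such builds; succeeds on plain releases like "1.11.0"
```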
Possibly related to facebookresearch/fairseq#4532
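If that is indeed the cause (as discussed in fairseq#4532), a temporary workaround until fairseq handles suffixed version strings would be to install a torch build with a plain x.y.z version, or to patch the parse locally to ignore the build suffix. A sketch of such a lenient parser follows; torch_version_tuple is a hypothetical helper name, not part of fairseq's API:

```python
import re
import torch

def torch_version_tuple() -> tuple:
    """Leniently parse torch.__version__, keeping only the leading digits of each part."""
    parts = []
    for p in torch.__version__.split(".")[:3]:
        m = re.match(r"\d+", p)  # strip suffixes such as "a0+bd13bc6" or "+cu117"
        parts.append(int(m.group()) if m else 0)
    return tuple(parts)

print(torch_version_tuple())  # e.g. (1, 12, 0) for "1.12.0a0+bd13bc6"
```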