fms/utils/generation.py:10: note: By default the bodies of untyped functions are not checked, consider using --check-untyped-defs [annotation-unchecked]
fms/models/hf/lm_head_mixins.py:7: error: Skipping analyzing "transformers": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/lm_head_mixins.py:8: error: Skipping analyzing "transformers.modeling_outputs": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/lm_head_mixins.py:15: error: Skipping analyzing "transformers.utils": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/lm_head_mixins.py:73: error: Unexpected keyword argument "config" for "__init__" of "object" [call-arg]
fms/models/hf/lm_head_mixins.py:73: error: Unexpected keyword argument "lm_head" for "__init__" of "object" [call-arg]
fms/models/hf/lm_head_mixins.py:186: error: "__init__" of "LMHeadMixin" gets multiple values for keyword argument "_lm_head_params" [misc]
fms/models/hf/lm_head_mixins.py:188: error: Signature of "_get_empty_lm_head" incompatible with supertype "LMHeadMixin" [override]
fms/models/hf/lm_head_mixins.py:188: note: Superclass:
fms/models/hf/lm_head_mixins.py:188: note: def _get_empty_lm_head(self, **kwargs: Any) -> Module
fms/models/hf/lm_head_mixins.py:188: note: Subclass:
fms/models/hf/lm_head_mixins.py:188: note: def _get_empty_lm_head(self, bias: bool) -> Module
fms/models/hf/lm_head_mixins.py:210: error: Item "None" of "Any | None" has no attribute "past_key_values" [union-attr]
fms/models/hf/lm_head_mixins.py:211: error: Item "None" of "Any | None" has no attribute "hidden_states" [union-attr]
fms/models/hf/lm_head_mixins.py:212: error: Item "None" of "Any | None" has no attribute "attentions" [union-attr]
fms/models/hf/lm_head_mixins.py:213: error: Item "None" of "Any | None" has no attribute "cross_attentions" [union-attr]
fms/models/hf/lm_head_mixins.py:234: error: "__init__" of "LMHeadMixin" gets multiple values for keyword argument "_lm_head_params" [misc]
fms/models/hf/lm_head_mixins.py:236: error: Signature of "_get_empty_lm_head" incompatible with supertype "LMHeadMixin" [override]
fms/models/hf/lm_head_mixins.py:236: note: Superclass:
fms/models/hf/lm_head_mixins.py:236: note: def _get_empty_lm_head(self, **kwargs: Any) -> Module
fms/models/hf/lm_head_mixins.py:236: note: Subclass:
fms/models/hf/lm_head_mixins.py:236: note: def _get_empty_lm_head(self, bias: bool) -> Module
fms/models/hf/lm_head_mixins.py:258: error: Item "None" of "Any | None" has no attribute "past_key_values" [union-attr]
fms/models/hf/lm_head_mixins.py:259: error: Item "None" of "Any | None" has no attribute "hidden_states" [union-attr]
fms/models/hf/lm_head_mixins.py:260: error: Item "None" of "Any | None" has no attribute "attentions" [union-attr]
fms/models/hf/lm_head_mixins.py:261: error: Item "None" of "Any | None" has no attribute "cross_attentions" [union-attr]
fms/models/hf/lm_head_mixins.py:262: error: Item "None" of "Any | None" has no attribute "last_hidden_state" [union-attr]
fms/models/hf/lm_head_mixins.py:263: error: Item "None" of "Any | None" has no attribute "hidden_states" [union-attr]
fms/models/hf/lm_head_mixins.py:264: error: Item "None" of "Any | None" has no attribute "attentions" [union-attr]
fms/models/hf/lm_head_mixins.py:305: error: "__init__" of "LMHeadMixin" gets multiple values for keyword argument "_lm_head_params" [misc]
fms/models/hf/lm_head_mixins.py:314: error: Signature of "_get_empty_lm_head" incompatible with supertype "LMHeadMixin" [override]
fms/models/hf/lm_head_mixins.py:314: note: Superclass:
fms/models/hf/lm_head_mixins.py:314: note: def _get_empty_lm_head(self, **kwargs: Any) -> Module
fms/models/hf/lm_head_mixins.py:314: note: Subclass:
fms/models/hf/lm_head_mixins.py:314: note: def _get_empty_lm_head(self, classifier_activation_fn: str, classifier_dropout: float) -> Module
fms/models/hf/lm_head_mixins.py:345: error: Incompatible types in assignment (expression has type "CrossEntropyLoss", variable has type "MSELoss") [assignment]
fms/models/hf/lm_head_mixins.py:350: error: Incompatible types in assignment (expression has type "BCEWithLogitsLoss", variable has type "MSELoss") [assignment]
fms/models/hf/lm_head_mixins.py:365: error: Item "None" of "Any | None" has no attribute "hidden_states" [union-attr]
fms/models/hf/lm_head_mixins.py:366: error: Item "None" of "Any | None" has no attribute "attentions" [union-attr]
fms/models/hf/lm_head_mixins.py:403: error: "__init__" of "LMHeadMixin" gets multiple values for keyword argument "_lm_head_params" [misc]
fms/models/hf/lm_head_mixins.py:415: error: Signature of "_get_empty_lm_head" incompatible with supertype "LMHeadMixin" [override]
fms/models/hf/lm_head_mixins.py:415: note: Superclass:
fms/models/hf/lm_head_mixins.py:415: note: def _get_empty_lm_head(self, **kwargs: Any) -> Module
fms/models/hf/lm_head_mixins.py:415: note: Subclass:
fms/models/hf/lm_head_mixins.py:415: note: def _get_empty_lm_head(self, activation_fn: str, norm_eps: float) -> Module
fms/models/hf/lm_head_mixins.py:437: error: Item "None" of "Any | None" has no attribute "hidden_states" [union-attr]
fms/models/hf/lm_head_mixins.py:438: error: Item "None" of "Any | None" has no attribute "attentions" [union-attr]
fms/models/hf/gpt_bigcode/configuration_gpt_bigcode_hf.py:3: error: Skipping analyzing "transformers": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/llama/configuration_llama_hf.py:3: error: Skipping analyzing "transformers": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/utils.py:5: error: Skipping analyzing "transformers": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/utils.py:49: error: Incompatible return value type (got "Tensor", expected "BoolTensor") [return-value]
fms/models/hf/utils.py:103: error: Incompatible return value type (got "Tensor", expected "BoolTensor") [return-value]
fms/models/hf/utils.py:106: error: Name "HFModelArchitecture" is not defined [name-defined]
fms/models/hf/utils.py:132: error: "PostInitCaller" has no attribute "from_fms_model" [attr-defined]
fms/models/hf/modeling_hf_adapter.py:9: error: Skipping analyzing "transformers": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/modeling_hf_adapter.py:10: error: Skipping analyzing "transformers.modeling_outputs": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/modeling_hf_adapter.py:17: error: Skipping analyzing "transformers.utils": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/modeling_hf_adapter.py:120: error: Incompatible types in assignment (expression has type "Tensor", variable has type "FloatTensor | None") [assignment]
fms/models/hf/modeling_hf_adapter.py:125: error: Incompatible types in assignment (expression has type "Tensor", variable has type "FloatTensor | None") [assignment]
fms/models/hf/modeling_hf_adapter.py:228: error: "forward" of "_HFBase" gets multiple values for keyword argument "input_ids" [misc]
fms/models/hf/modeling_hf_adapter.py:228: error: "forward" of "_HFBase" gets multiple values for keyword argument "attention_mask" [misc]
fms/models/hf/modeling_hf_adapter.py:228: error: "forward" of "_HFBase" gets multiple values for keyword argument "head_mask" [misc]
fms/models/hf/modeling_hf_adapter.py:228: error: "forward" of "_HFBase" gets multiple values for keyword argument "inputs_embeds" [misc]
fms/models/hf/modeling_hf_adapter.py:228: error: "forward" of "_HFBase" gets multiple values for keyword argument "output_attentions" [misc]
fms/models/hf/modeling_hf_adapter.py:228: error: "forward" of "_HFBase" gets multiple values for keyword argument "output_hidden_states" [misc]
fms/models/hf/modeling_hf_adapter.py:228: error: "forward" of "_HFBase" gets multiple values for keyword argument "return_dict" [misc]
fms/models/hf/modeling_hf_adapter.py:327: error: Signature of "forward" incompatible with supertype "_HFBase" [override]
fms/models/hf/modeling_hf_adapter.py:327: note: Superclass:
fms/models/hf/modeling_hf_adapter.py:327: note: def forward(self, input_ids: LongTensor | None = ..., attention_mask: FloatTensor | None = ..., head_mask: FloatTensor | None = ..., inputs_embeds: FloatTensor | None = ..., output_attentions: bool | None = ..., output_hidden_states: bool | None = ..., return_dict: bool | None = ..., *args: Any, **kwargs: Any) -> Any
fms/models/hf/modeling_hf_adapter.py:327: note: Subclass:
fms/models/hf/modeling_hf_adapter.py:327: note: def forward(self, input_ids: LongTensor | None = ..., attention_mask: Tensor | None = ..., inputs_embeds: FloatTensor | None = ..., past_key_values: tuple[Tensor] | None = ..., encoder_hidden_states: Tensor | None = ..., encoder_attention_mask: Tensor | None = ..., head_mask: Tensor | None = ..., cross_attn_head_mask: Tensor | None = ..., use_cache: bool | None = ..., output_attentions: bool | None = ..., output_hidden_states: bool | None = ..., return_dict: bool | None = ..., *args: Any, **kwargs: Any) -> Any
fms/models/hf/modeling_hf_adapter.py:412: error: "forward" of "_HFBase" gets multiple values for keyword argument "input_ids" [misc]
fms/models/hf/modeling_hf_adapter.py:412: error: "forward" of "_HFBase" gets multiple values for keyword argument "attention_mask" [misc]
fms/models/hf/modeling_hf_adapter.py:412: error: "forward" of "_HFBase" gets multiple values for keyword argument "head_mask" [misc]
fms/models/hf/modeling_hf_adapter.py:412: error: "forward" of "_HFBase" gets multiple values for keyword argument "inputs_embeds" [misc]
fms/models/hf/modeling_hf_adapter.py:412: error: "forward" of "_HFBase" gets multiple values for keyword argument "output_attentions" [misc]
fms/models/hf/modeling_hf_adapter.py:412: error: "forward" of "_HFBase" gets multiple values for keyword argument "output_hidden_states" [misc]
fms/models/hf/modeling_hf_adapter.py:412: error: "forward" of "_HFBase" gets multiple values for keyword argument "return_dict" [misc]
fms/models/hf/modeling_hf_adapter.py:414: error: Argument "attention_mask" to "forward" of "_HFBase" has incompatible type "Tensor | None"; expected "FloatTensor | None" [arg-type]
fms/models/hf/modeling_hf_adapter.py:419: error: Argument "head_mask" to "forward" of "_HFBase" has incompatible type "Tensor | None"; expected "FloatTensor | None" [arg-type]
fms/models/hf/modeling_hf_adapter.py:682: error: Argument "map_location" to "load" has incompatible type "str | dict[str, int | str | device] | None"; expected "Callable[[Tensor, str], Tensor] | device | str | dict[str, str] | None" [arg-type]
fms/models/hf/modeling_hf_adapter.py:844: error: Value of type "Any | None" is not indexable [index]
fms/models/hf/modeling_hf_adapter.py:844: error: Item "None" of "Any | None" has no attribute "to_tuple" [union-attr]
fms/models/hf/modeling_hf_adapter.py:914: error: "_EncoderArchitectureMixin" has no attribute "_compute_attention_masks"; maybe "_compute_encoder_attention_masks"? [attr-defined]
fms/models/hf/modeling_hf_adapter.py:953: error: "__init__" of "HFModelArchitecture" gets multiple values for keyword argument "embedding" [misc]
fms/models/hf/modeling_hf_adapter.py:953: error: "__init__" of "HFModelArchitecture" gets multiple values for keyword argument "config" [misc]
fms/models/hf/modeling_hf_adapter.py:1050: error: Signature of "forward" incompatible with supertype "HFModelArchitecture" [override]
fms/models/hf/modeling_hf_adapter.py:1050: note: Superclass:
fms/models/hf/modeling_hf_adapter.py:1050: note: def forward(self, input_ids: LongTensor | None = ..., labels: LongTensor | None = ..., past_key_values: tuple[tuple[FloatTensor]] | None = ..., use_cache: bool | None = ..., return_dict: bool | None = ..., **kwargs: Any) -> Any
fms/models/hf/modeling_hf_adapter.py:1050: note: Subclass:
fms/models/hf/modeling_hf_adapter.py:1050: note: def forward(self, input_ids: LongTensor | None = ..., past_key_values: tuple[tuple[FloatTensor]] | None = ..., attention_mask: FloatTensor | None = ..., head_mask: FloatTensor | None = ..., position_ids: LongTensor | None = ..., inputs_embeds: FloatTensor | None = ..., use_cache: bool | None = ..., output_attentions: bool | None = ..., output_hidden_states: bool | None = ..., return_dict: bool | None = ..., labels: LongTensor | None = ..., **kwargs: Any) -> Any
fms/models/hf/modeling_hf_adapter.py:1316: error: "__init__" of "HFModelArchitecture" gets multiple values for keyword argument "embedding" [misc]
fms/models/hf/modeling_hf_adapter.py:1316: error: "__init__" of "HFModelArchitecture" gets multiple values for keyword argument "config" [misc]
fms/models/hf/modeling_hf_adapter.py:1352: error: Signature of "forward" incompatible with supertype "HFModelArchitecture" [override]
fms/models/hf/modeling_hf_adapter.py:1352: note: Superclass:
fms/models/hf/modeling_hf_adapter.py:1352: note: def forward(self, input_ids: LongTensor | None = ..., labels: LongTensor | None = ..., past_key_values: tuple[tuple[FloatTensor]] | None = ..., use_cache: bool | None = ..., return_dict: bool | None = ..., **kwargs: Any) -> Any
fms/models/hf/modeling_hf_adapter.py:1352: note: Subclass:
fms/models/hf/modeling_hf_adapter.py:1352: note: def forward(self, input_ids: LongTensor | None = ..., attention_mask: FloatTensor | None = ..., head_mask: FloatTensor | None = ..., inputs_embeds: FloatTensor | None = ..., output_attentions: bool | None = ..., output_hidden_states: bool | None = ..., return_dict: bool | None = ..., labels: LongTensor | None = ..., **kwargs: Any) -> Any
fms/models/hf/modeling_hf_adapter.py:1466: error: "__init__" of "HFDecoderModelArchitecture" gets multiple values for keyword argument "decoder" [misc]
fms/models/hf/modeling_hf_adapter.py:1466: error: "__init__" of "HFDecoderModelArchitecture" gets multiple values for keyword argument "embedding" [misc]
fms/models/hf/modeling_hf_adapter.py:1466: error: "__init__" of "HFDecoderModelArchitecture" gets multiple values for keyword argument "config" [misc]
fms/models/hf/modeling_hf_adapter.py:1521: error: Signature of "forward" incompatible with supertype "HFDecoderModelArchitecture" [override]
fms/models/hf/modeling_hf_adapter.py:1521: note: Superclass:
fms/models/hf/modeling_hf_adapter.py:1521: note: def forward(self, input_ids: LongTensor | None = ..., past_key_values: tuple[tuple[FloatTensor]] | None = ..., attention_mask: FloatTensor | None = ..., head_mask: FloatTensor | None = ..., position_ids: LongTensor | None = ..., inputs_embeds: FloatTensor | None = ..., use_cache: bool | None = ..., output_attentions: bool | None = ..., output_hidden_states: bool | None = ..., return_dict: bool | None = ..., labels: LongTensor | None = ..., **kwargs: Any) -> Any
fms/models/hf/modeling_hf_adapter.py:1521: note: Subclass:
fms/models/hf/modeling_hf_adapter.py:1521: note: def forward(self, input_ids: LongTensor | None = ..., attention_mask: FloatTensor | None = ..., decoder_input_ids: LongTensor | None = ..., decoder_attention_mask: BoolTensor | None = ..., head_mask: FloatTensor | None = ..., decoder_head_mask: FloatTensor | None = ..., cross_attn_head_mask: Tensor | None = ..., encoder_outputs: tuple[tuple[FloatTensor]] | None = ..., past_key_values: tuple[tuple[FloatTensor]] | None = ..., inputs_embeds: FloatTensor | None = ..., decoder_inputs_embeds: FloatTensor | None = ..., use_cache: bool | None = ..., output_attentions: bool | None = ..., output_hidden_states: bool | None = ..., return_dict: bool | None = ..., labels: LongTensor | None = ..., **kwargs: Any) -> Any
fms/models/hf/modeling_hf_adapter.py:1521: error: Signature of "forward" incompatible with supertype "HFModelArchitecture" [override]
fms/models/hf/modeling_hf_adapter.py:1521: note: Superclass:
fms/models/hf/modeling_hf_adapter.py:1521: note: def forward(self, input_ids: LongTensor | None = ..., labels: LongTensor | None = ..., past_key_values: tuple[tuple[FloatTensor]] | None = ..., use_cache: bool | None = ..., return_dict: bool | None = ..., **kwargs: Any) -> Any
fms/models/hf/modeling_hf_adapter.py:1521: note: Subclass:
fms/models/hf/modeling_hf_adapter.py:1521: note: def forward(self, input_ids: LongTensor | None = ..., attention_mask: FloatTensor | None = ..., decoder_input_ids: LongTensor | None = ..., decoder_attention_mask: BoolTensor | None = ..., head_mask: FloatTensor | None = ..., decoder_head_mask: FloatTensor | None = ..., cross_attn_head_mask: Tensor | None = ..., encoder_outputs: tuple[tuple[FloatTensor]] | None = ..., past_key_values: tuple[tuple[FloatTensor]] | None = ..., inputs_embeds: FloatTensor | None = ..., decoder_inputs_embeds: FloatTensor | None = ..., use_cache: bool | None = ..., output_attentions: bool | None = ..., output_hidden_states: bool | None = ..., return_dict: bool | None = ..., labels: LongTensor | None = ..., **kwargs: Any) -> Any
fms/models/hf/modeling_hf_adapter.py:1630: error: Argument 1 to "_compute_encoder_attention_masks" of "_EncoderArchitectureMixin" has incompatible type "LongTensor | FloatTensor | None"; expected "Tensor" [arg-type]
fms/models/hf/modeling_hf_adapter.py:1631: error: Argument 2 to "_compute_encoder_attention_masks" of "_EncoderArchitectureMixin" has incompatible type "FloatTensor | None"; expected "Tensor" [arg-type]
fms/models/hf/modeling_hf_adapter.py:1655: error: Tuple index out of range [misc]
fms/models/hf/modeling_hf_adapter.py:1656: error: Tuple index out of range [misc]
fms/models/hf/modeling_hf_adapter.py:1661: error: Value of type "tuple[tuple[FloatTensor]] | None" is not indexable [index]
fms/models/hf/modeling_hf_adapter.py:1667: error: Incompatible types in assignment (expression has type "Tensor", variable has type "LongTensor | None") [assignment]
fms/models/hf/modeling_hf_adapter.py:1682: error: Argument 1 to "_compute_decoder_attention_masks" of "HFEncoderDecoderModelArchitecture" has incompatible type "LongTensor | FloatTensor | None"; expected "Tensor" [arg-type]
fms/models/hf/modeling_hf_adapter.py:1686: error: Argument 2 to "_compute_decoder_attention_masks" of "HFEncoderDecoderModelArchitecture" has incompatible type "BoolTensor | None"; expected "Tensor" [arg-type]
fms/models/hf/modeling_hf_adapter.py:1687: error: Argument 3 to "_compute_decoder_attention_masks" of "HFEncoderDecoderModelArchitecture" has incompatible type "bool | None"; expected "bool" [arg-type]
fms/models/hf/modeling_hf_adapter.py:1701: error: Item "tuple[FloatTensor, ...]" of "tuple[FloatTensor] | Any" has no attribute "shape" [union-attr]
fms/models/hf/modeling_hf_adapter.py:1742: error: Argument "attention_mask" to "forward" of "HFDecoderModelArchitecture" has incompatible type "Any | BoolTensor | None"; expected "FloatTensor | None" [arg-type]
fms/models/hf/modeling_hf_adapter.py:1855: error: Signature of "_prepare_inputs_for_generation" incompatible with supertype "HFDecoderModelArchitecture" [override]
fms/models/hf/modeling_hf_adapter.py:1855: note: Superclass:
fms/models/hf/modeling_hf_adapter.py:1855: note: def _prepare_inputs_for_generation(self, input_ids: Tensor, attention_mask: Tensor | None = ..., past_key_values: tuple[Tensor] | None = ..., use_cache: bool | None = ..., **model_kwargs: Any) -> dict[Any, Any]
fms/models/hf/modeling_hf_adapter.py:1855: note: Subclass:
fms/models/hf/modeling_hf_adapter.py:1855: note: def _prepare_inputs_for_generation(self, decoder_input_ids: Tensor, past_key_values: tuple[Tensor] | None = ..., attention_mask: Tensor | None = ..., head_mask: FloatTensor | None = ..., decoder_head_mask: FloatTensor | None = ..., decoder_attention_mask: Tensor | None = ..., cross_attn_head_mask: Tensor | None = ..., use_cache: bool | None = ..., encoder_outputs: tuple[tuple[FloatTensor]] | None = ..., **model_kwargs: Any) -> dict[Any, Any]
fms/models/hf/modeling_hf_adapter.py:1855: error: Signature of "_prepare_inputs_for_generation" incompatible with supertype "HFModelArchitecture" [override]
fms/models/hf/modeling_hf_adapter.py:1855: note: Superclass:
fms/models/hf/modeling_hf_adapter.py:1855: note: def _prepare_inputs_for_generation(self, input_ids: Tensor, **model_kwargs: Any) -> dict[Any, Any]
fms/models/hf/modeling_hf_adapter.py:1855: note: Subclass:
fms/models/hf/modeling_hf_adapter.py:1855: note: def _prepare_inputs_for_generation(self, decoder_input_ids: Tensor, past_key_values: tuple[Tensor] | None = ..., attention_mask: Tensor | None = ..., head_mask: FloatTensor | None = ..., decoder_head_mask: FloatTensor | None = ..., decoder_attention_mask: Tensor | None = ..., cross_attn_head_mask: Tensor | None = ..., use_cache: bool | None = ..., encoder_outputs: tuple[tuple[FloatTensor]] | None = ..., **model_kwargs: Any) -> dict[Any, Any]
fms/models/hf/roberta/modeling_roberta_hf.py:5: error: Skipping analyzing "transformers": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/roberta/modeling_roberta_hf.py:6: error: Skipping analyzing "transformers.modeling_outputs": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/roberta/modeling_roberta_hf.py:139: error: "__init__" of "MaskedLMHeadMixin" gets multiple values for keyword argument "activation_fn" [misc]
fms/models/hf/roberta/modeling_roberta_hf.py:139: error: "__init__" of "MaskedLMHeadMixin" gets multiple values for keyword argument "norm_eps" [misc]
fms/models/hf/roberta/modeling_roberta_hf.py:149: error: Argument 1 of "_hf_model_from_fms" is incompatible with supertype "HFModelArchitecture"; supertype defines the argument type as "Module" [override]
fms/models/hf/roberta/modeling_roberta_hf.py:149: note: This violates the Liskov substitution principle
fms/models/hf/roberta/modeling_roberta_hf.py:149: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides
fms/models/hf/roberta/modeling_roberta_hf.py:170: error: "__init__" of "SequenceClassificationLMHeadMixin" gets multiple values for keyword argument "classifier_activation_fn" [misc]
fms/models/hf/roberta/modeling_roberta_hf.py:170: error: "__init__" of "SequenceClassificationLMHeadMixin" gets multiple values for keyword argument "classifier_dropout" [misc]
fms/models/hf/roberta/modeling_roberta_hf.py:182: error: Argument 1 of "_hf_model_from_fms" is incompatible with supertype "HFModelArchitecture"; supertype defines the argument type as "Module" [override]
fms/models/hf/roberta/modeling_roberta_hf.py:182: note: This violates the Liskov substitution principle
fms/models/hf/roberta/modeling_roberta_hf.py:182: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides
fms/models/hf/llama/modeling_llama_hf.py:5: error: Skipping analyzing "transformers": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/llama/modeling_llama_hf.py:6: error: Skipping analyzing "transformers.modeling_outputs": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/llama/modeling_llama_hf.py:20: error: Signature of "_adapt" incompatible with supertype "HFDecoder" [override]
fms/models/hf/llama/modeling_llama_hf.py:20: note: Superclass:
fms/models/hf/llama/modeling_llama_hf.py:20: note: def _adapt(self, input_ids: LongTensor | None = ..., attention_mask: Tensor | None = ..., inputs_embeds: FloatTensor | None = ..., past_key_values: tuple[Tensor] | None = ..., encoder_hidden_states: Tensor | None = ..., encoder_attention_mask: Tensor | None = ..., head_mask: Tensor | None = ..., cross_attn_head_mask: Tensor | None = ..., use_cache: bool | None = ..., output_attentions: bool | None = ..., output_hidden_states: bool | None = ..., *args: Any, **kwargs: Any) -> Any
fms/models/hf/llama/modeling_llama_hf.py:20: note: Subclass:
fms/models/hf/llama/modeling_llama_hf.py:20: note: def _adapt(self, input_ids: LongTensor | None = ..., attention_mask: Tensor | None = ..., position_ids: LongTensor | None = ..., past_key_values: tuple[Tensor] | None = ..., use_cache: bool | None = ..., attn_algorithm: str | None = ..., *args: Any, **kwargs: Any) -> Any
fms/models/hf/llama/modeling_llama_hf.py:73: error: Argument 1 to "HFAdaptedLLaMADecoder" has incompatible type Module; expected "LLaMA" [arg-type]
fms/models/hf/llama/modeling_llama_hf.py:114: error: "__init__" of "LMHeadModelLMHeadMixin" gets multiple values for keyword argument "bias" [misc]
fms/models/hf/llama/modeling_llama_hf.py:118: error: Argument 1 of "_hf_model_from_fms" is incompatible with supertype "HFModelArchitecture"; supertype defines the argument type as "Module" [override]
fms/models/hf/llama/modeling_llama_hf.py:118: note: This violates the Liskov substitution principle
fms/models/hf/llama/modeling_llama_hf.py:118: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#incompatible-overrides
fms/models/hf/gpt_bigcode/modeling_gpt_bigcode_hf.py:5: error: Skipping analyzing "transformers": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/gpt_bigcode/modeling_gpt_bigcode_hf.py:6: error: Skipping analyzing "transformers.modeling_outputs": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/gpt_bigcode/modeling_gpt_bigcode_hf.py:22: error: Signature of "_adapt" incompatible with supertype "HFDecoder" [override]
fms/models/hf/gpt_bigcode/modeling_gpt_bigcode_hf.py:22: note: Superclass:
fms/models/hf/gpt_bigcode/modeling_gpt_bigcode_hf.py:22: note: def _adapt(self, input_ids: LongTensor | None = ..., attention_mask: Tensor | None = ..., inputs_embeds: FloatTensor | None = ..., past_key_values: tuple[Tensor] | None = ..., encoder_hidden_states: Tensor | None = ..., encoder_attention_mask: Tensor | None = ..., head_mask: Tensor | None = ..., cross_attn_head_mask: Tensor | None = ..., use_cache: bool | None = ..., output_attentions: bool | None = ..., output_hidden_states: bool | None = ..., *args: Any, **kwargs: Any) -> Any
fms/models/hf/gpt_bigcode/modeling_gpt_bigcode_hf.py:22: note: Subclass:
fms/models/hf/gpt_bigcode/modeling_gpt_bigcode_hf.py:22: note: def _adapt(self, input_ids: LongTensor | None = ..., attention_mask: Tensor | None = ..., position_ids: LongTensor | None = ..., past_key_values: tuple[Tensor] | None = ..., use_cache: bool | None = ..., attn_algorithm: str | None = ..., *args: Any, **kwargs: Any) -> Any
fms/models/hf/gpt_bigcode/modeling_gpt_bigcode_hf.py:84: error: "__init__" of "LMHeadModelLMHeadMixin" gets multiple values for keyword argument "bias" [misc]
fms/models/hf/llama/__init__.py:23: error: Skipping analyzing "transformers": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/gpt_bigcode/__init__.py:5: error: Skipping analyzing "transformers": module is installed, but missing library stubs or py.typed marker [import-untyped]
fms/models/hf/gpt_bigcode/__init__.py:5: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
fms/models/hf/gpt_bigcode/__init__.py:45: error: Item "str" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:45: error: Item "PathLike[Any]" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:46: error: Item "str" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:46: error: Item "PathLike[Any]" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:47: error: Item "str" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:47: error: Item "PathLike[Any]" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:48: error: Item "str" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:48: error: Item "PathLike[Any]" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:49: error: Item "str" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:49: error: Item "PathLike[Any]" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:50: error: Item "str" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:50: error: Item "PathLike[Any]" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:52: error: Item "str" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:52: error: Item "PathLike[Any]" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:55: error: Item "str" of "str | PathLike[Any] | Any" has no attribute "transformer" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:55: error: Item "PathLike[Any]" of "str | PathLike[Any] | Any" has no attribute "transformer" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:58: error: Item "str" of "str | PathLike[Any] | Any" has no attribute "transformer" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:58: error: Item "PathLike[Any]" of "str | PathLike[Any] | Any" has no attribute "transformer" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:73: error: Item "str" of "str | PathLike[Any] | Any" has no attribute "lm_head" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:73: error: Item "PathLike[Any]" of "str | PathLike[Any] | Any" has no attribute "lm_head" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:76: error: Item "str" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:76: error: Item "PathLike[Any]" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:77: error: Item "str" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:77: error: Item "PathLike[Any]" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:78: error: Item "str" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/gpt_bigcode/__init__.py:78: error: Item "PathLike[Any]" of "str | PathLike[Any] | Any" has no attribute "config" [union-attr]
fms/models/hf/__init__.py:40: error: List item 0 has incompatible type "type[HFAdaptedGPTBigCodeForCausalLM]"; expected "type[LMHeadModelLMHeadMixin]" [list-item]
fms/models/hf/__init__.py:40: error: List item 1 has incompatible type "type[HFAdaptedLLaMAForCausalLM]"; expected "type[LMHeadModelLMHeadMixin]" [list-item]
Found 135 errors in 11 files (checked 44 source files)
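
Note on the two most common error classes above: the [import-untyped] errors come from the transformers package shipping no stubs or py.typed marker and can be silenced per package with mypy's ignore_missing_imports setting (see the mypy link in the log), while the repeated [override] errors stem from mixin subclasses narrowing a **kwargs-based hook to a fixed positional signature. The sketch below is illustrative only, with hypothetical class names and assuming torch.nn.Module as the return type the log's "-> Module" notes refer to; it reproduces the flagged _get_empty_lm_head pattern and one keyword-only variant that mypy accepts as a compatible override.

from typing import Any

import torch.nn as nn  # assumption: Module in the log is torch.nn.Module


class LMHeadMixinSketch:
    """Hypothetical stand-in for the LMHeadMixin hook mypy checks against."""

    def _get_empty_lm_head(self, **kwargs: Any) -> nn.Module:
        raise NotImplementedError


class NarrowedOverride(LMHeadMixinSketch):
    # mypy reports: Signature of "_get_empty_lm_head" incompatible with
    # supertype [override] -- a required positional "bias" parameter cannot
    # accept every call the **kwargs-only supertype allows.
    def _get_empty_lm_head(self, bias: bool) -> nn.Module:
        return nn.Linear(8, 8, bias=bias)


class CompatibleOverride(LMHeadMixinSketch):
    # Keeping the parameter keyword-only with a default and re-accepting
    # **kwargs preserves the supertype's calling convention, so the override
    # passes the Liskov check.
    def _get_empty_lm_head(self, *, bias: bool = False, **kwargs: Any) -> nn.Module:
        return nn.Linear(8, 8, bias=bias)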