From ed128dad783613dcd133953531c662116fc85310 Mon Sep 17 00:00:00 2001
From: daspartho
Date: Tue, 13 Sep 2022 00:00:59 +0530
Subject: [PATCH 1/2] added type hints

---
 .../models/m2m_100/modeling_m2m_100.py       | 38 +++++++++----------
 1 file changed, 19 insertions(+), 19 deletions(-)

diff --git a/src/transformers/models/m2m_100/modeling_m2m_100.py b/src/transformers/models/m2m_100/modeling_m2m_100.py
index 3abe593bb129a7..7c6a85a4df1a0b 100755
--- a/src/transformers/models/m2m_100/modeling_m2m_100.py
+++ b/src/transformers/models/m2m_100/modeling_m2m_100.py
@@ -712,13 +712,13 @@ def __init__(self, config: M2M100Config, embed_tokens: Optional[nn.Embedding] =
 
     def forward(
         self,
-        input_ids=None,
-        attention_mask=None,
-        head_mask=None,
-        inputs_embeds=None,
-        output_attentions=None,
-        output_hidden_states=None,
-        return_dict=None,
+        input_ids: Optional[torch.Tensor] = None,
+        attention_mask: Optional[torch.Tensor] = None,
+        head_mask: Optional[torch.Tensor] = None,
+        inputs_embeds: Optional[torch.Tensor] = None,
+        output_attentions: Optional[bool] = None,
+        output_hidden_states: Optional[bool] = None,
+        return_dict: Optional[bool] = None,
     ):
         r"""
         Args:
@@ -887,18 +887,18 @@ def __init__(self, config: M2M100Config, embed_tokens: Optional[nn.Embedding] =
 
     def forward(
         self,
-        input_ids=None,
-        attention_mask=None,
-        encoder_hidden_states=None,
-        encoder_attention_mask=None,
-        head_mask=None,
-        cross_attn_head_mask=None,
-        past_key_values=None,
-        inputs_embeds=None,
-        use_cache=None,
-        output_attentions=None,
-        output_hidden_states=None,
-        return_dict=None,
+        input_ids: Optional[torch.Tensor] = None,
+        attention_mask: Optional[torch.Tensor] = None,
+        encoder_hidden_states: Optional[torch.Tensor] = None,
+        encoder_attention_mask: Optional[torch.Tensor] = None,
+        head_mask: Optional[torch.Tensor] = None,
+        cross_attn_head_mask: Optional[torch.Tensor] = None,
+        past_key_values: Optional[List[torch.FloatTensor]] = None,
+        inputs_embeds: Optional[torch.Tensor] = None,
+        use_cache: Optional[bool] = None,
+        output_attentions: Optional[bool] = None,
+        output_hidden_states: Optional[bool] = None,
+        return_dict: Optional[bool] = None,
     ):
         r"""
         Args:

From 5749147775644c755e68718007f8010554e3fb63 Mon Sep 17 00:00:00 2001
From: daspartho
Date: Tue, 13 Sep 2022 00:09:30 +0530
Subject: [PATCH 2/2] fixed typo

---
 src/transformers/models/m2m_100/modeling_m2m_100.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/transformers/models/m2m_100/modeling_m2m_100.py b/src/transformers/models/m2m_100/modeling_m2m_100.py
index 7c6a85a4df1a0b..092496f9f2d128 100755
--- a/src/transformers/models/m2m_100/modeling_m2m_100.py
+++ b/src/transformers/models/m2m_100/modeling_m2m_100.py
@@ -17,7 +17,7 @@
 
 import math
 import random
-from typing import Optional, Tuple, Union
+from typing import List, Optional, Tuple, Union
 
 import torch
 from torch import nn
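
For context on [PATCH 2/2]: the "typo" being fixed is the missing `List` import that [PATCH 1/2] started using in `Optional[List[torch.FloatTensor]]`. Because this module does not use `from __future__ import annotations`, signature annotations are evaluated eagerly when each `def` statement executes, so the undefined name fails the moment the module is imported rather than at call time. A minimal standalone sketch reproducing the failure (not part of the patch itself):

    from typing import Optional  # note: List deliberately NOT imported

    import torch

    try:
        # Annotations are evaluated when the `def` statement runs, so the
        # missing name surfaces immediately, before the function is ever called.
        def forward(past_key_values: Optional[List[torch.FloatTensor]] = None):
            ...
    except NameError as err:
        print(err)  # -> name 'List' is not defined

Adding `List` to the `typing` import, as the second commit does, resolves the `NameError`.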