diff --git a/mindspore/nn/layer/activation.py b/mindspore/nn/layer/activation.py
index 4a843c6c46..1578194a72 100644
--- a/mindspore/nn/layer/activation.py
+++ b/mindspore/nn/layer/activation.py
@@ -415,7 +415,7 @@ class GELU(Cell):
 
     def __init__(self):
         super(GELU, self).__init__()
-        self.gelu = _selected_ops.Gelu()
+        self.gelu = _selected_ops.GeLU()
 
     def construct(self, x):
         return self.gelu(x)
@@ -458,7 +458,7 @@ class FastGelu(Cell):
 
     def __init__(self):
         super(FastGelu, self).__init__()
-        self.fast_gelu = _selected_ops.FastGelu()
+        self.fast_gelu = _selected_ops.FastGeLU()
 
     def construct(self, x):
         return self.fast_gelu(x)
diff --git a/mindspore/ops/_selected_ops.py b/mindspore/ops/_selected_ops.py
index 9a51916a48..226a6eb086 100644
--- a/mindspore/ops/_selected_ops.py
+++ b/mindspore/ops/_selected_ops.py
@@ -73,13 +73,13 @@ class Tanh:
 
 
 @op_selector
-class Gelu:
+class GeLU:
     def __call__(self, *args):
         pass
 
 
 @op_selector
-class FastGelu:
+class FastGeLU:
     def __call__(self, *args):
         pass
diff --git a/model_zoo/research/nlp/gpt2/src/GPT2_model.py b/model_zoo/research/nlp/gpt2/src/GPT2_model.py
index 1a0349faba..c7baa4e3df 100644
--- a/model_zoo/research/nlp/gpt2/src/GPT2_model.py
+++ b/model_zoo/research/nlp/gpt2/src/GPT2_model.py
@@ -499,7 +499,7 @@ class FeedForward(nn.Cell):
 
         self.layernorm = LayerNorm(in_channels=in_channels)
         self.residual_connect = ResidualConnection(dropout_prob=hidden_dropout)
-        self.gelu_act = P.Gelu()
+        self.gelu_act = P.GeLU()
         self.dropout = nn.Dropout(1 - hidden_dropout)
         self.use_dropout = hidden_dropout > 0
         self.reshape = P.Reshape()
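
Note: this diff is a pure rename of the selected ops and primitive from Gelu/FastGelu to GeLU/FastGeLU; the public nn.GELU and nn.FastGelu cells keep their names, so user-facing code should be unaffected. A minimal sketch of exercising the unchanged public interface after the rename (the input values are illustrative):

import numpy as np
import mindspore.nn as nn
from mindspore import Tensor

# nn.GELU still dispatches through the op selector, which now
# resolves to the renamed GeLU op internally.
gelu = nn.GELU()
x = Tensor(np.array([-1.0, 0.0, 1.0], dtype=np.float32))
print(gelu(x))  # elementwise GELU activation of x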