Modify the default activation function for BERT

shibeiji 2020-11-30 08:19:52 +08:00
parent a6845395e6
commit bde36af04c
1 changed file with 1 addition and 1 deletion


@@ -103,7 +103,7 @@ if cfg.bert_network == 'large':
         num_hidden_layers=24,
         num_attention_heads=16,
         intermediate_size=4096,
-        hidden_act="fast_gelu",
+        hidden_act="gelu",
         hidden_dropout_prob=0.1,
         attention_probs_dropout_prob=0.1,
         max_position_embeddings=512,
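
For reference, the two settings differ only in how GELU is evaluated. A minimal sketch below, assuming "gelu" maps to the exact erf-based GELU and "fast_gelu" to a tanh-style approximation; the actual formula used by the framework's fast_gelu kernel may differ.

import math

def gelu(x: float) -> float:
    # Exact GELU: 0.5 * x * (1 + erf(x / sqrt(2)))
    return 0.5 * x * (1.0 + math.erf(x / math.sqrt(2.0)))

def fast_gelu(x: float) -> float:
    # Common tanh-based approximation of GELU; assumed here as a stand-in
    # for the framework's "fast_gelu" kernel, which may use a different form.
    return 0.5 * x * (1.0 + math.tanh(math.sqrt(2.0 / math.pi) * (x + 0.044715 * x ** 3)))

if __name__ == "__main__":
    # Compare the two variants at a few sample points.
    for v in (-2.0, -0.5, 0.0, 0.5, 2.0):
        print(f"x={v:+.1f}  gelu={gelu(v):+.6f}  fast_gelu~={fast_gelu(v):+.6f}")

The numerical difference between the two is small, so this change mainly affects which kernel the config selects rather than model accuracy.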