!10994 modify wide_and_deep to adapt for unify_ir

From: @yuchaojie
Reviewed-by: @c_34,@wuxuejian
Signed-off-by: @wuxuejian
mindspore-ci-bot 2021-01-06 16:10:36 +08:00 committed by Gitee
commit 77adcecedb
1 changed file with 0 additions and 2 deletions


@@ -212,7 +212,6 @@ class WideDeepModel(nn.Cell):
             self.deep_embeddinglookup = nn.EmbeddingLookup(self.vocab_size, self.emb_dim, target=target,
                                                            slice_mode=nn.EmbeddingLookup.TABLE_COLUMN_SLICE)
             self.dense_layer_1.dropout.dropout.shard(((1, get_group_size()),))
-            self.dense_layer_1.dropout.dropout_do_mask.shard(((1, get_group_size()),))
             self.dense_layer_1.matmul.shard(((1, get_group_size()), (get_group_size(), 1)))
             self.dense_layer_1.matmul.add_prim_attr("field_size", self.field_size)
             self.deep_mul.shard(((1, 1, get_group_size()), (1, 1, 1)))
@@ -234,7 +233,6 @@ class WideDeepModel(nn.Cell):
             self.wide_mul.shard(((1, get_group_size(), 1), (1, get_group_size(), 1)))
             self.reduce_sum.shard(((1, get_group_size(), 1),))
             self.dense_layer_1.dropout.dropout.shard(((1, get_group_size()),))
-            self.dense_layer_1.dropout.dropout_do_mask.shard(((1, get_group_size()),))
             self.dense_layer_1.matmul.shard(((1, get_group_size()), (get_group_size(), 1)))
             self.embedding_table = self.deep_embeddinglookup.embedding_table
         elif parameter_server:
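
For context, a minimal sketch (not the repository's exact code) of the resulting sharding setup for dense_layer_1 after this commit: with the unified IR, the model's Dropout cell is assumed to wrap a single dropout primitive, so only dropout.shard is configured and the separate dropout_do_mask strategy line is removed. The helper name configure_dense_layer_1 is illustrative, not part of the source.

    # Sketch only; assumes dense_layer_1.dropout exposes a single `dropout`
    # primitive under unify_ir (the `dropout_do_mask` primitive deleted by
    # this commit no longer exists).
    from mindspore.communication.management import get_group_size

    def configure_dense_layer_1(dense_layer_1, field_size):
        """Apply model-parallel shard strategies to the first deep dense layer."""
        group = get_group_size()
        # Split the dropout input along its second (hidden) dimension across devices.
        dense_layer_1.dropout.dropout.shard(((1, group),))
        # Column-split the activations and row-split the weights for the matmul.
        dense_layer_1.matmul.shard(((1, group), (group, 1)))
        # Record the field size on the primitive for downstream parallel passes.
        dense_layer_1.matmul.add_prim_attr("field_size", field_size)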