!8699 Add an option to skip training the epistemic model
From: @zhangxinfeng3 Reviewed-by: @zichun_ye, @wang_zi_dong Signed-off-by: @zichun_ye
commit daa670c3e9
@@ -47,6 +47,7 @@ class UncertaintyEvaluation:
             Default: None.
         epochs (int): Total number of iterations on the data. Default: 1.
         epi_uncer_model_path (str): The save or read path of the epistemic uncertainty model. Default: None.
+            If epi_uncer_model_path is 'Untrain', the epistemic model does not need to be trained.
         ale_uncer_model_path (str): The save or read path of the aleatoric uncertainty model. Default: None.
         save_model (bool): Whether to save the uncertainty model or not. If True, the epi_uncer_model_path
             and ale_uncer_model_path must not be None. If False, the model to evaluate will be loaded from
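For reference, a minimal usage sketch of the new option. The import path and the UncertaintyEvaluation constructor keywords follow the MindSpore probability toolbox of this period, and eval_epistemic_uncertainty is assumed to be the evaluation entry point; the network, dataset, and eval-batch names are hypothetical placeholders.

    from mindspore.nn.probability.toolbox import UncertaintyEvaluation

    # Passing 'Untrain' as epi_uncer_model_path tells the toolbox to skip
    # training the epistemic uncertainty model (e.g. when the wrapped
    # network already behaves as a dropout model at inference time).
    evaluation = UncertaintyEvaluation(model=network,            # hypothetical trained network
                                       train_dataset=ds_train,   # hypothetical training dataset
                                       task_type='classification',
                                       num_classes=10,
                                       epochs=1,
                                       epi_uncer_model_path='Untrain',
                                       save_model=False)
    epistemic = evaluation.eval_epistemic_uncertainty(eval_images)  # hypothetical eval batch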
@@ -111,7 +112,7 @@ class UncertaintyEvaluation:
         """
         if self.epi_uncer_model is None:
             self.epi_uncer_model = EpistemicUncertaintyModel(self.epi_model)
-        if self.epi_uncer_model.drop_count == 0:
+        if self.epi_uncer_model.drop_count == 0 and self.epi_uncer_model_path != 'Untrain':
             if self.task_type == 'classification':
                 net_loss = SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
                 net_opt = Adam(self.epi_uncer_model.trainable_params())
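Restated outside the diff: the epistemic model is now trained only when the wrapped network exposes no Dropout layers and the caller has not opted out via the 'Untrain' sentinel. A one-line sketch of the guard (helper name hypothetical):

    def _needs_epistemic_training(drop_count, epi_uncer_model_path):
        # Train only if the model has no Dropout layers (drop_count == 0)
        # and the caller did not opt out via the 'Untrain' sentinel path.
        return drop_count == 0 and epi_uncer_model_path != 'Untrain'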