forked from mindspore-Ecosystem/mindspore
!7741 [ME] format code
Merge pull request !7741 from chenzhongming/zomi_master
This commit is contained in: commit 7f91a3faf2
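Every hunk in this commit applies the same cleanup: __str__ and __repr__ stop embedding tensor values, and extend_repr drops the intermediate str_info variable in favor of returning the formatted string directly, or accumulating into a short s with +=. A minimal sketch of the before/after pattern (the Demo class and its attributes are illustrative only, not part of the commit):

class Demo:
    """Illustrative only: the refactor pattern applied throughout this commit."""

    def __init__(self, keep_prob=0.5, has_bias=True, bias=0.1):
        self.keep_prob = keep_prob
        self.has_bias = has_bias
        self.bias = bias

    def extend_repr_old(self):
        # Before: build an intermediate str_info, then return it.
        str_info = 'keep_prob={}'.format(self.keep_prob)
        if self.has_bias:
            str_info = str_info + ', bias={}'.format(self.bias)
        return str_info

    def extend_repr(self):
        # After: accumulate into a short name with += and return it.
        s = 'keep_prob={}'.format(self.keep_prob)
        if self.has_bias:
            s += ', bias={}'.format(self.bias)
        return s

assert Demo().extend_repr_old() == Demo().extend_repr()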
@@ -257,10 +257,10 @@ class Value:
         self.data_format = data_format
 
     def __str__(self):
-        return self.name + str(list(self.shape)) + str(self.value)
+        return self.name + str(list(self.shape))
 
     def __repr__(self):
-        return "%s.%s%s%s" % (self.name, self.dtype, str(list(self.shape)), str(self.value))
+        return "%s.%s%s" % (self.name, self.dtype, str(list(self.shape)))
 
     def get_size(self):
         return 1
@@ -158,16 +158,10 @@ class Parameter(MetaTensor_):
         return (Tensor, data)
 
     def __str__(self):
-        value_str = MetaTensor.__str__(self)
-        if isinstance(self, Tensor):
-            value_str = Tensor.__str__(self)
-        return f'Parameter (name={self._param_info.name}, value={value_str})'
+        return f'Parameter (name={self._param_info.name})'
 
     def __repr__(self):
-        value_str = MetaTensor.__repr__(self)
-        if isinstance(self, Tensor):
-            value_str = Tensor.__repr__(self)
-        return f'Parameter (name={self._param_info.name}, value={value_str})'
+        return f'Parameter (name={self._param_info.name})'
 
     def __parameter__(self):
         """For parse check."""
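The Parameter hunk above changes behavior, not just style: printing a Parameter now shows only its name, without the tensor value. A hedged illustration (the parameter name and shape are arbitrary):

import numpy as np
from mindspore import Parameter, Tensor

p = Parameter(Tensor(np.ones((2, 2), np.float32)), name='w')
# After this commit: "Parameter (name=w)"; previously the repr also embedded value=...
print(p)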
@@ -121,8 +121,7 @@ class Dropout(Cell):
         return self.dropout_do_mask(x, output, keep_prob)
 
     def extend_repr(self):
-        str_info = 'keep_prob={}, dtype={}'.format(self.keep_prob, self.dtype)
-        return str_info
+        return 'keep_prob={}, dtype={}'.format(self.keep_prob, self.dtype)
 
 
 class Flatten(Cell):
@@ -365,8 +364,7 @@ class Norm(Cell):
         return x
 
     def extend_repr(self):
-        str_info = 'axis={}, keep_dims={}'.format(self.axis, self.keep_dims)
-        return str_info
+        return 'axis={}, keep_dims={}'.format(self.axis, self.keep_dims)
 
 
 class OneHot(Cell):
@@ -547,9 +547,8 @@ class LayerNorm(Cell):
 
     def extend_repr(self):
         """Display instance object as string."""
-        s = 'normalized_shape={}, begin_norm_axis={}, begin_params_axis={}, gamma{}, beta={}'.format(
+        return 'normalized_shape={}, begin_norm_axis={}, begin_params_axis={}, gamma{}, beta={}'.format(
             self.normalized_shape, self.begin_norm_axis, self.begin_params_axis, self.gamma, self.beta)
-        return s
 
 
 class GroupNorm(Cell):
@@ -642,5 +641,4 @@ class GroupNorm(Cell):
 
     def extend_repr(self):
         """Display instance object as string."""
-        s = 'num_groups={}, num_channels={}'.format(self.num_groups, self.num_channels)
-        return s
+        return 'num_groups={}, num_channels={}'.format(self.num_groups, self.num_channels)
@@ -1008,14 +1008,13 @@ class DenseQuant(Cell):
 
     def extend_repr(self):
         """A pretty print for Dense layer."""
-        str_info = 'in_channels={}, out_channels={}, weight={}, has_bias={}'.format(
+        s = 'in_channels={}, out_channels={}, weight={}, has_bias={}'.format(
             self.in_channels, self.out_channels, self.weight, self.has_bias)
         if self.has_bias:
-            str_info = str_info + ', bias={}'.format(self.bias)
+            s += ', bias={}'.format(self.bias)
         if self.activation_flag:
-            str_info = str_info + ', activation={}'.format(self.activation)
-
-        return str_info
+            s += ', activation={}'.format(self.activation)
+        return s
 
 
 class _QuantActivation(Cell):
@@ -1387,13 +1386,13 @@ class QuantBlock(Cell):
         return x
 
     def extend_repr(self):
-        str_info = f'quant={self.quant}, core_op={type(self.core_op)}, weight=shape[{self.weight.shape}]'
+        s = f'quant={self.quant}, core_op={type(self.core_op)}, weight=shape[{self.weight.shape}]'
         if self.has_bias:
-            str_info = str_info + f', bias=shape[{self.bias.shape}]'
+            s += f', bias=shape[{self.bias.shape}]'
         if self.has_act:
-            str_info = str_info + f', activation={self.activation}'
-        str_info = str_info + f', dequant={self.dequant}'
-        return str_info
+            s += f', activation={self.activation}'
+        s += f', dequant={self.dequant}'
+        return s
 
 
 class QuantMindirBlock(Cell):
@@ -1454,9 +1453,9 @@ class QuantMindirBlock(Cell):
         return x
 
     def extend_repr(self):
-        str_info = f'core_op={type(self.core_op)}, weight=shape[{self.weight.shape}]'
+        s = f'core_op={type(self.core_op)}, weight=shape[{self.weight.shape}]'
         if self.has_bias:
-            str_info = str_info + f', bias=shape[{self.bias.shape}]'
+            s += f', bias=shape[{self.bias.shape}]'
         if self.has_act:
-            str_info = str_info + f', activation={self.activation}'
-        return str_info
+            s += f', activation={self.activation}'
+        return s
@@ -100,8 +100,7 @@ class GumbelCDF(Bijector):
         return self._parameter_type
 
     def extend_repr(self):
-        str_info = f'loc = {self.loc}, scale = {self.scale}'
-        return str_info
+        return f'loc = {self.loc}, scale = {self.scale}'
 
     def shape_mapping(self, shape):
         return shape
@@ -81,8 +81,7 @@ class PowerTransform(Bijector):
         return self._power
 
     def extend_repr(self):
-        str_info = f'power = {self.power}'
-        return str_info
+        return f'power = {self.power}'
 
     def shape_mapping(self, shape):
         return shape
@@ -90,8 +90,7 @@ class ScalarAffine(Bijector):
         return self._shift
 
     def extend_repr(self):
-        str_info = f'scale = {self.scale}, shift = {self.shift}'
-        return str_info
+        return f'scale = {self.scale}, shift = {self.shift}'
 
     def shape_mapping(self, shape):
         return shape
@@ -118,8 +118,7 @@ class Softplus(Bijector):
         return self._sharpness
 
     def extend_repr(self):
-        str_info = f'sharpness = {self.sharpness}'
-        return str_info
+        return f'sharpness = {self.sharpness}'
 
     def shape_mapping(self, shape):
         return shape
@@ -141,15 +141,15 @@ class _ConvVariational(_Conv):
         return outputs
 
     def extend_repr(self):
-        str_info = 'in_channels={}, out_channels={}, kernel_size={}, stride={}, pad_mode={}, ' \
-                   'padding={}, dilation={}, group={}, weight_mean={}, weight_std={}, has_bias={}'\
+        s = 'in_channels={}, out_channels={}, kernel_size={}, stride={}, pad_mode={}, ' \
+            'padding={}, dilation={}, group={}, weight_mean={}, weight_std={}, has_bias={}'\
             .format(self.in_channels, self.out_channels, self.kernel_size, self.stride, self.pad_mode, self.padding,
                     self.dilation, self.group, self.weight_posterior.mean, self.weight_posterior.untransformed_std,
                     self.has_bias)
         if self.has_bias:
-            str_info = str_info + ', bias_mean={}, bias_std={}'\
+            s += ', bias_mean={}, bias_std={}'\
                 .format(self.bias_posterior.mean, self.bias_posterior.untransformed_std)
-        return str_info
+        return s
 
     def _apply_variational_bias(self, inputs):
         bias_posterior_tensor = self.bias_posterior("sample")
@@ -107,16 +107,15 @@ class _DenseVariational(Cell):
         return outputs
 
     def extend_repr(self):
-        str_info = 'in_channels={}, out_channels={}, weight_mean={}, weight_std={}, has_bias={}' \
+        s = 'in_channels={}, out_channels={}, weight_mean={}, weight_std={}, has_bias={}' \
             .format(self.in_channels, self.out_channels, self.weight_posterior.mean,
                     self.weight_posterior.untransformed_std, self.has_bias)
         if self.has_bias:
-            str_info = str_info + ', bias_mean={}, bias_std={}' \
+            s += ', bias_mean={}, bias_std={}' \
                 .format(self.bias_posterior.mean, self.bias_posterior.untransformed_std)
-
         if self.activation_flag:
-            str_info = str_info + ', activation={}'.format(self.activation)
-        return str_info
+            s += ', activation={}'.format(self.activation)
+        return s
 
     def _apply_variational_bias(self, inputs):
         bias_posterior_tensor = self.bias_posterior("sample")
@@ -141,10 +141,10 @@ class Bernoulli(Distribution):
 
     def extend_repr(self):
         if self.is_scalar_batch:
-            str_info = f'probs = {self.probs}'
+            s = f'probs = {self.probs}'
         else:
-            str_info = f'batch_shape = {self._broadcast_shape}'
-        return str_info
+            s = f'batch_shape = {self._broadcast_shape}'
+        return s
 
     @property
     def probs(self):
@@ -157,10 +157,10 @@ class Categorical(Distribution):
 
     def extend_repr(self):
         if self.is_scalar_batch:
-            str_info = f'probs = {self.probs}'
+            s = f'probs = {self.probs}'
         else:
-            str_info = f'batch_shape = {self._broadcast_shape}'
-        return str_info
+            s = f'batch_shape = {self._broadcast_shape}'
+        return s
 
     @property
     def probs(self):
@@ -145,10 +145,10 @@ class Exponential(Distribution):
 
     def extend_repr(self):
         if self.is_scalar_batch:
-            str_info = f'rate = {self.rate}'
+            s = f'rate = {self.rate}'
         else:
-            str_info = f'batch_shape = {self._broadcast_shape}'
-        return str_info
+            s = f'batch_shape = {self._broadcast_shape}'
+        return s
 
     @property
     def rate(self):
@@ -150,10 +150,10 @@ class Geometric(Distribution):
 
     def extend_repr(self):
         if self.is_scalar_batch:
-            str_info = f'probs = {self.probs}'
+            s = f'probs = {self.probs}'
         else:
-            str_info = f'batch_shape = {self._broadcast_shape}'
-        return str_info
+            s = f'batch_shape = {self._broadcast_shape}'
+        return s
 
     @property
     def probs(self):
@@ -163,10 +163,10 @@ class LogNormal(msd.TransformedDistribution):
 
     def extend_repr(self):
         if self.is_scalar_batch:
-            str_info = f'loc = {self._mean_value}, scale = {self._sd_value}'
+            s = f'loc = {self._mean_value}, scale = {self._sd_value}'
         else:
-            str_info = f'batch_shape = {self._broadcast_shape}'
-        return str_info
+            s = f'batch_shape = {self._broadcast_shape}'
+        return s
 
     def _mean(self, loc=None, scale=None):
         """
@@ -156,10 +156,10 @@ class Logistic(Distribution):
 
     def extend_repr(self):
         if self.is_scalar_batch:
-            str_info = f'location = {self._loc}, scale = {self._scale}'
+            s = f'location = {self._loc}, scale = {self._scale}'
         else:
-            str_info = f'batch_shape = {self._broadcast_shape}'
-        return str_info
+            s = f'batch_shape = {self._broadcast_shape}'
+        return s
 
     @property
     def loc(self):
@@ -149,10 +149,10 @@ class Normal(Distribution):
 
     def extend_repr(self):
         if self.is_scalar_batch:
-            str_info = f'mean = {self._mean_value}, standard deviation = {self._sd_value}'
+            s = f'mean = {self._mean_value}, standard deviation = {self._sd_value}'
         else:
-            str_info = f'batch_shape = {self._broadcast_shape}'
-        return str_info
+            s = f'batch_shape = {self._broadcast_shape}'
+        return s
 
     def _mean(self, mean=None, sd=None):
         """
@@ -154,10 +154,10 @@ class Uniform(Distribution):
 
     def extend_repr(self):
         if self.is_scalar_batch:
-            str_info = f'low = {self.low}, high = {self.high}'
+            s = f'low = {self.low}, high = {self.high}'
         else:
-            str_info = f'batch_shape = {self._broadcast_shape}'
-        return str_info
+            s = f'batch_shape = {self._broadcast_shape}'
+        return s
 
     @property
     def low(self):
@@ -392,15 +392,12 @@ class Dense_Thor_GPU(Cell):
 
     def extend_repr(self):
        """extend_repr"""
-        str_info = 'in_channels={}, out_channels={}, weight={}, has_bias={}' \
-            .format(self.in_channels, self.out_channels, self.weight, self.has_bias)
+        s = 'in_channels={}, out_channels={}'.format(self.in_channels, self.out_channels)
         if self.has_bias:
-            str_info = str_info + ', bias={}'.format(self.bias)
-
+            s += ', has_bias={}'.format(self.has_bias)
         if self.activation_flag:
-            str_info = str_info + ', activation={}'.format(self.activation)
-
-        return str_info
+            s += ', activation={}'.format(self.activation)
+        return s
 
 
 class Conv2d_Thor(_Conv):
@@ -775,12 +772,9 @@ class Dense_Thor(Cell):
 
     def extend_repr(self):
         """extend_repr"""
-        str_info = 'in_channels={}, out_channels={}, weight={}, has_bias={}' \
-            .format(self.in_channels, self.out_channels, self.weight, self.has_bias)
+        s = 'in_channels={}, out_channels={}'.format(self.in_channels, self.out_channels)
         if self.has_bias:
-            str_info = str_info + ', bias={}'.format(self.bias)
-
+            s += ', has_bias={}'.format(self.has_bias)
         if self.activation_flag:
-            str_info = str_info + ', activation={}'.format(self.activation)
-
-        return str_info
+            s += ', activation={}'.format(self.activation)
+        return s
@@ -102,12 +102,10 @@ class GNNFeatureTransform(nn.Cell):
         return output
 
     def extend_repr(self):
-        str_info = 'in_channels={}, out_channels={}, weight={}, has_bias={}' \
-            .format(self.in_channels, self.out_channels, self.weight, self.has_bias)
+        s = 'in_channels={}, out_channels={}'.format(self.in_channels, self.out_channels)
         if self.has_bias:
-            str_info = str_info + ', bias={}'.format(self.bias)
-
-        return str_info
+            s += ', has_bias={}'.format(self.has_bias)
+        return s
 
 
 class _BaseAggregator(nn.Cell):
@@ -270,12 +270,9 @@ class Dense_Thor(Cell):
 
     def extend_repr(self):
         """extend_repr"""
-        str_info = 'in_channels={}, out_channels={}, weight={}, has_bias={}' \
-            .format(self.in_channels, self.out_channels, self.weight, self.has_bias)
+        s = 'in_channels={}, out_channels={}'.format(self.in_channels, self.out_channels)
         if self.has_bias:
-            str_info = str_info + ', bias={}'.format(self.bias)
-
+            s += ', bias={}'.format(self.bias)
         if self.activation_flag:
-            str_info = str_info + ', activation={}'.format(self.activation)
-
-        return str_info
+            s += ', activation={}'.format(self.activation)
+        return s
@@ -104,12 +104,10 @@ class GNNFeatureTransform(nn.Cell):
         return output
 
     def extend_repr(self):
-        str_info = 'in_channels={}, out_channels={}, weight={}, has_bias={}' \
-            .format(self.in_channels, self.out_channels, self.weight, self.has_bias)
+        s = 'in_channels={}, out_channels={}'.format(self.in_channels, self.out_channels)
         if self.has_bias:
-            str_info = str_info + ', bias={}'.format(self.bias)
-
-        return str_info
+            s += ', has_bias={}'.format(self.has_bias)
+        return s
 
 
 class _BaseAggregator(nn.Cell):
@@ -470,12 +470,10 @@ class Dense_Thor(Cell):
 
     def extend_repr(self):
         """extend_repr"""
-        str_info = 'in_channels={}, out_channels={}, weight={}, has_bias={}' \
-            .format(self.in_channels, self.out_channels, self.weight, self.has_bias)
+        s = 'in_channels={}, out_channels={}'.format(self.in_channels, self.out_channels)
         if self.has_bias:
-            str_info = str_info + ', bias={}'.format(self.bias)
-
+            s += ', has_bias={}'.format(self.has_bias)
         if self.activation_flag:
-            str_info = str_info + ', activation={}'.format(self.activation)
+            s += ', activation={}'.format(self.activation)
 
-        return str_info
+        return s
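For context, extend_repr is the hook that MindSpore's nn.Cell uses to append per-layer fields to a cell's printed representation, so these hunks only simplify how the repr string is assembled, not what is printed. A quick, hedged way to see the output (keep_prob is the Dropout argument of this API generation):

import mindspore.nn as nn

net = nn.Dropout(keep_prob=0.8)
# The printed line for Dropout includes extend_repr() output, e.g. keep_prob=0.8, dtype=...
print(net)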