7
7
from mindnlp import core
8
8
from mindnlp .core .executor import execute
9
9
10
- from ..configs import DEVICE_TARGET , ON_ORANGE_PI , use_pyboost , ON_A1 , ON_A2
10
+ from ..configs import ON_ORANGE_PI , use_pyboost , ON_A1 , ON_A2
11
11
12
12
generator_step_ = 12
13
13
@@ -74,7 +74,7 @@ def hardsigmoid(input, inplace=False):
74
74
return ops .hardsigmoid (input )
75
75
76
76
def hardswish(input: core.Tensor, inplace: bool = False) -> core.Tensor:
    """Apply the hard-swish activation by dispatching to the backend 'hswish' op.

    NOTE(review): ``inplace`` is accepted for API compatibility but is not
    acted on here — the backend result is returned as a new tensor; confirm
    callers do not rely on in-place mutation.
    """
    return execute('hswish', input)
78
78
79
79
def hardshrink(input, lambd=0.5):
    """Dispatch to the backend 'hard_shrink' op with threshold ``lambd``."""
    return execute('hard_shrink', input, lambd)
@@ -129,7 +129,7 @@ def adaptive_avg_pool2d(input, output_size):
129
129
return execute ('adaptive_avg_pool2d_ext' , input , output_size )
130
130
131
131
def dropout (input , p = 0.5 , training = True , inplace = False ):
132
- if not training :
132
+ if not training or p == 0 :
133
133
return input
134
134
out , _ = execute ('dropout_ext' , input , p )
135
135
if inplace :
@@ -138,7 +138,10 @@ def dropout(input, p=0.5, training=True, inplace=False):
138
138
return out
139
139
140
140
def dropout2d(input, p=0.5, training=False):
    """Channel-wise dropout via the backend 'dropout2d' op.

    Acts as a no-op (returns ``input`` unchanged) when ``p`` is zero or when
    not in training mode; otherwise the backend op's mask output is discarded
    and only the dropped tensor is returned.
    """
    if p == 0 or not training:
        return input
    result, _mask = execute('dropout2d', input, p)
    return result
142
145
143
146
def drop_and_mask (keep_prob , seed = None ):
144
147
seed0 , seed1 = _get_seed (seed , "dropout" )
@@ -301,6 +304,9 @@ def pad(input, pad, mode='constant', value=None):
301
304
return execute ('pad_v3' , input , new_pad , mode )
302
305
if value is None :
303
306
value = 0
307
+ if mode == "replicate" :
308
+ mode = "edge"
309
+ return execute ('pad_v3' , input , new_pad , mode )
304
310
return execute ('pad_v3' , input , new_pad , mode , value )
305
311
out = input
306
312
if (isinstance (pad , tuple ) and not pad ):
@@ -1541,8 +1547,8 @@ def _canonical_mask(
1541
1547
) -> Optional [core .Tensor ]:
1542
1548
if mask is not None :
1543
1549
_mask_dtype = mask .dtype
1544
- _mask_is_float = ops .is_floating_point (mask )
1545
- if _mask_dtype != mindspore . bool_ and not _mask_is_float :
1550
+ _mask_is_float = core .is_floating_point (mask )
1551
+ if _mask_dtype != core . bool and not _mask_is_float :
1546
1552
raise AssertionError (
1547
1553
f"only bool and floating types of { mask_name } are supported" )
1548
1554
if check_other and other_type is not None :
@@ -1552,8 +1558,8 @@ def _canonical_mask(
1552
1558
"is deprecated. Use same type for both instead."
1553
1559
)
1554
1560
if not _mask_is_float :
1555
- zero_tensor = ops .zeros_like (mask , dtype = target_type )
1556
- mask = ops .where (mask , core .Tensor (float ("-inf" ), target_type ), zero_tensor )
1561
+ zero_tensor = core .zeros_like (mask , dtype = target_type , device = mask . device )
1562
+ mask = core .where (mask , core .tensor (float ("-inf" ), dtype = target_type , device = mask . device ), zero_tensor )
1557
1563
# mask = (
1558
1564
# ops.zeros_like(mask, dtype=target_type)
1559
1565
# .masked_fill_(mask, float("-inf"))
def unfold(input, kernel_size, dilation=1, padding=0, stride=1):
    """Extract sliding local blocks (im2col).

    Dispatches to the 'im2col' kernel when ``ON_A1`` is set, otherwise to
    the 'im2col_ext' kernel; arguments are forwarded unchanged.
    """
    op_name = 'im2col' if ON_A1 else 'im2col_ext'
    return execute(op_name, input, kernel_size, dilation, padding, stride)
1577
1580
1578
1581
def fold(input, output_size, kernel_size, dilation=1, padding=0, stride=1):
    """Combine sliding local blocks into a tensor via the backend 'col2im_ext' op."""
    return execute(
        'col2im_ext',
        input, output_size, kernel_size,
        dilation, padding, stride,
    )
1582
1583
1583
1584
def ctc_loss(log_probs, targets, input_lengths, target_lengths, blank=0,
             reduction='mean', zero_infinity=False):
    """Forward all arguments unchanged to the backend 'ctc_loss' op."""
    return execute(
        'ctc_loss',
        log_probs, targets,
        input_lengths, target_lengths,
        blank, reduction, zero_infinity,
    )
0 commit comments