@@ -63,7 +63,6 @@ def get_config(self):
63
63
base_config = super (Masking , self ).get_config ()
64
64
return dict (list (base_config .items ()) + list (config .items ()))
65
65
66
-
67
66
class Dropout (Layer ):
68
67
'''Applies Dropout to the input. Dropout consists in randomly setting
69
68
a fraction `p` of input units to 0 at each update during training time,
@@ -1510,3 +1509,25 @@ def get_config(self):
1510
1509
config = {'indices' : self .indices }
1511
1510
base_config = super (SetSubtensor , self ).get_config ()
1512
1511
return dict (list (base_config .items ()) + list (config .items ()))
1512
+
1513
+
1514
class RemoveMask(Layer):
    '''Pass-through layer that stops an incoming mask from propagating.

    The output tensor is the input tensor unchanged (inherited `call`);
    only the mask is affected: `compute_mask` returns `None`, so every
    downstream layer receives an unmasked input.
    '''
    def __init__(self, **kwargs):
        super(RemoveMask, self).__init__(**kwargs)
        # This layer must be allowed to *receive* a mask in order to drop it.
        # NOTE(review): in classic Keras, Layer.__call__ raises when a masked
        # input reaches a layer without supports_masking=True — confirm
        # against this repo's Layer base class.
        self.supports_masking = True

    def compute_mask(self, input, input_mask=None):
        # Swallow the mask unconditionally: downstream layers see no mask.
        return None
1520
+
1521
+ """
1522
+ class LambdaRemoveMask(Lambda):
1523
+ def __init__(self, lambda_fn):
1524
+ super(LambdaRemoveMask, self).__init__((lambda_fn))
1525
+ #self.supports_masking = True
1526
+
1527
+ def compute_mask(self, input, input_mask=None):
1528
+ return None
1529
+
1530
+ #def get_config(self):
1531
+ #base_config = super(LambdaRemoveMask, self).get_config()
1532
+ #return dict(list(base_config.items()))
1533
+ """
0 commit comments