File tree 10 files changed +26
-46
lines changed
src/main/kotlin/com/kotlinnlp/simplednn
core/layers/models/merge/mergeconfig
deeplearning/attention/pointernetwork
10 files changed +26
-46
lines changed Original file line number Diff line number Diff line change @@ -11,19 +11,16 @@ import com.kotlinnlp.simplednn.core.functionalities.activations.ActivationFuncti
11
11
import com.kotlinnlp.simplednn.core.layers.LayerType
12
12
13
13
/* *
14
- * A data class that defines the configuration of an Affine layer .
14
+ * The Affine merge layer configuration.
15
15
*
16
16
* @property outputSize the size of the merged output
17
17
* @property activationFunction the output activation function
18
- * @property dropout the probability of dropout
19
18
*/
20
19
class AffineMerge (
21
20
outputSize : Int ,
22
- activationFunction : ActivationFunction ? = null ,
23
- dropout : Double = 0.0
24
- ) : OpenOutputMerge(
21
+ activationFunction : ActivationFunction ? = null
22
+ ) : VariableOutputMergeConfig(
25
23
type = LayerType .Connection .Affine ,
26
- dropout = dropout,
27
24
outputSize = outputSize,
28
25
activationFunction = activationFunction
29
26
)
Original file line number Diff line number Diff line change @@ -10,8 +10,6 @@ package com.kotlinnlp.simplednn.core.layers.models.merge.mergeconfig
10
10
import com.kotlinnlp.simplednn.core.layers.LayerType
11
11
12
12
/* *
13
- * A data class that defines the configuration of an Avg layer.
14
- *
15
- * @property dropout the probability of dropout
13
+ * The Avg merge layer configuration.
16
14
*/
17
- class AvgMerge ( dropout : Double = 0.0 ) : MergeConfiguration(type = LayerType .Connection .Avg , dropout = dropout )
15
+ class AvgMerge : MergeConfiguration (type = LayerType .Connection .Avg )
Original file line number Diff line number Diff line change @@ -11,19 +11,16 @@ import com.kotlinnlp.simplednn.core.functionalities.activations.ActivationFuncti
11
11
import com.kotlinnlp.simplednn.core.layers.LayerType
12
12
13
13
/* *
14
- * A data class that defines the configuration of a Biaffine layer .
14
+ * The Biaffine merge layer configuration.
15
15
*
16
16
* @property outputSize the size of the merged output
17
17
* @property activationFunction the output activation function
18
- * @property dropout the probability of dropout
19
18
*/
20
19
class BiaffineMerge (
21
20
outputSize : Int ,
22
- activationFunction : ActivationFunction ? = null ,
23
- dropout : Double = 0.0
24
- ) : OpenOutputMerge(
21
+ activationFunction : ActivationFunction ? = null
22
+ ) : VariableOutputMergeConfig(
25
23
type = LayerType .Connection .Biaffine ,
26
- dropout = dropout,
27
24
outputSize = outputSize,
28
25
activationFunction = activationFunction
29
26
)
Original file line number Diff line number Diff line change @@ -11,19 +11,16 @@ import com.kotlinnlp.simplednn.core.functionalities.activations.ActivationFuncti
11
11
import com.kotlinnlp.simplednn.core.layers.LayerType
12
12
13
13
/* *
14
- * A data class that defines the configuration of a Concat layer followed by a feed-forward layer .
14
+ * The ConcatFeedforward merge layer configuration.
15
15
*
16
16
* @property outputSize the size of the merged output
17
17
* @property activationFunction the output activation function
18
- * @property dropout the probability of dropout
19
18
*/
20
19
class ConcatFeedforwardMerge (
21
20
outputSize : Int ,
22
- activationFunction : ActivationFunction ? = null ,
23
- dropout : Double = 0.0
24
- ) : OpenOutputMerge(
21
+ activationFunction : ActivationFunction ? = null
22
+ ) : VariableOutputMergeConfig(
25
23
type = LayerType .Connection .ConcatFeedforward ,
26
- dropout = dropout,
27
24
outputSize = outputSize,
28
25
activationFunction = activationFunction
29
26
)
Original file line number Diff line number Diff line change @@ -10,8 +10,6 @@ package com.kotlinnlp.simplednn.core.layers.models.merge.mergeconfig
10
10
import com.kotlinnlp.simplednn.core.layers.LayerType
11
11
12
12
/* *
13
- * A data class that defines the configuration of a Concat layer.
14
- *
15
- * @property dropout the probability of dropout
13
+ * The Concat merge layer configuration.
16
14
*/
17
- class ConcatMerge ( dropout : Double = 0.0 ) : MergeConfiguration(type = LayerType .Connection .Concat , dropout = dropout )
15
+ class ConcatMerge : MergeConfiguration (type = LayerType .Connection .Concat )
Original file line number Diff line number Diff line change @@ -10,15 +10,14 @@ package com.kotlinnlp.simplednn.core.layers.models.merge.mergeconfig
10
10
import com.kotlinnlp.simplednn.core.layers.LayerType
11
11
12
12
/* *
13
- * A class that defines the configuration of a Merge layer.
13
+ * The configuration of a merge layer.
14
14
*
15
- * @property type the connection type of the output Merge layer
16
- * @property dropout the probability of dropout
15
+ * @property type the connection type
17
16
*/
18
- abstract class MergeConfiguration (val type : LayerType .Connection , val dropout : Double ) {
17
+ abstract class MergeConfiguration (val type : LayerType .Connection ) {
19
18
20
19
/* *
21
- * Check connection type.
20
+ * Check the connection type.
22
21
*/
23
22
init {
24
23
require(this .type.property == LayerType .Property .Merge )
Original file line number Diff line number Diff line change @@ -10,8 +10,6 @@ package com.kotlinnlp.simplednn.core.layers.models.merge.mergeconfig
10
10
import com.kotlinnlp.simplednn.core.layers.LayerType
11
11
12
12
/* *
13
- * A data class that defines the configuration of a Product layer.
14
- *
15
- * @property dropout the probability of dropout
13
+ * The Product merge layer configuration.
16
14
*/
17
- class ProductMerge ( dropout : Double = 0.0 ) : MergeConfiguration(type = LayerType .Connection .Product , dropout = dropout )
15
+ class ProductMerge : MergeConfiguration (type = LayerType .Connection .Product )
Original file line number Diff line number Diff line change @@ -10,8 +10,6 @@ package com.kotlinnlp.simplednn.core.layers.models.merge.mergeconfig
10
10
import com.kotlinnlp.simplednn.core.layers.LayerType
11
11
12
12
/* *
13
- * A data class that defines the configuration of a Sum layer.
14
- *
15
- * @property dropout the probability of dropout
13
+ * The Sum merge layer configuration.
16
14
*/
17
- class SumMerge ( dropout : Double = 0.0 ) : MergeConfiguration(type = LayerType .Connection .Sum , dropout = dropout )
15
+ class SumMerge : MergeConfiguration (type = LayerType .Connection .Sum )
Original file line number Diff line number Diff line change @@ -11,16 +11,14 @@ import com.kotlinnlp.simplednn.core.functionalities.activations.ActivationFuncti
11
11
import com.kotlinnlp.simplednn.core.layers.LayerType
12
12
13
13
/* *
14
- * A class that defines the configuration of the Merge layer has a configurable output size and activation.
14
+ * The configuration of a merge layer with a variable output size and an optional activation function.
15
15
*
16
- * @property type the connection type of the output Merge layer
17
- * @property dropout the probability of dropout
16
+ * @property type the connection type
18
17
* @property outputSize the size of the merged output
19
18
* @property activationFunction the output activation function
20
19
*/
21
- abstract class OpenOutputMerge (
20
+ abstract class VariableOutputMergeConfig (
22
21
type : LayerType .Connection ,
23
- dropout : Double ,
24
22
val outputSize : Int ,
25
23
val activationFunction : ActivationFunction ?
26
- ) : MergeConfiguration(type = type, dropout = dropout )
24
+ ) : MergeConfiguration(type)
Original file line number Diff line number Diff line change @@ -69,7 +69,7 @@ class PointerNetworkModel(
69
69
sizes = listOf (this .inputSize, this .vectorSize)),
70
70
LayerInterface (
71
71
size = this .mergeOutputSize,
72
- activationFunction = (mergeConfig as ? OpenOutputMerge )?.activationFunction,
72
+ activationFunction = (mergeConfig as ? VariableOutputMergeConfig )?.activationFunction,
73
73
connectionType = mergeConfig.type),
74
74
weightsInitializer = weightsInitializer,
75
75
biasesInitializer = biasesInitializer)
You can’t perform that action at this time.
0 commit comments