Skip to content

Commit 4db5e03

Browse files
committed
1. Bugs fixed
2. Code refactoring
3. Performance improvement
1 parent df4406b commit 4db5e03

21 files changed

+307
-373
lines changed

README.md

+7-7
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ major | JJ | S
6161
financial | JJ | S
6262
centers | NNS | S
6363
. | PUN | S
64-
| |
64+
---empty line---
6565
! | PUN | S
6666
p | FW | S
6767
' | PUN | S
@@ -162,21 +162,21 @@ PRETRAIN_TYPE = Embedding
162162
WORDEMBEDDING_FILENAME = Data\WordEmbedding\wordvec_chs.bin
163163
\#The context range of word embedding. In below example, the context is current token, previous token and next token
164164
\#If more than one token are combined, this feature would use a plenty of memory.
165-
WORDEMBEDDING_CONTEXT: -1,0,1
165+
WORDEMBEDDING_CONTEXT = -1,0,1
166166
\#The column index applied word embedding feature
167167
WORDEMBEDDING_COLUMN = 0
168168

169169
\#The following setting is for pretrained model in 'Autoencoder' type.
170170
\#The feature configuration file for pretrained model.
171-
AUTOENCODER_CONFIG: D:\RNNSharpDemoPackage\config_autoencoder.txt
171+
AUTOENCODER_CONFIG = D:\RNNSharpDemoPackage\config_autoencoder.txt
172172

173173
\#The following setting is the configuration file for source sequence encoder which is only for sequence-to-sequence task that MODEL_TYPE equals to SEQ2SEQ.
174174
\#In this example, since MODEL_TYPE is SEQLABEL, so we comment it out.
175-
\#SEQ2SEQ_AUTOENCODER_CONFIG: D:\RNNSharpDemoPackage\config_seq2seq_autoencoder.txt
175+
\#SEQ2SEQ_AUTOENCODER_CONFIG = D:\RNNSharpDemoPackage\config_seq2seq_autoencoder.txt
176176

177177
\#The context range of run time feature. In below example, RNNSharp will use the output of previous token as run time feature for current token
178178
\#Note that, bi-directional model does not support run time feature, so we comment it out.
179-
\#RTFEATURE_CONTEXT: -1
179+
\#RTFEATURE_CONTEXT = -1
180180

181181
## Training file format
182182

@@ -199,7 +199,7 @@ major | JJ | S
199199
financial | JJ | S
200200
centers | NNS | S
201201
. | PUN | S
202-
| |
202+
---empty line---
203203
! | PUN | S
204204
p | FW | S
205205
' | PUN | S
@@ -233,7 +233,7 @@ is |
233233
your |
234234
name |
235235
? |
236-
|
236+
---empty line---
237237
I |
238238
am |
239239
Zhongkai |

RNNSharp/Config.cs

+9-1
Original file line numberDiff line numberDiff line change
@@ -200,6 +200,10 @@ public void LoadFeatureConfigFromFile(string configFilePath)
200200
{
201201
NetworkType = NETWORKTYPE.BiDirectionalAverage;
202202
}
203+
else if (networkType.Equals(NETWORKTYPE.ForwardSeq2SeqLabeling.ToString(), StringComparison.InvariantCultureIgnoreCase))
204+
{
205+
NetworkType = NETWORKTYPE.ForwardSeq2SeqLabeling;
206+
}
203207
else
204208
{
205209
throw new ArgumentException($"Invalidated network type: {networkType}");
@@ -212,7 +216,7 @@ public void LoadFeatureConfigFromFile(string configFilePath)
212216
SetTFeatures();
213217

214218
//Load auto-encoder model for sequence-to-sequence. This model is used to encode source sequence
215-
if (NetworkType == NETWORKTYPE.ForwardSeq2Seq)
219+
if (NetworkType == NETWORKTYPE.ForwardSeq2Seq || NetworkType == NETWORKTYPE.ForwardSeq2SeqLabeling)
216220
{
217221
var seqAutoEncoderConfigFilePath = GetFilePath(currentDirectory,
218222
config.GetValueRequired(SEQ2SEQ_AUTOENCODER_CONFIG));
@@ -360,6 +364,10 @@ var type in
360364
OutputLayerConfig = new SimpleLayerConfig();
361365
Logger.WriteLine("Initialize configuration for simple layer.");
362366
break;
367+
368+
default:
369+
Logger.WriteLine($"Invalidated output layer type {sLayerType}");
370+
throw new ArgumentException($"Invalidated output layer type {sLayerType}");
363371
}
364372
}
365373

RNNSharp/Enums.cs

+1
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@ public enum NETWORKTYPE
2727
{
2828
Forward = 0,
2929
ForwardSeq2Seq,
30+
ForwardSeq2SeqLabeling,
3031
BiDirectional,
3132
BiDirectionalAverage
3233
}

RNNSharp/Layers/DropoutLayer.cs

+25-15
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
using AdvUtils;
22
using System;
33
using System.IO;
4+
using System.Numerics;
45

56
namespace RNNSharp
67
{
@@ -42,12 +43,35 @@ public override Neuron CopyNeuronTo(Neuron neuron)
4243
return dropoutNeuron;
4344
}
4445

46+
public override void PreUpdateWeights(Neuron neuron, float[] errs)
47+
{
48+
DropoutNeuron dropoutNeuron = neuron as DropoutNeuron;
49+
dropoutNeuron.Cells.CopyTo(Cells, 0);
50+
for (int i = 0; i < LayerSize; i++)
51+
{
52+
if (dropoutNeuron.mask[i])
53+
{
54+
Errs[i] = 0;
55+
}
56+
else
57+
{
58+
Errs[i] = errs[i];
59+
}
60+
}
61+
}
62+
4563
public override void InitializeWeights(int sparseFeatureSize, int denseFeatureSize)
4664
{
65+
SparseFeatureSize = sparseFeatureSize;
66+
DenseFeatureSize = denseFeatureSize;
67+
if (DenseFeatureSize % Vector<float>.Count != 0)
68+
{
69+
DenseFeatureSize += (Vector<float>.Count - (DenseFeatureSize % Vector<float>.Count));
70+
}
71+
4772
if (denseFeatureSize > 0)
4873
{
4974
Logger.WriteLine("Initializing dense feature matrix. layer size = {0}, feature size = {1}", LayerSize, denseFeatureSize);
50-
DenseFeatureSize = denseFeatureSize;
5175
DenseWeights = new Matrix<float>(LayerSize, denseFeatureSize);
5276
for (var i = 0; i < DenseWeights.Height; i++)
5377
{
@@ -61,7 +85,6 @@ public override void InitializeWeights(int sparseFeatureSize, int denseFeatureSi
6185
if (sparseFeatureSize > 0)
6286
{
6387
Logger.WriteLine("Initializing sparse feature matrix. layer size = {0}, feature size = {1}", LayerSize, sparseFeatureSize);
64-
SparseFeatureSize = sparseFeatureSize;
6588
SparseWeights = new Matrix<float>(LayerSize, SparseFeatureSize);
6689
for (var i = 0; i < SparseWeights.Height; i++)
6790
{
@@ -111,19 +134,6 @@ public override void BackwardPass()
111134
{
112135
}
113136

114-
public override void ComputeLayerErr(SimpleLayer nextLayer, float[] destErrLayer, float[] srcErrLayer, Neuron neuron)
115-
{
116-
DropoutNeuron dropoutNeuron = neuron as DropoutNeuron;
117-
base.ComputeLayerErr(nextLayer, destErrLayer, srcErrLayer, dropoutNeuron);
118-
for (var i = 0; i < LayerSize; i++)
119-
{
120-
if (dropoutNeuron.mask[i])
121-
{
122-
destErrLayer[i] = 0;
123-
}
124-
}
125-
}
126-
127137
public override void ComputeLayerErr(SimpleLayer nextLayer)
128138
{
129139
base.ComputeLayerErr(nextLayer);

0 commit comments

Comments (0)