dnnclassifier.py
import tensorflow as tf

# Loss name selected in DNNClassifier.__init__ and read by the module-level
# loss() function below. Defaults to multi-class cross entropy.
_loss = "categorical_crossentropy"


class DNNClassifier(tf.keras.Model):
    def __init__(self, feature_columns=None, hidden_units=[100, 100], n_classes=3):
        """DNNClassifier

        :param feature_columns: feature columns.
        :type feature_columns: list[tf.feature_column].
        :param hidden_units: number of hidden units per layer.
        :type hidden_units: list[int].
        :param n_classes: number of label classes.
        :type n_classes: int.
        """
        global _loss
        super(DNNClassifier, self).__init__()
        self.feature_layer = None
        self.n_classes = n_classes
        if feature_columns is not None:
            # Combines all the feature columns into a single dense tensor.
            self.feature_layer = tf.keras.layers.DenseFeatures(feature_columns)
        self.hidden_layers = []
        for hidden_unit in hidden_units:
            self.hidden_layers.append(
                tf.keras.layers.Dense(hidden_unit, activation="relu")
            )
        if self.n_classes == 2:
            # Special setup for binary classification: a single sigmoid output.
            pred_act = "sigmoid"
            _loss = "binary_crossentropy"
            n_out = 1
        else:
            pred_act = "softmax"
            _loss = "categorical_crossentropy"
            n_out = self.n_classes
        self.prediction_layer = tf.keras.layers.Dense(n_out, activation=pred_act)

    def call(self, inputs, training=True):
        if self.feature_layer is not None:
            x = self.feature_layer(inputs)
        else:
            x = tf.keras.layers.Flatten()(inputs)
        for hidden_layer in self.hidden_layers:
            x = hidden_layer(x)
        return self.prediction_layer(x)


def optimizer(learning_rate=0.001):
    """Default optimizer. Used in model.compile."""
    return tf.keras.optimizers.Adagrad(learning_rate=learning_rate)


def loss(labels, output):
    """Default loss function. Used in model.compile."""
    global _loss
    if _loss == "binary_crossentropy":
        return tf.reduce_mean(tf.keras.losses.binary_crossentropy(labels, output))
    elif _loss == "categorical_crossentropy":
        # Labels are expected to be integer class indices, hence the sparse variant.
        return tf.reduce_mean(
            tf.keras.losses.sparse_categorical_crossentropy(labels, output)
        )


def prepare_prediction_column(prediction):
    """Return the class label of highest probability."""
    return prediction.argmax(axis=-1)


def eval_metrics_fn():
    """Return a dict of evaluation metric functions keyed by name."""
    return {
        # Element-wise correctness as a boolean tensor; aggregation is left
        # to the caller.
        "accuracy": lambda labels, predictions: tf.equal(
            tf.argmax(predictions, 1, output_type=tf.int32),
            tf.cast(tf.reshape(labels, [-1]), tf.int32),
        )
    }
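

# ---------------------------------------------------------------------------
# Usage sketch (not part of the original module): a minimal, hedged example of
# compiling and fitting DNNClassifier on random toy data, assuming TensorFlow
# 2.x. The feature-column names ("age", "income"), data shapes, and training
# settings below are illustrative assumptions, not values from the source.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import numpy as np

    # Two hypothetical numeric feature columns fed through DenseFeatures.
    feature_columns = [
        tf.feature_column.numeric_column("age"),
        tf.feature_column.numeric_column("income"),
    ]
    model = DNNClassifier(
        feature_columns=feature_columns, hidden_units=[64, 32], n_classes=3
    )
    model.compile(
        optimizer=optimizer(),
        loss=loss,
        metrics=["sparse_categorical_accuracy"],
    )

    # Random toy data keyed by feature-column name, with integer class labels.
    features = {
        "age": np.random.uniform(18, 90, size=(256, 1)).astype("float32"),
        "income": np.random.uniform(1e4, 1e5, size=(256, 1)).astype("float32"),
    }
    labels = np.random.randint(0, 3, size=(256,))
    model.fit(features, labels, epochs=2, batch_size=32)

    # Convert the softmax output back to class labels.
    predictions = model.predict(features)
    print(prepare_prediction_column(predictions)[:10])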