Path: blob/master/22_word_embedding/supervised_word_embeddings.ipynb
Kernel: Python 3
In [19]:
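# Plausible contents of this cell, reconstructed from the outputs further down
# (the exact review texts and variable names are assumptions; they are chosen
# to match the 2- and 3-word encodings shown in Out[21]).
import numpy as np
from tensorflow.keras.preprocessing.text import one_hot
from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Embedding, Flatten, Dense

reviews = ['nice food',
           'amazing restaurant',
           'too good',
           'just loved it!',
           'will go again',
           'horrible food',
           'never go there',
           'poor service',
           'poor quality',
           'needs improvement']
# 1 = positive review, 0 = negative review
sentiment = np.array([1, 1, 1, 1, 1, 0, 0, 0, 0, 0])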
In [20]:
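# one_hot hashes each word to an integer between 1 and vocab_size - 1, so the
# particular numbers ([4, 23] here) depend on the hash, not on word meaning.
# The review string used in this cell is an assumption.
one_hot('amazing restaurant', 30)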
Out[20]:
[4, 23]
In [21]:
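# Encode every review with the same vocabulary size, giving one list of word
# codes per review (variable names are assumptions).
vocab_size = 30
encoded_reviews = [one_hot(r, vocab_size) for r in reviews]
encoded_reviews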
Out[21]:
[[13, 21], [4, 23], [14, 17], [8, 15, 16], [22, 15, 29], [8, 21], [26, 15, 24], [16, 4], [16, 12], [4, 29]]
In [22]:
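# Pad every encoded review to the same length so they can be batched; the
# longest review has 3 words, and max_length = 4 (an assumption consistent
# with the 4-column output below) pads with zeros on the right.
max_length = 4
padded_reviews = pad_sequences(encoded_reviews, maxlen=max_length, padding='post')
print(padded_reviews)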
Out[22]:
[[13 21  0  0]
 [ 4 23  0  0]
 [14 17  0  0]
 [ 8 15 16  0]
 [22 15 29  0]
 [ 8 21  0  0]
 [26 15 24  0]
 [16  4  0  0]
 [16 12  0  0]
 [ 4 29  0  0]]
In [23]:
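# A minimal supervised classifier around a trainable Embedding layer: each of
# the 30 word codes gets a 5-dimensional vector, the 4 vectors of a review are
# flattened, and a single sigmoid unit predicts the sentiment. Layer names and
# hyperparameters are assumptions, but the shapes match the summary in Out[25].
embedded_vector_size = 5
model = Sequential()
model.add(Embedding(vocab_size, embedded_vector_size,
                    input_length=max_length, name='embedding'))
model.add(Flatten())
model.add(Dense(1, activation='sigmoid'))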
In [24]:
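# Binary classification, so binary cross-entropy with accuracy as the metric
# (optimizer choice is an assumption).
X = padded_reviews
y = sentiment
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])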
In [25]:
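# Parameter counts: Embedding has 30 words x 5 dims = 150 weights; Flatten adds
# none; Dense has 20 weights + 1 bias = 21. The trailing "None" in the output
# is the return value of model.summary() being printed.
print(model.summary())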
Out[25]:
Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
embedding (Embedding) (None, 4, 5) 150
_________________________________________________________________
flatten_1 (Flatten) (None, 20) 0
_________________________________________________________________
dense_1 (Dense) (None, 1) 21
=================================================================
Total params: 171
Trainable params: 171
Non-trainable params: 0
_________________________________________________________________
None
In [26]:
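# Train silently; fit() returns a History object, which is what Out[26] shows.
# The epoch count is an assumption.
model.fit(X, y, epochs=50, verbose=0)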
Out[26]:
<tensorflow.python.keras.callbacks.History at 0x1bb8daa5a30>
In [29]:
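# Evaluate on the training data itself; with only 10 tiny examples the model
# memorizes them, hence accuracy 1.0.
loss, accuracy = model.evaluate(X, y)
accuracy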
Out[29]:
1/1 [==============================] - 0s 1ms/step - loss: 0.6384 - accuracy: 1.0000
1.0
In [30]:
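# Pull the learned embedding matrix out of the model: one 5-dimensional vector
# per vocabulary slot, so its length equals vocab_size (30).
weights = model.get_layer('embedding').get_weights()[0]
len(weights)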
Out[30]:
30
In [31]:
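# Learned embedding vector for one word code; 13 (the first word of the first
# review in Out[21]) is an assumed choice of index.
weights[13]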
Out[31]:
array([-0.08330977, -0.06752131, -0.04629624, -0.00765801, -0.02024159],
dtype=float32)
In [32]:
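# Embedding vector for another word code; 4 (the first word of the second
# review) is an assumed choice of index.
weights[4]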
Out[32]:
array([-0.07935128, -0.08574004, 0.06615968, -0.02349528, 0.00917289],
dtype=float32)
In [33]:
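# Embedding vector for a third word code; 16 (a code that appears in several
# of the encoded reviews) is an assumed choice of index.
weights[16]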
Out[33]:
array([ 0.0128377 , 0.03549778, 0.05134471, -0.07147218, 0.03261041],
dtype=float32)