import tensorflow as tf
import numpy as np
from utility.data_utils import pad_sequences
from keras.layers import Masking,Dense
# Demo: pad a ragged batch of sequences, then show that Keras `Masking`
# causes all-`mask_value` timesteps to be skipped by downstream layers.
#
# Build a ragged batch: 2 sequences of unequal length, each timestep a
# 3-dim feature vector.  dtype=object is required for ragged nesting —
# NumPy >= 1.24 raises ValueError on implicit ragged array creation.
d = np.array([[[111, 112, 113],
               [121, 122, 123],
               [131, 132, 133],
               [141, 142, 143]],
              [[211, 212, 113],
               [221, 222, 223.]]], dtype=object)  # (B, -1, D) = (2, -1, 3)
print(d)
"""
[list([[111, 112, 113], [121, 122, 123], [131, 132, 133], [141, 142, 143]])
list([[211, 212, 113], [221, 222, 223.0]])]
"""
# Pad/trim to fixed length: (B, -1, D), M => (B, M, D) = (2, 3, 3).
# NOTE(review): the first sequence (length 4) is truncated to maxlen=3 —
# presumably intentional for this demo; confirm.  `s` (second return
# value of the project-local pad_sequences) is unused here.
d, s = pad_sequences(sequences=d, maxlen=3, dtype=np.float32,
                     padding='post', value=0.0)
print(d)
"""
[[[111. 112. 113.]
[121. 122. 123.]
[131. 132. 133.]]
[[211. 212. 113.]
[221. 222. 223.]
[ 0. 0. 0.]]]
"""
d = tf.constant(d)
# If every value in a timestep equals mask_value, that timestep is masked
# (skipped) by all downstream layers.  Downstream layers must support masking.
d = Masking(mask_value=0., input_shape=(3, 3))(d)
e = Dense(2)(d)  # (B, M, D) => (B, M, 2)
"""
[[[ -97.99757 -80.39526 ]
[-106.70024 -87.56952 ]
[-115.40292 -94.74378 ]]
[[ -97.336426 -81.49376 ]
[-193.72702 -159.31212 ]
[ 0. 0. ]]]
"""
# TF1-style execution: initialize variables (Dense kernel/bias), then
# evaluate the graph.
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    _e = sess.run(e)
    print(_e)