For inference, we build a generator function, build_generator, that replicates the logic of build_model: it defines all the model variables along with the TensorFlow ops required to load the saved model and run inference with it:
def build_generator(self):
with tf.device('/cpu:0'):
self.word_emb =
tf.Variable(tf.random_uniform([self.n_words, self.dim_hidden],
-0.1, 0.1), name='word_emb')
self.lstm1 =
tf.nn.rnn_cell.BasicLSTMCell(self.dim_hidden, state_is_tuple=False)
self.lstm2 =
tf.nn.rnn_cell.BasicLSTMCell(self.dim_hidden, state_is_tuple=False)
self.encode_W =
tf.Variable(tf.random_uniform([self.dim_image,self.dim_hidden],
-0.1, 0.1), name='encode_W')
self.encode_b =
tf.Variable...