# Compute the cross-entropy loss of the sequence of logits
# (tf.contrib.seq2seq.sequence_loss = weighted softmax cross-entropy per timestep).
# NOTE(review): this statement references self.* attributes, so it belongs inside a
# method of the model class — confirm placement/indentation against the full file.
loss = tf.contrib.seq2seq.sequence_loss(
    logits,                   # logits, shape defaults to [20, 35, 10000] (batch, steps, vocab)
    self.input_obj.targets,   # expected output (target ids), shape defaults to [20, 35]
    # uniform weights: every timestep of every batch element counts equally
    tf.ones([self.batch_size, self.num_steps], dtype=tf.float32),
    average_across_timesteps=False,  # keep a per-timestep loss vector
    average_across_batch=True)       # average over the batch dimension
16.9k 10
1.8k 9
1.8k 8
1.9k 7
1.5k 7