# 5-RNN-01_字符集RNN (character-level RNN)




"""
单词字符级别预测RNN
"""
import time
from collections import namedtuple
import numpy as np
import tensorflow as tf
import os



# Load the training corpus and build character <-> integer lookup tables.
with open('../datas/anna.txt', 'r') as f:
    text = f.read()
vocab = sorted(set(text))    # unique characters in the text (83 for this corpus)

vocab_to_int = {c: i for i, c in enumerate(vocab)}
int_to_vocab = dict(enumerate(vocab))
# Encode the full text as integer ids via the vocab_to_int table.
encoded = np.array([vocab_to_int[c] for c in text], dtype=np.int32)

print('所有的字符是:{}'.format(vocab))
print(text[:100])
print(len(text))
print(encoded[:100])
print(len(vocab))

# todo 制作训练 batches函数。每次调用该函数可以返回一个batch,多用generator
def get_batches(arr, batch_size, n_steps):
    """Yield (x, y) training batches from a tokenized text array.

    :param arr: 1-D array of integer-encoded characters
    :param batch_size: number of sequences per batch
    :param n_steps: time steps (sequence length) per batch
    :return: generator of (x, y) pairs, each shaped [batch_size, n_steps];
             y is x shifted left by one character (the last column of the
             final batch is zero-padded).
    """
    # Characters consumed by one batch, and how many full batches fit.
    chars_per_batch = batch_size * n_steps
    n_batches = len(arr) // chars_per_batch

    # Trim the tail so the data reshapes evenly, then lay out one row
    # per sequence of the batch.
    trimmed = arr[:n_batches * chars_per_batch]
    trimmed = np.reshape(trimmed, newshape=[batch_size, -1])

    # Slide a window of n_steps columns across the reshaped data.
    for start in range(0, trimmed.shape[1], n_steps):
        x = trimmed[:, start:start + n_steps]
        # Targets are the inputs shifted by one position.
        shifted = trimmed[:, start + 1:start + n_steps + 1]
        y = np.zeros(shape=x.shape, dtype=x.dtype)
        y[:, :shifted.shape[1]] = shifted
        yield x, y


def test_batches_func():
    """Smoke-test get_batches by printing the first (x, y) pair."""
    batch_gen = get_batches(encoded, 8, 10)
    first_x, first_y = next(batch_gen)
    print(first_x)
    print('\n', '**' * 40)
    print(first_y)

def build_inputs(batch_size, n_steps):
    """Create the model's input placeholders.

    :param batch_size: batch dimension of the placeholders
    :param n_steps: number of time steps per sequence
    :return: (inputs, targets, keep_prob) placeholders
    """
    seq_shape = [batch_size, n_steps]
    inputs = tf.placeholder(tf.int32, shape=seq_shape, name='x')
    targets = tf.placeholder(tf.int32, shape=seq_shape, name='y')
    # Scalar dropout keep-probability, fed at run time.
    keep_prob = tf.placeholder(tf.float32, shape=None, name='keep_prob')
    return inputs, targets, keep_prob


def build_lstm(lstm_size, num_layers, batch_size, keep_prob):
    """Build a stacked LSTM cell with dropout and its zero initial state.

    :param lstm_size: number of units in each LSTM layer
    :param num_layers: number of stacked LSTM layers
    :param batch_size: batch size used to size the initial state
    :param keep_prob: dropout keep-probability tensor
    :return: (multi_cell, initial_state)
    """
    def make_layer():
        # One LSTM layer wrapped with output dropout.
        lstm = tf.nn.rnn_cell.BasicLSTMCell(num_units=lstm_size)
        return tf.nn.rnn_cell.DropoutWrapper(lstm, output_keep_prob=keep_prob)

    # Stack num_layers identical dropout-wrapped layers.
    layers = [make_layer() for _ in range(num_layers)]
    multi_cell = tf.nn.rnn_cell.MultiRNNCell(layers)

    # All-zero state used to start each epoch / sampling run.
    initial_state = multi_cell.zero_state(batch_size, dtype=tf.float32)
    return multi_cell, initial_state

def build_output(rnn_output, lstm_size, output_size):
    """Project RNN outputs to vocabulary logits and softmax predictions.

    :param rnn_output: 3-D tensor [N, n_steps, lstm_size] from the RNN
    :param lstm_size: hidden size (input dim of the projection)
    :param output_size: number of classes (== vocab size)
    :return: (predictions, logits), both shaped [N * n_steps, output_size]
    """
    # Flatten batch and time dims so one dense layer covers every step:
    # [N, n_steps, lstm_size] -> [N * n_steps, lstm_size].
    flat = tf.reshape(rnn_output, shape=[-1, lstm_size])

    # Projection variables live in their own scope.
    with tf.variable_scope('logits'):
        softmax_w = tf.get_variable(
            'w', shape=[lstm_size, output_size], dtype=tf.float32,
            initializer=tf.truncated_normal_initializer(stddev=0.1)
        )
        softmax_b = tf.get_variable(
            'b', shape=[output_size], dtype=tf.float32,
            initializer=tf.zeros_initializer()
        )
    logits = tf.nn.xw_plus_b(flat, softmax_w, softmax_b)

    # Softmax turns logits into per-character probabilities.
    predictions = tf.nn.softmax(logits)
    return predictions, logits

def build_loss(logits, labels, num_classes):
    """Mean softmax cross-entropy between logits and one-hot labels.

    :param logits: 2-D tensor [N * n_steps, num_classes]
    :param labels: integer targets, shape [N, n_steps]
    :param num_classes: number of classes, i.e. vocab_size
    :return: scalar mean loss
    """
    # One-hot encode ([N, n_steps] -> [N, n_steps, num_classes]) and
    # flatten so the labels line up with the 2-D logits.
    one_hot = tf.one_hot(indices=labels, depth=num_classes)
    flat_labels = tf.reshape(one_hot, shape=logits.get_shape())

    cross_entropy = tf.nn.softmax_cross_entropy_with_logits_v2(
        logits=logits, labels=flat_labels
    )
    return tf.reduce_mean(cross_entropy)

def build_optimizer(loss, learning_rate, grads_clip):
    """Adam optimizer with global-norm gradient clipping.

    :param loss: scalar loss tensor
    :param learning_rate: Adam learning rate
    :param grads_clip: clipping threshold for the global gradient norm
    :return: training op
    """
    trainable = tf.trainable_variables()
    raw_grads = tf.gradients(loss, trainable)
    # Clip by global norm to keep RNN gradients from exploding.
    clipped, _ = tf.clip_by_global_norm(raw_grads, grads_clip)
    adam = tf.train.AdamOptimizer(learning_rate=learning_rate)
    return adam.apply_gradients(zip(clipped, trainable))


class CharRNN:
    """Character-level RNN graph: one-hot input -> stacked LSTM -> softmax."""

    def __init__(self, num_classes, batch_size=64, n_steps=50, lstm_size=128, num_layers=2,
                 lr=1e-3, grads_clip=5, sampling=False):
        """Build the full training (or sampling) graph.

        :param num_classes: vocabulary size
        :param batch_size: sequences per batch (forced to 1 when sampling)
        :param n_steps: time steps per sequence (forced to 1 when sampling)
        :param lstm_size: hidden units per LSTM layer
        :param num_layers: number of stacked LSTM layers
        :param lr: learning rate
        :param grads_clip: gradient-clipping threshold
        :param sampling: when True, build a 1x1 graph for character-by-character
                         generation
        """
        # For generation we feed a single character at a time.
        # (Fixed: the original compared `sampling == True` and kept a
        # no-op `else` branch that reassigned the values to themselves.)
        if sampling:
            batch_size, n_steps = 1, 1

        tf.reset_default_graph()  # discard any previously built default graph

        # 1. Input placeholders.
        self.inputs, self.targets, self.keep_prob = build_inputs(batch_size, n_steps)

        # 2. Stacked LSTM cell and its zero initial state.
        multi_cell, self.initial_state = build_lstm(
            lstm_size, num_layers, batch_size, self.keep_prob)

        # 3. One-hot encode the inputs and unroll the dynamic RNN.
        x_one_hot = tf.one_hot(self.inputs, num_classes)
        rnn_outputs, self.final_state = tf.nn.dynamic_rnn(
            multi_cell, x_one_hot, initial_state=self.initial_state)
        # rnn_outputs shape: [N, n_steps, lstm_size]

        # 4. Project to logits and softmax predictions.
        self.prediction, self.logits = build_output(rnn_outputs, lstm_size, num_classes)

        # 5. Loss and clipped-Adam training op.
        self.loss = build_loss(self.logits, self.targets, num_classes)
        self.train_opt = build_optimizer(self.loss, lr, grads_clip)

# Hyperparameters.
batch_size = 64
n_steps = 100  # time steps, i.e. sequence length
lstm_size = 128  # hidden units per LSTM layer
num_layers = 2  # number of stacked LSTM layers
learning_rate = 1e-3
keep_probab = 0.5  # dropout keep probability used during training
epochs = 20
print_every_n = 10  # log the training loss every N steps
save_every_n = 300  # checkpoint the model every N steps

# NOTE(review): the training graph is built at import time, even when this
# module is imported only for sampling.
model = CharRNN(
    len(vocab), batch_size=batch_size, n_steps=n_steps, lstm_size=lstm_size,
    num_layers=num_layers, lr=learning_rate, grads_clip=5)


def train():
    """Train the global CharRNN model on the encoded corpus.

    Logs the training loss every `print_every_n` steps and checkpoints the
    model every `save_every_n` steps under ./models/anna_rnn.
    """
    saver = tf.train.Saver(max_to_keep=1)
    # Make sure the checkpoint directory exists.
    checkpoint_dir = './models/anna_rnn'
    if not os.path.exists(checkpoint_dir):
        os.makedirs(checkpoint_dir)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())

        step = 1
        # Fixed off-by-one: range(1, epochs) ran only epochs-1 epochs;
        # range(1, epochs + 1) runs exactly `epochs`.
        for e in range(1, epochs + 1):
            # Reset the LSTM state at the start of each epoch.
            new_state = sess.run(model.initial_state)
            for x, y in get_batches(encoded, batch_size, n_steps):
                feed = {model.inputs: x, model.targets: y, model.keep_prob: keep_probab,
                        model.initial_state: new_state}
                # One optimization step; carry the LSTM state across batches.
                _, new_state = sess.run([model.train_opt, model.final_state], feed)

                if step % print_every_n == 0:
                    loss_ = sess.run(model.loss, feed)
                    print('Epochs:{} - Step:{} - Train loss:{:.5f}'.format(e, step, loss_))
                # Periodically persist the model.
                if step % save_every_n == 0:
                    save_files = os.path.join(checkpoint_dir, 'model.ckpt')
                    saver.save(sess, save_path=save_files)
                step += 1


def pick_top_n(preds, vocab_size, top_n=5):
    """Sample a character id from the top_n most probable predictions.

    :param preds: prediction probabilities (any shape squeezable to 1-D)
    :param vocab_size: number of character classes
    :param top_n: how many of the most probable characters to keep
    :return: sampled character id (numpy integer)
    """
    # Fixed: np.squeeze can return a *view* of `preds`, so zeroing entries
    # below used to corrupt the caller's prediction array in place.
    # astype() copies, and float64 also avoids np.random.choice rejecting
    # float32 probabilities that don't sum to 1 within tolerance.
    p = np.squeeze(preds).astype(np.float64)
    # Zero everything except the top_n probabilities, then renormalize.
    p[np.argsort(p)[:-top_n]] = 0
    p = p / np.sum(p)
    c = np.random.choice(vocab_size, 1, p=p)[0]
    return c


def sample(checkpoint, n_samples, lstm_size, vocab_size, prime="The "):
    """Generate n_samples characters from a trained checkpoint.

    :param checkpoint: path of the checkpoint to restore
    :param n_samples: number of characters to generate after the prime
    :param lstm_size: hidden size (must match the trained model)
    :param vocab_size: vocabulary size (unused; len(vocab) is used directly)
    :param prime: seed text fed through the network before sampling
    :return: prime + generated text as one string
    """
    samples = [c for c in prime]
    # Rebuild the graph with batch_size = n_steps = 1 for step-wise sampling.
    model = CharRNN(len(vocab), lstm_size=lstm_size, sampling=True)
    saver = tf.train.Saver()
    with tf.Session() as sess:
        saver.restore(sess, checkpoint)
        new_state = sess.run(model.initial_state)
        # Warm up the LSTM state by feeding the prime one character at a time.
        # NOTE(review): `x` and `preds` are first bound inside this loop, so an
        # empty `prime` would raise NameError below — confirm prime is never ''.
        for c in prime:
            x = np.zeros((1, 1))
            x[0, 0] = vocab_to_int[c]
            feed = {model.inputs: x,
                    model.keep_prob: 1.,
                    model.initial_state: new_state}
            preds, new_state = sess.run([model.prediction, model.final_state],
                                        feed_dict=feed)

        # First generated character from the last prime prediction.
        c = pick_top_n(preds, len(vocab))
        samples.append(int_to_vocab[c])

        # Autoregressively feed each sampled character back in.
        for i in range(n_samples):
            x[0, 0] = c
            feed = {model.inputs: x,
                    model.keep_prob: 1.,
                    model.initial_state: new_state}
            preds, new_state = sess.run([model.prediction, model.final_state],
                                        feed_dict=feed)

            c = pick_top_n(preds, len(vocab))
            samples.append(int_to_vocab[c])

    return ''.join(samples)


def test():
    """Restore several checkpoints and print a generated sample from each."""
    print(tf.train.get_checkpoint_state('checkpoints'))

    # Latest checkpoint in the directory.
    print(tf.train.latest_checkpoint('checkpoints'))
    checkpoint = tf.train.latest_checkpoint('checkpoints')
    samp = sample(checkpoint, 2000, lstm_size, len(vocab), prime="Far")
    print('训练800个batch效果是:')
    print(samp)

    ckpt_state = tf.train.get_checkpoint_state('checkpoints')
    checkpoint = ckpt_state.all_model_checkpoint_paths[2]
    samp = sample(checkpoint, 1000, lstm_size, len(vocab), prime="Far")
    print('训练200个batch效果是:')
    print(samp)

    # Fixed: the original used 'checkpoints\i2000_l64.ckpt' — '\i' is an
    # invalid escape sequence in a non-raw string and the hard-coded
    # backslash is Windows-only; os.path.join is portable.
    checkpoint = os.path.join('checkpoints', 'i2000_l64.ckpt')
    samp = sample(checkpoint, 1000, lstm_size, len(vocab), prime="Far")
    print('训练2000个batch效果是:')
    print(samp)

    checkpoint = os.path.join('checkpoints', 'i3960_l64.ckpt')
    samp = sample(checkpoint, 1000, lstm_size, len(vocab), prime="Far")
    print('训练3900个batch效果是:')
    print(samp)

# Script entry point: run training when executed directly.
if __name__ == '__main__':
    train()
D:\Anaconda\python.exe D:/AI20/HJZ/04-深度学习/4-RNN/20191228___AI20_RNN/01_字符集RNN.py
所有的字符是:['\n', ' ', '!', '"', '$', '%', '&', "'", '(', ')', '*', ',', '-', '.', '/', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ':', ';', '?', '@', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '_', '`', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']
Chapter 1


Happy families are all alike; every unhappy family is unhappy in its own
way.

Everythin
1985223
[31 64 57 72 76 61 74  1 16  0  0  0 36 57 72 72 81  1 62 57 69 65 68 65
 61 75  1 57 74 61  1 57 68 68  1 57 68 65 67 61 26  1 61 78 61 74 81  1
 77 70 64 57 72 72 81  1 62 57 69 65 68 81  1 65 75  1 77 70 64 57 72 72
 81  1 65 70  1 65 76 75  1 71 79 70  0 79 57 81 13  0  0 33 78 61 74 81
 76 64 65 70]
83
2020-02-14 12:40:22.881860: I tensorflow/core/platform/cpu_feature_guard.cc:141] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX AVX2
Epochs:1 - Step:10 - Train loss:3.54455
Epochs:1 - Step:20 - Train loss:3.33840
Epochs:1 - Step:30 - Train loss:3.23758
Epochs:1 - Step:40 - Train loss:3.22496
Epochs:1 - Step:50 - Train loss:3.19728
Epochs:1 - Step:60 - Train loss:3.16408
Epochs:1 - Step:70 - Train loss:3.17018
Epochs:1 - Step:80 - Train loss:3.18734
Epochs:1 - Step:90 - Train loss:3.16217
Epochs:1 - Step:100 - Train loss:3.15650
Epochs:1 - Step:110 - Train loss:3.16833
Epochs:1 - Step:120 - Train loss:3.14899
Epochs:1 - Step:130 - Train loss:3.11324
Epochs:1 - Step:140 - Train loss:3.13758
Epochs:1 - Step:150 - Train loss:3.12261
Epochs:1 - Step:160 - Train loss:3.11176
Epochs:1 - Step:170 - Train loss:3.10151
Epochs:1 - Step:180 - Train loss:3.08428
Epochs:1 - Step:190 - Train loss:3.07161
Epochs:1 - Step:200 - Train loss:2.98235
Epochs:1 - Step:210 - Train loss:2.96754
Epochs:1 - Step:220 - Train loss:2.91381
Epochs:1 - Step:230 - Train loss:2.87618
Epochs:1 - Step:240 - Train loss:2.82366
Epochs:1 - Step:250 - Train loss:2.82974
Epochs:1 - Step:260 - Train loss:2.76710
Epochs:1 - Step:270 - Train loss:2.73414
Epochs:1 - Step:280 - Train loss:2.70318
Epochs:1 - Step:290 - Train loss:2.73773
Epochs:1 - Step:300 - Train loss:2.71744
Epochs:1 - Step:310 - Train loss:2.66217
Epochs:2 - Step:320 - Train loss:2.61928
Epochs:2 - Step:330 - Train loss:2.61885
Epochs:2 - Step:340 - Train loss:2.60499
Epochs:2 - Step:350 - Train loss:2.61273
Epochs:2 - Step:360 - Train loss:2.59224
Epochs:2 - Step:370 - Train loss:2.55241
Epochs:2 - Step:380 - Train loss:2.55967
Epochs:2 - Step:390 - Train loss:2.56632
Epochs:2 - Step:400 - Train loss:2.51611
Epochs:2 - Step:410 - Train loss:2.50722
Epochs:2 - Step:420 - Train loss:2.55249
Epochs:2 - Step:430 - Train loss:2.50538
Epochs:2 - Step:440 - Train loss:2.49122
Epochs:2 - Step:450 - Train loss:2.52298
Epochs:2 - Step:460 - Train loss:2.50542
Epochs:2 - Step:470 - Train loss:2.49632
Epochs:2 - Step:480 - Train loss:2.45350
Epochs:2 - Step:490 - Train loss:2.45035
Epochs:2 - Step:500 - Train loss:2.47056
Epochs:2 - Step:510 - Train loss:2.43135
Epochs:2 - Step:520 - Train loss:2.45130
Epochs:2 - Step:530 - Train loss:2.42257
Epochs:2 - Step:540 - Train loss:2.42217
Epochs:2 - Step:550 - Train loss:2.40365
Epochs:2 - Step:560 - Train loss:2.42641
Epochs:2 - Step:570 - Train loss:2.40197
Epochs:2 - Step:580 - Train loss:2.38928
Epochs:2 - Step:590 - Train loss:2.41386
Epochs:2 - Step:600 - Train loss:2.45456
Epochs:2 - Step:610 - Train loss:2.41045
Epochs:2 - Step:620 - Train loss:2.39241
Epochs:3 - Step:630 - Train loss:2.35893
Epochs:3 - Step:640 - Train loss:2.35747
Epochs:3 - Step:650 - Train loss:2.37416
Epochs:3 - Step:660 - Train loss:2.36970
Epochs:3 - Step:670 - Train loss:2.37576
Epochs:3 - Step:680 - Train loss:2.32869
Epochs:3 - Step:690 - Train loss:2.34683
Epochs:3 - Step:700 - Train loss:2.33717
Epochs:3 - Step:710 - Train loss:2.31075
Epochs:3 - Step:720 - Train loss:2.32863
Epochs:3 - Step:730 - Train loss:2.37468
Epochs:3 - Step:740 - Train loss:2.31977
Epochs:3 - Step:750 - Train loss:2.31786
Epochs:3 - Step:760 - Train loss:2.34707
Epochs:3 - Step:770 - Train loss:2.33737
Epochs:3 - Step:780 - Train loss:2.33303
Epochs:3 - Step:790 - Train loss:2.28803
Epochs:3 - Step:800 - Train loss:2.29348
Epochs:3 - Step:810 - Train loss:2.30449
Epochs:3 - Step:820 - Train loss:2.28299
Epochs:3 - Step:830 - Train loss:2.28929
Epochs:3 - Step:840 - Train loss:2.27322
Epochs:3 - Step:850 - Train loss:2.26948
Epochs:3 - Step:860 - Train loss:2.26625
Epochs:3 - Step:870 - Train loss:2.27460
Epochs:3 - Step:880 - Train loss:2.24430
Epochs:3 - Step:890 - Train loss:2.24035
Epochs:3 - Step:900 - Train loss:2.26537
Epochs:3 - Step:910 - Train loss:2.30440
Epochs:3 - Step:920 - Train loss:2.26912
Epochs:3 - Step:930 - Train loss:2.25907
Epochs:4 - Step:940 - Train loss:2.21893
Epochs:4 - Step:950 - Train loss:2.22525
Epochs:4 - Step:960 - Train loss:2.24017
Epochs:4 - Step:970 - Train loss:2.24947
Epochs:4 - Step:980 - Train loss:2.24476
Epochs:4 - Step:990 - Train loss:2.19938
Epochs:4 - Step:1000 - Train loss:2.21579
Epochs:4 - Step:1010 - Train loss:2.21166
Epochs:4 - Step:1020 - Train loss:2.20049
Epochs:4 - Step:1030 - Train loss:2.19456
Epochs:4 - Step:1040 - Train loss:2.26025
Epochs:4 - Step:1050 - Train loss:2.20980
Epochs:4 - Step:1060 - Train loss:2.21083
Epochs:4 - Step:1070 - Train loss:2.23755
Epochs:4 - Step:1080 - Train loss:2.22802
Epochs:4 - Step:1090 - Train loss:2.21269
Epochs:4 - Step:1100 - Train loss:2.16518
Epochs:4 - Step:1110 - Train loss:2.17998
Epochs:4 - Step:1120 - Train loss:2.18320
Epochs:4 - Step:1130 - Train loss:2.17664
Epochs:4 - Step:1140 - Train loss:2.19933
Epochs:4 - Step:1150 - Train loss:2.17602
Epochs:4 - Step:1160 - Train loss:2.16423
Epochs:4 - Step:1170 - Train loss:2.17096
Epochs:4 - Step:1180 - Train loss:2.15613
Epochs:4 - Step:1190 - Train loss:2.15004
Epochs:4 - Step:1200 - Train loss:2.14178
Epochs:4 - Step:1210 - Train loss:2.16882
Epochs:4 - Step:1220 - Train loss:2.22165
Epochs:4 - Step:1230 - Train loss:2.16654
Epochs:4 - Step:1240 - Train loss:2.17306
Epochs:5 - Step:1250 - Train loss:2.13370
Epochs:5 - Step:1260 - Train loss:2.13113
Epochs:5 - Step:1270 - Train loss:2.14861
Epochs:5 - Step:1280 - Train loss:2.14362
Epochs:5 - Step:1290 - Train loss:2.15046
Epochs:5 - Step:1300 - Train loss:2.11533
Epochs:5 - Step:1310 - Train loss:2.14178
Epochs:5 - Step:1320 - Train loss:2.14305
Epochs:5 - Step:1330 - Train loss:2.10673
Epochs:5 - Step:1340 - Train loss:2.11374
Epochs:5 - Step:1350 - Train loss:2.17035
Epochs:5 - Step:1360 - Train loss:2.11851
Epochs:5 - Step:1370 - Train loss:2.10784
Epochs:5 - Step:1380 - Train loss:2.16428
Epochs:5 - Step:1390 - Train loss:2.14669
Epochs:5 - Step:1400 - Train loss:2.13869
Epochs:5 - Step:1410 - Train loss:2.08080
Epochs:5 - Step:1420 - Train loss:2.09572
Epochs:5 - Step:1430 - Train loss:2.11748
Epochs:5 - Step:1440 - Train loss:2.10466
Epochs:5 - Step:1450 - Train loss:2.10941
Epochs:5 - Step:1460 - Train loss:2.09303
Epochs:5 - Step:1470 - Train loss:2.10501
Epochs:5 - Step:1480 - Train loss:2.08640
Epochs:5 - Step:1490 - Train loss:2.08168
Epochs:5 - Step:1500 - Train loss:2.07580
Epochs:5 - Step:1510 - Train loss:2.06370
Epochs:5 - Step:1520 - Train loss:2.09451
Epochs:5 - Step:1530 - Train loss:2.14611
Epochs:5 - Step:1540 - Train loss:2.10930
Epochs:5 - Step:1550 - Train loss:2.09031
Epochs:6 - Step:1560 - Train loss:2.04816
Epochs:6 - Step:1570 - Train loss:2.04903
Epochs:6 - Step:1580 - Train loss:2.07293
Epochs:6 - Step:1590 - Train loss:2.08193
Epochs:6 - Step:1600 - Train loss:2.07189
Epochs:6 - Step:1610 - Train loss:2.03196
Epochs:6 - Step:1620 - Train loss:2.08385
Epochs:6 - Step:1630 - Train loss:2.06050
Epochs:6 - Step:1640 - Train loss:2.04408
Epochs:6 - Step:1650 - Train loss:2.03526
Epochs:6 - Step:1660 - Train loss:2.11971
Epochs:6 - Step:1670 - Train loss:2.06088
Epochs:6 - Step:1680 - Train loss:2.05915
Epochs:6 - Step:1690 - Train loss:2.10081
Epochs:6 - Step:1700 - Train loss:2.09524
Epochs:6 - Step:1710 - Train loss:2.05561
Epochs:6 - Step:1720 - Train loss:2.02120
Epochs:6 - Step:1730 - Train loss:2.03221
Epochs:6 - Step:1740 - Train loss:2.05406
Epochs:6 - Step:1750 - Train loss:2.03541
Epochs:6 - Step:1760 - Train loss:2.05996
Epochs:6 - Step:1770 - Train loss:2.02366
Epochs:6 - Step:1780 - Train loss:2.04593
Epochs:6 - Step:1790 - Train loss:2.04380
Epochs:6 - Step:1800 - Train loss:2.00776
Epochs:6 - Step:1810 - Train loss:2.01968
Epochs:6 - Step:1820 - Train loss:2.00536
Epochs:6 - Step:1830 - Train loss:2.03703
Epochs:6 - Step:1840 - Train loss:2.07217
Epochs:6 - Step:1850 - Train loss:2.04154
Epochs:6 - Step:1860 - Train loss:2.04066
Epochs:7 - Step:1870 - Train loss:1.99834
Epochs:7 - Step:1880 - Train loss:1.99425
Epochs:7 - Step:1890 - Train loss:2.02569
Epochs:7 - Step:1900 - Train loss:2.02427
Epochs:7 - Step:1910 - Train loss:2.03435
Epochs:7 - Step:1920 - Train loss:1.98192
Epochs:7 - Step:1930 - Train loss:2.01667
Epochs:7 - Step:1940 - Train loss:1.98834
Epochs:7 - Step:1950 - Train loss:1.97661
Epochs:7 - Step:1960 - Train loss:1.99590
Epochs:7 - Step:1970 - Train loss:2.06474
Epochs:7 - Step:1980 - Train loss:2.01409
Epochs:7 - Step:1990 - Train loss:2.01495
Epochs:7 - Step:2000 - Train loss:2.05487
Epochs:7 - Step:2010 - Train loss:2.04369
Epochs:7 - Step:2020 - Train loss:1.99952
Epochs:7 - Step:2030 - Train loss:1.97687
Epochs:7 - Step:2040 - Train loss:1.97657
Epochs:7 - Step:2050 - Train loss:1.99870
Epochs:7 - Step:2060 - Train loss:1.99196
Epochs:7 - Step:2070 - Train loss:1.99306
Epochs:7 - Step:2080 - Train loss:1.97640
Epochs:7 - Step:2090 - Train loss:1.97139
Epochs:7 - Step:2100 - Train loss:1.98442
Epochs:7 - Step:2110 - Train loss:1.96258
Epochs:7 - Step:2120 - Train loss:1.95430
Epochs:7 - Step:2130 - Train loss:1.95868
Epochs:7 - Step:2140 - Train loss:1.98052
Epochs:7 - Step:2150 - Train loss:2.02343
Epochs:7 - Step:2160 - Train loss:1.99707
Epochs:7 - Step:2170 - Train loss:1.98916
Epochs:8 - Step:2180 - Train loss:1.96080
Epochs:8 - Step:2190 - Train loss:1.92898
Epochs:8 - Step:2200 - Train loss:1.97923
Epochs:8 - Step:2210 - Train loss:1.97714
Epochs:8 - Step:2220 - Train loss:1.96837
Epochs:8 - Step:2230 - Train loss:1.92836
Epochs:8 - Step:2240 - Train loss:1.98162
Epochs:8 - Step:2250 - Train loss:1.95136
Epochs:8 - Step:2260 - Train loss:1.93496
Epochs:8 - Step:2270 - Train loss:1.95293
Epochs:8 - Step:2280 - Train loss:2.02543
Epochs:8 - Step:2290 - Train loss:1.95841
Epochs:8 - Step:2300 - Train loss:1.94123
Epochs:8 - Step:2310 - Train loss:2.00665
Epochs:8 - Step:2320 - Train loss:1.98445
Epochs:8 - Step:2330 - Train loss:1.95668
Epochs:8 - Step:2340 - Train loss:1.92542
Epochs:8 - Step:2350 - Train loss:1.95749
Epochs:8 - Step:2360 - Train loss:1.95525
Epochs:8 - Step:2370 - Train loss:1.95679
Epochs:8 - Step:2380 - Train loss:1.96994
Epochs:8 - Step:2390 - Train loss:1.93487
Epochs:8 - Step:2400 - Train loss:1.92627
Epochs:8 - Step:2410 - Train loss:1.95220
Epochs:8 - Step:2420 - Train loss:1.92313
Epochs:8 - Step:2430 - Train loss:1.92023
Epochs:8 - Step:2440 - Train loss:1.91842
Epochs:8 - Step:2450 - Train loss:1.94985
Epochs:8 - Step:2460 - Train loss:1.98439
Epochs:8 - Step:2470 - Train loss:1.94901
Epochs:8 - Step:2480 - Train loss:1.94836
Epochs:9 - Step:2490 - Train loss:1.91048
Epochs:9 - Step:2500 - Train loss:1.91277
Epochs:9 - Step:2510 - Train loss:1.93474
Epochs:9 - Step:2520 - Train loss:1.94050
Epochs:9 - Step:2530 - Train loss:1.92659
Epochs:9 - Step:2540 - Train loss:1.89847
Epochs:9 - Step:2550 - Train loss:1.94440
Epochs:9 - Step:2560 - Train loss:1.92104
Epochs:9 - Step:2570 - Train loss:1.88160
Epochs:9 - Step:2580 - Train loss:1.91325
Epochs:9 - Step:2590 - Train loss:1.96506
Epochs:9 - Step:2600 - Train loss:1.92739
Epochs:9 - Step:2610 - Train loss:1.92210
Epochs:9 - Step:2620 - Train loss:1.95166
Epochs:9 - Step:2630 - Train loss:1.93860
Epochs:9 - Step:2640 - Train loss:1.90662
Epochs:9 - Step:2650 - Train loss:1.88889
Epochs:9 - Step:2660 - Train loss:1.89739
Epochs:9 - Step:2670 - Train loss:1.91875
Epochs:9 - Step:2680 - Train loss:1.91305
Epochs:9 - Step:2690 - Train loss:1.92109
Epochs:9 - Step:2700 - Train loss:1.90419
Epochs:9 - Step:2710 - Train loss:1.87245
Epochs:9 - Step:2720 - Train loss:1.90199
Epochs:9 - Step:2730 - Train loss:1.87742
Epochs:9 - Step:2740 - Train loss:1.87480
Epochs:9 - Step:2750 - Train loss:1.88147
Epochs:9 - Step:2760 - Train loss:1.90371
Epochs:9 - Step:2770 - Train loss:1.95066
Epochs:9 - Step:2780 - Train loss:1.91283
Epochs:9 - Step:2790 - Train loss:1.89513
Epochs:10 - Step:2800 - Train loss:1.88244
Epochs:10 - Step:2810 - Train loss:1.86199
Epochs:10 - Step:2820 - Train loss:1.89912
Epochs:10 - Step:2830 - Train loss:1.90274
Epochs:10 - Step:2840 - Train loss:1.88731
Epochs:10 - Step:2850 - Train loss:1.87528
Epochs:10 - Step:2860 - Train loss:1.90960
Epochs:10 - Step:2870 - Train loss:1.87467
Epochs:10 - Step:2880 - Train loss:1.85027
Epochs:10 - Step:2890 - Train loss:1.87162
Epochs:10 - Step:2900 - Train loss:1.93184
Epochs:10 - Step:2910 - Train loss:1.89131
Epochs:10 - Step:2920 - Train loss:1.88831
Epochs:10 - Step:2930 - Train loss:1.94189
Epochs:10 - Step:2940 - Train loss:1.91062
Epochs:10 - Step:2950 - Train loss:1.88498
Epochs:10 - Step:2960 - Train loss:1.85281
Epochs:10 - Step:2970 - Train loss:1.86549
Epochs:10 - Step:2980 - Train loss:1.89211
Epochs:10 - Step:2990 - Train loss:1.88492
Epochs:10 - Step:3000 - Train loss:1.88199
Epochs:10 - Step:3010 - Train loss:1.86823
Epochs:10 - Step:3020 - Train loss:1.86298
Epochs:10 - Step:3030 - Train loss:1.87048
Epochs:10 - Step:3040 - Train loss:1.84485
Epochs:10 - Step:3050 - Train loss:1.84458
Epochs:10 - Step:3060 - Train loss:1.83135
Epochs:10 - Step:3070 - Train loss:1.86271
Epochs:10 - Step:3080 - Train loss:1.90478
Epochs:10 - Step:3090 - Train loss:1.86060
Epochs:10 - Step:3100 - Train loss:1.88224
Epochs:11 - Step:3110 - Train loss:1.84049
Epochs:11 - Step:3120 - Train loss:1.82988
Epochs:11 - Step:3130 - Train loss:1.87715
Epochs:11 - Step:3140 - Train loss:1.86736
Epochs:11 - Step:3150 - Train loss:1.86185
Epochs:11 - Step:3160 - Train loss:1.81853
Epochs:11 - Step:3170 - Train loss:1.88438
Epochs:11 - Step:3180 - Train loss:1.85916
Epochs:11 - Step:3190 - Train loss:1.82266
Epochs:11 - Step:3200 - Train loss:1.85173
Epochs:11 - Step:3210 - Train loss:1.91818
Epochs:11 - Step:3220 - Train loss:1.86020
Epochs:11 - Step:3230 - Train loss:1.85915
Epochs:11 - Step:3240 - Train loss:1.90223
Epochs:11 - Step:3250 - Train loss:1.88074
Epochs:11 - Step:3260 - Train loss:1.84350
Epochs:11 - Step:3270 - Train loss:1.82891
Epochs:11 - Step:3280 - Train loss:1.83940
Epochs:11 - Step:3290 - Train loss:1.84462
Epochs:11 - Step:3300 - Train loss:1.84571
Epochs:11 - Step:3310 - Train loss:1.86805
Epochs:11 - Step:3320 - Train loss:1.85200
Epochs:11 - Step:3330 - Train loss:1.82612
Epochs:11 - Step:3340 - Train loss:1.83853
Epochs:11 - Step:3350 - Train loss:1.81901
Epochs:11 - Step:3360 - Train loss:1.81473
Epochs:11 - Step:3370 - Train loss:1.80765
Epochs:11 - Step:3380 - Train loss:1.84590
Epochs:11 - Step:3390 - Train loss:1.88557
Epochs:11 - Step:3400 - Train loss:1.86225
Epochs:11 - Step:3410 - Train loss:1.85915
Epochs:12 - Step:3420 - Train loss:1.82198
Epochs:12 - Step:3430 - Train loss:1.81864
Epochs:12 - Step:3440 - Train loss:1.86162
Epochs:12 - Step:3450 - Train loss:1.84117
Epochs:12 - Step:3460 - Train loss:1.82873
Epochs:12 - Step:3470 - Train loss:1.81366
Epochs:12 - Step:3480 - Train loss:1.84185
Epochs:12 - Step:3490 - Train loss:1.81752
Epochs:12 - Step:3500 - Train loss:1.80210
Epochs:12 - Step:3510 - Train loss:1.82410
Epochs:12 - Step:3520 - Train loss:1.88478
Epochs:12 - Step:3530 - Train loss:1.83269
Epochs:12 - Step:3540 - Train loss:1.83280
Epochs:12 - Step:3550 - Train loss:1.87289
Epochs:12 - Step:3560 - Train loss:1.85642
Epochs:12 - Step:3570 - Train loss:1.81554
Epochs:12 - Step:3580 - Train loss:1.79663
Epochs:12 - Step:3590 - Train loss:1.82244
Epochs:12 - Step:3600 - Train loss:1.83551
Epochs:12 - Step:3610 - Train loss:1.82753
Epochs:12 - Step:3620 - Train loss:1.83558
Epochs:12 - Step:3630 - Train loss:1.82226
Epochs:12 - Step:3640 - Train loss:1.80378
Epochs:12 - Step:3650 - Train loss:1.81853
Epochs:12 - Step:3660 - Train loss:1.79977
Epochs:12 - Step:3670 - Train loss:1.80790
Epochs:12 - Step:3680 - Train loss:1.78391
Epochs:12 - Step:3690 - Train loss:1.81289
Epochs:12 - Step:3700 - Train loss:1.86782
Epochs:12 - Step:3710 - Train loss:1.84201
Epochs:12 - Step:3720 - Train loss:1.82479
Epochs:13 - Step:3730 - Train loss:1.79080
Epochs:13 - Step:3740 - Train loss:1.79578
Epochs:13 - Step:3750 - Train loss:1.81918
Epochs:13 - Step:3760 - Train loss:1.83776
Epochs:13 - Step:3770 - Train loss:1.79976
Epochs:13 - Step:3780 - Train loss:1.79207
Epochs:13 - Step:3790 - Train loss:1.82569
Epochs:13 - Step:3800 - Train loss:1.80643
Epochs:13 - Step:3810 - Train loss:1.78901
Epochs:13 - Step:3820 - Train loss:1.80878
Epochs:13 - Step:3830 - Train loss:1.86569
Epochs:13 - Step:3840 - Train loss:1.81982
Epochs:13 - Step:3850 - Train loss:1.81145
Epochs:13 - Step:3860 - Train loss:1.86005
Epochs:13 - Step:3870 - Train loss:1.83832
Epochs:13 - Step:3880 - Train loss:1.80629
Epochs:13 - Step:3890 - Train loss:1.76894
Epochs:13 - Step:3900 - Train loss:1.78789
Epochs:13 - Step:3910 - Train loss:1.78591
Epochs:13 - Step:3920 - Train loss:1.79470
Epochs:13 - Step:3930 - Train loss:1.81125
Epochs:13 - Step:3940 - Train loss:1.78784
Epochs:13 - Step:3950 - Train loss:1.79188
Epochs:13 - Step:3960 - Train loss:1.81107
Epochs:13 - Step:3970 - Train loss:1.76494
Epochs:13 - Step:3980 - Train loss:1.77415
Epochs:13 - Step:3990 - Train loss:1.75600
Epochs:13 - Step:4000 - Train loss:1.80300
Epochs:13 - Step:4010 - Train loss:1.83982
Epochs:13 - Step:4020 - Train loss:1.80527
Epochs:13 - Step:4030 - Train loss:1.79808
Epochs:14 - Step:4040 - Train loss:1.76483
Epochs:14 - Step:4050 - Train loss:1.77607
Epochs:14 - Step:4060 - Train loss:1.80798
Epochs:14 - Step:4070 - Train loss:1.80415
Epochs:14 - Step:4080 - Train loss:1.77829
Epochs:14 - Step:4090 - Train loss:1.76050
Epochs:14 - Step:4100 - Train loss:1.81013
Epochs:14 - Step:4110 - Train loss:1.77078
Epochs:14 - Step:4120 - Train loss:1.76736
Epochs:14 - Step:4130 - Train loss:1.78285
Epochs:14 - Step:4140 - Train loss:1.85905
Epochs:14 - Step:4150 - Train loss:1.78240
Epochs:14 - Step:4160 - Train loss:1.78759
Epochs:14 - Step:4170 - Train loss:1.83556
Epochs:14 - Step:4180 - Train loss:1.82520
Epochs:14 - Step:4190 - Train loss:1.77328
Epochs:14 - Step:4200 - Train loss:1.75143
Epochs:14 - Step:4210 - Train loss:1.78669
Epochs:14 - Step:4220 - Train loss:1.78099
Epochs:14 - Step:4230 - Train loss:1.76681
Epochs:14 - Step:4240 - Train loss:1.78078
Epochs:14 - Step:4250 - Train loss:1.78427
Epochs:14 - Step:4260 - Train loss:1.78309
Epochs:14 - Step:4270 - Train loss:1.80530
Epochs:14 - Step:4280 - Train loss:1.74955
Epochs:14 - Step:4290 - Train loss:1.74764
Epochs:14 - Step:4300 - Train loss:1.74767
Epochs:14 - Step:4310 - Train loss:1.78269
Epochs:14 - Step:4320 - Train loss:1.81613
Epochs:14 - Step:4330 - Train loss:1.80089
Epochs:14 - Step:4340 - Train loss:1.78524
Epochs:15 - Step:4350 - Train loss:1.75492
Epochs:15 - Step:4360 - Train loss:1.75809
Epochs:15 - Step:4370 - Train loss:1.78574
Epochs:15 - Step:4380 - Train loss:1.79550
Epochs:15 - Step:4390 - Train loss:1.76434
Epochs:15 - Step:4400 - Train loss:1.74794
Epochs:15 - Step:4410 - Train loss:1.78805
Epochs:15 - Step:4420 - Train loss:1.74278
Epochs:15 - Step:4430 - Train loss:1.74776
Epochs:15 - Step:4440 - Train loss:1.76758
Epochs:15 - Step:4450 - Train loss:1.84332
Epochs:15 - Step:4460 - Train loss:1.77790
Epochs:15 - Step:4470 - Train loss:1.76246
Epochs:15 - Step:4480 - Train loss:1.81289
Epochs:15 - Step:4490 - Train loss:1.79908
Epochs:15 - Step:4500 - Train loss:1.77334
Epochs:15 - Step:4510 - Train loss:1.72899
Epochs:15 - Step:4520 - Train loss:1.76627
Epochs:15 - Step:4530 - Train loss:1.76083
Epochs:15 - Step:4540 - Train loss:1.77034
Epochs:15 - Step:4550 - Train loss:1.76898
Epochs:15 - Step:4560 - Train loss:1.75916
Epochs:15 - Step:4570 - Train loss:1.74139
Epochs:15 - Step:4580 - Train loss:1.77941
Epochs:15 - Step:4590 - Train loss:1.74197
Epochs:15 - Step:4600 - Train loss:1.72706
Epochs:15 - Step:4610 - Train loss:1.73873
Epochs:15 - Step:4620 - Train loss:1.75235
Epochs:15 - Step:4630 - Train loss:1.79823
Epochs:15 - Step:4640 - Train loss:1.78119
Epochs:15 - Step:4650 - Train loss:1.76675
Epochs:16 - Step:4660 - Train loss:1.74411
Epochs:16 - Step:4670 - Train loss:1.73939
Epochs:16 - Step:4680 - Train loss:1.75789
Epochs:16 - Step:4690 - Train loss:1.77551
Epochs:16 - Step:4700 - Train loss:1.75087
Epochs:16 - Step:4710 - Train loss:1.74309
Epochs:16 - Step:4720 - Train loss:1.77395
Epochs:16 - Step:4730 - Train loss:1.73717
Epochs:16 - Step:4740 - Train loss:1.72338
Epochs:16 - Step:4750 - Train loss:1.75518
Epochs:16 - Step:4760 - Train loss:1.81827
Epochs:16 - Step:4770 - Train loss:1.75892
Epochs:16 - Step:4780 - Train loss:1.74805
Epochs:16 - Step:4790 - Train loss:1.81040
Epochs:16 - Step:4800 - Train loss:1.78147
Epochs:16 - Step:4810 - Train loss:1.76128
Epochs:16 - Step:4820 - Train loss:1.71262
Epochs:16 - Step:4830 - Train loss:1.74533
Epochs:16 - Step:4840 - Train loss:1.74295
Epochs:16 - Step:4850 - Train loss:1.73067
Epochs:16 - Step:4860 - Train loss:1.75668
Epochs:16 - Step:4870 - Train loss:1.72731
Epochs:16 - Step:4880 - Train loss:1.72819
Epochs:16 - Step:4890 - Train loss:1.77909
Epochs:16 - Step:4900 - Train loss:1.71646
Epochs:16 - Step:4910 - Train loss:1.71716
Epochs:16 - Step:4920 - Train loss:1.70791
Epochs:16 - Step:4930 - Train loss:1.74269
Epochs:16 - Step:4940 - Train loss:1.78038
Epochs:16 - Step:4950 - Train loss:1.75320
Epochs:16 - Step:4960 - Train loss:1.74215
Epochs:17 - Step:4970 - Train loss:1.73049
Epochs:17 - Step:4980 - Train loss:1.73403
Epochs:17 - Step:4990 - Train loss:1.75734
Epochs:17 - Step:5000 - Train loss:1.74503
Epochs:17 - Step:5010 - Train loss:1.73616
Epochs:17 - Step:5020 - Train loss:1.70899
Epochs:17 - Step:5030 - Train loss:1.76094
Epochs:17 - Step:5040 - Train loss:1.71514
Epochs:17 - Step:5050 - Train loss:1.71582
Epochs:17 - Step:5060 - Train loss:1.74260
Epochs:17 - Step:5070 - Train loss:1.81387
Epochs:17 - Step:5080 - Train loss:1.72843
Epochs:17 - Step:5090 - Train loss:1.72903
Epochs:17 - Step:5100 - Train loss:1.77706
Epochs:17 - Step:5110 - Train loss:1.76160
Epochs:17 - Step:5120 - Train loss:1.72113
Epochs:17 - Step:5130 - Train loss:1.69669
Epochs:17 - Step:5140 - Train loss:1.72363
Epochs:17 - Step:5150 - Train loss:1.74154
Epochs:17 - Step:5160 - Train loss:1.72845
Epochs:17 - Step:5170 - Train loss:1.73219
Epochs:17 - Step:5180 - Train loss:1.72354
Epochs:17 - Step:5190 - Train loss:1.72392
Epochs:17 - Step:5200 - Train loss:1.74545
Epochs:17 - Step:5210 - Train loss:1.70654
Epochs:17 - Step:5220 - Train loss:1.69620
Epochs:17 - Step:5230 - Train loss:1.71349
Epochs:17 - Step:5240 - Train loss:1.72960
Epochs:17 - Step:5250 - Train loss:1.76950
Epochs:17 - Step:5260 - Train loss:1.73831
Epochs:17 - Step:5270 - Train loss:1.73863
Epochs:18 - Step:5280 - Train loss:1.72166
Epochs:18 - Step:5290 - Train loss:1.70701
Epochs:18 - Step:5300 - Train loss:1.74283
Epochs:18 - Step:5310 - Train loss:1.73690
Epochs:18 - Step:5320 - Train loss:1.72736
Epochs:18 - Step:5330 - Train loss:1.71471
Epochs:18 - Step:5340 - Train loss:1.75664
Epochs:18 - Step:5350 - Train loss:1.69725
Epochs:18 - Step:5360 - Train loss:1.68932
Epochs:18 - Step:5370 - Train loss:1.71925
Epochs:18 - Step:5380 - Train loss:1.79503
Epochs:18 - Step:5390 - Train loss:1.71847
Epochs:18 - Step:5400 - Train loss:1.71954
Epochs:18 - Step:5410 - Train loss:1.76308
Epochs:18 - Step:5420 - Train loss:1.74787
Epochs:18 - Step:5430 - Train loss:1.72286
Epochs:18 - Step:5440 - Train loss:1.67672
Epochs:18 - Step:5450 - Train loss:1.72031
Epochs:18 - Step:5460 - Train loss:1.72851
Epochs:18 - Step:5470 - Train loss:1.71643
Epochs:18 - Step:5480 - Train loss:1.72122
Epochs:18 - Step:5490 - Train loss:1.70821
Epochs:18 - Step:5500 - Train loss:1.70390
Epochs:18 - Step:5510 - Train loss:1.73612
Epochs:18 - Step:5520 - Train loss:1.68654
Epochs:18 - Step:5530 - Train loss:1.70012
Epochs:18 - Step:5540 - Train loss:1.67746
Epochs:18 - Step:5550 - Train loss:1.71306
Epochs:18 - Step:5560 - Train loss:1.75904
Epochs:18 - Step:5570 - Train loss:1.73981
Epochs:18 - Step:5580 - Train loss:1.73152
Epochs:19 - Step:5590 - Train loss:1.69388
Epochs:19 - Step:5600 - Train loss:1.70037
Epochs:19 - Step:5610 - Train loss:1.73347
Epochs:19 - Step:5620 - Train loss:1.72690
Epochs:19 - Step:5630 - Train loss:1.69948
Epochs:19 - Step:5640 - Train loss:1.69377
Epochs:19 - Step:5650 - Train loss:1.73919
Epochs:19 - Step:5660 - Train loss:1.69157
Epochs:19 - Step:5670 - Train loss:1.69673
Epochs:19 - Step:5680 - Train loss:1.71912
Epochs:19 - Step:5690 - Train loss:1.76542
Epochs:19 - Step:5700 - Train loss:1.70967
Epochs:19 - Step:5710 - Train loss:1.69678
Epochs:19 - Step:5720 - Train loss:1.73990
Epochs:19 - Step:5730 - Train loss:1.73797
Epochs:19 - Step:5740 - Train loss:1.70160
Epochs:19 - Step:5750 - Train loss:1.65952
Epochs:19 - Step:5760 - Train loss:1.70583
Epochs:19 - Step:5770 - Train loss:1.71603
Epochs:19 - Step:5780 - Train loss:1.69620
Epochs:19 - Step:5790 - Train loss:1.70808
Epochs:19 - Step:5800 - Train loss:1.70482
Epochs:19 - Step:5810 - Train loss:1.70271
Epochs:19 - Step:5820 - Train loss:1.70875
Epochs:19 - Step:5830 - Train loss:1.68698
Epochs:19 - Step:5840 - Train loss:1.67908
Epochs:19 - Step:5850 - Train loss:1.67088
Epochs:19 - Step:5860 - Train loss:1.70454
Epochs:19 - Step:5870 - Train loss:1.74670
Epochs:19 - Step:5880 - Train loss:1.71638
Epochs:19 - Step:5890 - Train loss:1.71494

Process finished with exit code 0

发布了88 篇原创文章 · 获赞 2 · 访问量 1330

猜你喜欢

转载自blog.csdn.net/HJZ11/article/details/104310477
RNN