PyTorch 1.0: batch training a neural network

import torch
import torch.utils.data as Data
# torch provides a tool called DataLoader to help organize your data; use it to wrap your data for batch training.

torch.manual_seed(1)    # reproducible

# number of samples in each training batch
BATCH_SIZE = 5
# BATCH_SIZE = 8

x = torch.linspace(1, 10, 10)       # this is x data (torch tensor)
y = torch.linspace(10, 1, 10)       # this is y data (torch tensor)

# DataLoader is the tool torch provides for wrapping your own data.
# Load your own data (a numpy array or otherwise) into Tensors, then wrap them
# (a sketch of this appears after the script).
# The benefit of DataLoader is that it iterates over your data efficiently.

# first convert the tensors into a Dataset that torch recognizes
torch_dataset = Data.TensorDataset(x, y)   # older versions: Data.TensorDataset(data_tensor=x, target_tensor=y)
# then put the dataset into a DataLoader
loader = Data.DataLoader(
    dataset=torch_dataset,      # torch TensorDataset format
    batch_size=BATCH_SIZE,      # mini-batch size
    shuffle=True,               # random shuffle for training; shuffled data generally trains better
    num_workers=2,              # number of subprocesses for loading the data
)


def show_batch():
    for epoch in range(3):   # train over the entire dataset 3 times
        for step, (batch_x, batch_y) in enumerate(loader):   # each step, loader yields one mini-batch to learn from
            # train your data here... (this is where the training step would go)
            print('Epoch: ', epoch, '| Step: ', step, '| batch x: ',
                  batch_x.numpy(), '| batch y: ', batch_y.numpy())


if __name__ == '__main__':   # with num_workers > 0, this guard is required on Windows
    show_batch()
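
The same wrapping works for data of your own. As a minimal sketch (the array shapes and values here are made up for illustration), a numpy array is loaded into Tensors with torch.from_numpy and wrapped exactly as above:

import numpy as np
import torch
import torch.utils.data as Data

np_x = np.random.rand(10, 3).astype(np.float32)   # hypothetical data: 10 samples, 3 features
np_y = np.random.rand(10).astype(np.float32)      # hypothetical targets: 10 values

dataset = Data.TensorDataset(torch.from_numpy(np_x), torch.from_numpy(np_y))
loader = Data.DataLoader(dataset=dataset, batch_size=5, shuffle=True)

The sample output below comes from running the script above, first with BATCH_SIZE = 5 and then with BATCH_SIZE = 8.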
# BATCH_SIZE = 5
'''
Epoch: 0 | Step: 0 | batch x: [ 5. 7. 10. 3. 4.] | batch y: [6. 4. 1. 8. 7.]
Epoch: 0 | Step: 1 | batch x: [2. 1. 8. 9. 6.] | batch y: [ 9. 10. 3. 2. 5.]
Epoch: 1 | Step: 0 | batch x: [ 4. 6. 7. 10. 8.] | batch y: [7. 5. 4. 1. 3.]
Epoch: 1 | Step: 1 | batch x: [5. 3. 2. 1. 9.] | batch y: [ 6. 8. 9. 10. 2.]
Epoch: 2 | Step: 0 | batch x: [ 4. 2. 5. 6. 10.] | batch y: [7. 9. 6. 5. 1.]
Epoch: 2 | Step: 1 | batch x: [3. 9. 1. 8. 7.] | batch y: [ 8. 2. 10. 3. 4.]
'''
# BATCH_SIZE = 8
'''
Epoch: 0 | Step: 0 | batch x: [ 5. 7. 10. 3. 4. 2. 1. 8.] | batch y: [ 6. 4. 1. 8. 7. 9. 10. 3.]
Epoch: 0 | Step: 1 | batch x: [9. 6.] | batch y: [2. 5.]
Epoch: 1 | Step: 0 | batch x: [ 4. 6. 7. 10. 8. 5. 3. 2.] | batch y: [7. 5. 4. 1. 3. 6. 8. 9.]
Epoch: 1 | Step: 1 | batch x: [1. 9.] | batch y: [10. 2.]
Epoch: 2 | Step: 0 | batch x: [ 4. 2. 5. 6. 10. 3. 9. 1.] | batch y: [ 7. 9. 6. 5. 1. 8. 2. 10.]
Epoch: 2 | Step: 1 | batch x: [8. 7.] | batch y: [3. 4.]
'''
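
With BATCH_SIZE = 8, the 10 samples do not divide evenly, so the last step of each epoch returns only the 2 leftover samples. If incomplete batches are unwanted, DataLoader can discard them via its drop_last flag; a minimal sketch, reusing torch_dataset from the script above:

loader = Data.DataLoader(
    dataset=torch_dataset,
    batch_size=8,
    shuffle=True,
    drop_last=True,      # discard the final incomplete batch (here, the 2 leftover samples)
)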
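
For completeness, a minimal sketch of what could go in the "train your data here" placeholder, assuming a hypothetical one-layer network in place of a real model:

import torch
import torch.nn as nn

net = nn.Linear(1, 1)                                  # hypothetical model for illustration
loss_func = nn.MSELoss()
optimizer = torch.optim.SGD(net.parameters(), lr=0.1)

for epoch in range(3):
    for step, (batch_x, batch_y) in enumerate(loader):
        prediction = net(batch_x.unsqueeze(1))               # forward pass on the mini-batch
        loss = loss_func(prediction, batch_y.unsqueeze(1))   # compute the loss
        optimizer.zero_grad()                                # clear gradients from the last step
        loss.backward()                                      # backpropagate
        optimizer.step()                                     # apply the gradients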

 
