import random
# Cache the entire dataset in host memory so later epochs can reshuffle
# and rebatch it without re-running the (slow) dataloader.
data_img = []
data_lable = []  # (sic) misspelling kept: later code in this file indexes data_lable
m_index_i = 0    # running count of cached samples

# Scratch batch tensor, allocated ONCE. The original allocated this large
# tensor inside the inner per-sample loop (one allocation per image) even
# though only slot 0 was ever written — hoisting it is behavior-equivalent.
asamples = torch.Tensor(config["batch_size"] * len(config["parallels"]), 3,
                        config["img_w"], config["img_h"])

for astep, samples in enumerate(dataloader):
    start = time.time()
    images, labels = samples["image"], samples["label"]
    for i in range(images.size(0)):
        data_img.append(images[i, :, :, :])
        data_lable.append(labels[i, :, :])
        # NOTE(review): only asamples[0] is ever written here, so after the
        # loop it holds just the last cached image — confirm asamples is
        # actually needed at all.
        asamples[0] = data_img[m_index_i]
        m_index_i += 1
# Index permutation over the cached dataset; reshuffled each epoch below.
# (list(range(...)) instead of a copy comprehension; a stale commented-out
# draft of the shuffling loop was removed here.)
m_index = list(range(len(data_img)))
# Effective batch size across all parallel replicas.
batch_size = len(config["parallels"]) * config["batch_size"]

# Reusable host-side batch buffers, filled slot-by-slot in the epoch loop.
samples = torch.Tensor(batch_size, 3, config["img_w"], config["img_h"])
# assumes at most 10 boxes per image, 5 values per box — TODO confirm
labels = torch.Tensor(batch_size, 10, 5)

best_acc = 0.2  # presumably the best accuracy seen so far — used later in the file
next_need = 0
# Main training loop over the cached, in-memory dataset.
# NOTE: this loop continues beyond the visible end of this chunk
# (backward pass / optimizer step are not shown here).
for epoch in range(config["epochs"]):
    recall = 0
    # Fresh shuffle of the cached sample indices each epoch.
    random.shuffle(m_index)
    timestr = datetime.datetime.now().strftime('%Y%m%d_%H%M%S_%f')
    step = 0
    for i in range(m_index_i):
        # print(i,timestr)
        sample = data_img[m_index[i]]
        label = data_lable[m_index[i]]
        # Fill the pre-allocated batch buffers slot by slot.
        samples[i % batch_size] = sample
        labels[i % batch_size] = label
        # Launch one training step once a full batch has been assembled.
        if i % batch_size == (batch_size - 1):
            # samples = torch.Tensor(90, 3, 352, 352)
            config["global_step"] += 1
            # Forward and backward
            optimizer.zero_grad()
            # NOTE(review): net appears to return the loss(es) directly when
            # given labels — confirm against the model definition.
            losses = net(samples.cuda(), labels.cuda())
            step += 1