Randomly shuffling and re-saving a large dataset (detailed explanation still missing; to be added in a later update)

PS: I hit this when trying to shuffle a dataset so large that loading it all at once would freeze the machine. The workaround below is a two-level shuffle: first shuffle the order in which the saved .npy files are read, then load b files at a time, shuffle the samples inside each chunk, and re-split the chunk into files of bb samples each. Samples only mix within a chunk, so this is not a perfectly uniform shuffle, but it keeps memory usage bounded:

import os
import random

import numpy as np


def myshuffle(path, save_path, b, bb):
    # path: directory holding the input files neg0.npy ... neg(l-1).npy
    #       (assumed to contain nothing else); every file except possibly
    #       the last holds exactly bb samples along axis 0
    # save_path: directory for the shuffled output files
    # b: number of files loaded and shuffled together per chunk
    # bb: number of samples per output file
    for parent, dirnames, filenames in os.walk(path):
        l = len(filenames)  # count the input files
    # Shuffle the read order of the first l-1 files, but keep the last file
    # (which may hold fewer than bb samples) at the end, so the ragged
    # remainder ends up in the final output file.
    lst = list(range(l - 1))
    random.shuffle(lst)
    lst.append(l - 1)
    su = int(np.ceil(l / b))  # number of chunks
    for id in range(su):
        # The last chunk may contain fewer than b files.
        nb = b if id < su - 1 else l - (su - 1) * b
        # Load the chunk's files (in shuffled order) into one array.
        for i in range(nb):
            si = lst[id * b + i]
            neg_patch_sum = np.load(os.path.join(path, 'neg' + str(si) + '.npy'))
            if i == 0:
                sample = np.copy(neg_patch_sum)
            else:
                sample = np.concatenate((sample, neg_patch_sum), axis=0)
        # Shuffle the samples within the chunk.
        lst1 = list(range(len(sample)))
        random.shuffle(lst1)
        npy = sample[lst1]
        # Re-split the chunk into files of bb samples each; the very last
        # output file keeps whatever remains.
        for ii in range(nb):
            iii = id * b + ii
            if id == su - 1 and ii == nb - 1:
                np.save(os.path.join(save_path, 'neg%d.npy' % iii), npy[ii * bb:])
            else:
                np.save(os.path.join(save_path, 'neg%d.npy' % iii), npy[ii * bb:(ii + 1) * bb])
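A minimal end-to-end sketch for trying myshuffle out. The directory names, array shapes, and file counts below are made-up test values, not from the original post; it assumes every input file except the last holds exactly bb samples along axis 0.

import os

import numpy as np

src, dst = 'neg_in', 'neg_out'  # hypothetical directories
os.makedirs(src, exist_ok=True)
os.makedirs(dst, exist_ok=True)

bb = 100
# Ten full files of bb samples plus one ragged file of 37 samples.
for k in range(10):
    np.save(os.path.join(src, 'neg%d.npy' % k), np.random.rand(bb, 8))
np.save(os.path.join(src, 'neg10.npy'), np.random.rand(37, 8))

# Shuffle four files (4 * bb samples) at a time.
myshuffle(src, dst, b=4, bb=bb)

# Sanity check: no samples were lost or duplicated.
n_in = sum(len(np.load(os.path.join(src, f))) for f in os.listdir(src))
n_out = sum(len(np.load(os.path.join(dst, f))) for f in os.listdir(dst))
assert n_in == n_out

With b=4 the peak memory footprint is roughly four files' worth of samples instead of the whole dataset, which is what avoids the freeze described above.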

Reposted from blog.csdn.net/qq_36401512/article/details/92798147