import os

import h5py
import numpy as np


def process_dump_tohdf5data(X, Y, path, phase):
    # Dump (image patch, density map) pairs into batched HDF5 files and record
    # every file name in <path>/<phase>.txt, one path per line.
    batch_size = 7000
    # Pre-allocated batch buffers. patch_h/patch_w, net_density_h/net_density_w
    # and density_resize() are module-level names defined earlier in the script.
    X_process = np.zeros((batch_size, 3, patch_h, patch_w), dtype=np.float32)
    Y_process = np.zeros((batch_size, net_density_h, net_density_w), dtype=np.float32)
    with open(os.path.join(path, phase + '.txt'), 'w') as f:
        i1 = 0
        while i1 < len(X):
            # End index of the current batch; the last batch may be shorter.
            if i1 + batch_size < len(X):
                i2 = i1 + batch_size
            else:
                i2 = len(X)
            file_name = os.path.join(path, phase + '_' + str(i1) + '.h5')
            with h5py.File(file_name, 'w') as hf:
                for j, img in enumerate(X[i1:i2]):
                    # HWC -> CHW, float32, as expected by the network input.
                    X_process[j] = img.copy().transpose(2, 0, 1).astype(np.float32)
                    # Rescale the density map to the network's output resolution.
                    Y_process[j] = density_resize(Y[i1 + j],
                                                  fx=float(net_density_w) / patch_w,
                                                  fy=float(net_density_h) / patch_h)
                # Only the first (i2 - i1) rows of the buffers are valid for this batch.
                hf['data'] = X_process[:(i2 - i1)]
                hf['label'] = Y_process[:(i2 - i1)]
            f.write(file_name + '\n')
            i1 += batch_size
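
A minimal, self-contained usage sketch of the function above. The patch and density-map sizes here are placeholder values, the input arrays are randomly generated, and density_resize() is replaced by a count-preserving sum-pooling stand-in; the real definitions come from earlier in the original script.

import os

import numpy as np

# Stand-ins for the module-level names the function relies on (assumed values).
patch_h, patch_w = 64, 64
net_density_h, net_density_w = 16, 16

def density_resize(density, fx, fy):
    # Stand-in: sum-pool the density map down to the target size, preserving the
    # total count, assuming the patch size is an integer multiple of the output size.
    h, w = density.shape
    out_h, out_w = int(round(h * fy)), int(round(w * fx))
    return density.reshape(out_h, h // out_h, out_w, w // out_w).sum(axis=(1, 3))

# Ten synthetic RGB patches (HWC) with matching per-patch density maps.
X = [np.random.rand(patch_h, patch_w, 3).astype(np.float32) for _ in range(10)]
Y = [np.random.rand(patch_h, patch_w).astype(np.float32) for _ in range(10)]

out_dir = './hdf5_data'
os.makedirs(out_dir, exist_ok=True)
process_dump_tohdf5data(X, Y, out_dir, 'train')
# Writes ./hdf5_data/train_0.h5 (datasets 'data' and 'label') and
# ./hdf5_data/train.txt containing that file name.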