Question
I have volumetric data of size 256x128x256. Due to limited memory, I cannot feed the whole volume to Caffe directly. Hence, I randomly choose n_sample patches of size 50x50x50 extracted from the volumetric data and store them in HDF5. I was able to randomly extract the patches and their labels from the raw data with my extract_patch_from_volumetric_data function.
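For reference, this is only a minimal sketch of how such a patch extractor could look (the exact body of extract_patch_from_volumetric_data is not shown here; the sketch assumes in_data is a (raw_volume, label_volume) pair of NumPy arrays):

import numpy as np

def extract_patch_from_volumetric_data(in_data, patch_size=50):
    # Hypothetical sketch: crop one random patch from the raw volume and
    # the corresponding patch from the label volume.
    raw_volume, label_volume = in_data  # assumed shapes: (256, 128, 256)
    z = np.random.randint(0, raw_volume.shape[0] - patch_size + 1)
    y = np.random.randint(0, raw_volume.shape[1] - patch_size + 1)
    x = np.random.randint(0, raw_volume.shape[2] - patch_size + 1)
    raw_patch = raw_volume[z:z + patch_size, y:y + patch_size, x:x + patch_size]
    label_patch = label_volume[z:z + patch_size, y:y + patch_size, x:x + patch_size]
    return raw_patch, label_patch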
I want to store these patches in an HDF5 file. The code below performs the task. Could you look at my implementation and verify it? In particular, I am wondering about the initialization of the raw_patches and label_patches arrays and about the axis order used when storing the patches. Thank you for reading.
import numpy as np
import h5py

num_sample = 1000
# Pre-allocate the patch arrays in Caffe's N x C x D x H x W layout
raw_patches = np.zeros([num_sample, 1, 50, 50, 50], dtype=np.float16)
label_patches = np.zeros([num_sample, 1, 50, 50, 50], dtype=np.int8)

for i in range(num_sample):
    raw_patch, label_patch = extract_patch_from_volumetric_data(in_data)
    # raw_patch shape: [50x50x50], label_patch shape: [50x50x50]
    # Store them in the pre-allocated arrays
    raw_patches[i, 0, :, :, :] = raw_patch
    label_patches[i, 0, :, :, :] = label_patch

# Keep the first num_sample samples (a no-op here, since the arrays
# were allocated with exactly num_sample rows)
raw_patches = raw_patches[0:num_sample, :, :, :, :]
label_patches = label_patches[0:num_sample, :, :, :, :]

# Store in HDF5 and append the file path to the list file
# (index_file is assumed to be defined earlier)
with h5py.File('./trainMS_%s.h5' % index_file, 'w') as f:
    f['data'] = raw_patches
    f['label'] = label_patches
with open('./trainMS_list.txt', 'a') as f:
    f.write('./trainMS_%s.h5\n' % index_file)
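A quick sanity check, separate from the script above, reads the file back and prints the dataset shapes and dtypes (using trainMS_0.h5, as in the test script further below):

import h5py

with h5py.File('./trainMS_0.h5', 'r') as f:
    for name in ('data', 'label'):
        dset = f[name]
        print('%s %s %s' % (name, dset.shape, dset.dtype))  # expect (1000, 1, 50, 50, 50)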
This is my prototxt
layer {
  name: "data"
  type: "HDF5Data"
  top: "data"
  top: "label"
  include {
    phase: TRAIN
  }
  hdf5_data_param {
    source: "./trainMS_list.txt"
    batch_size: 1
  }
}
layer {
  name: "conv1a"
  type: "Convolution"
  bottom: "data"
  top: "conv1a"
  convolution_param {
    num_output: 32
    pad: 1
    kernel_size: 3
    stride: 1
    weight_filler {
      type: "msra"
    }
    bias_filler {
      type: "constant"
    }
  }
}
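With pad: 1, kernel_size: 3 and stride: 1 the convolution should preserve the 50x50x50 spatial size, so conv1a on a 1x1x50x50x50 input should give a 1x32x50x50x50 blob. A small check of the standard output-size formula (not part of the original prototxt):

def conv_output_dim(in_dim, kernel=3, pad=1, stride=1):
    # Caffe convolution output size: (in + 2*pad - kernel) / stride + 1
    return (in_dim + 2 * pad - kernel) // stride + 1

print([conv_output_dim(d) for d in (50, 50, 50)])  # [50, 50, 50]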
Update: Script to test convolution
import numpy as np
import h5py
import caffe

filename = './trainMS_0.h5'
with h5py.File(filename, 'r') as hf:
    data = hf.get('data')
    npdata = np.array(data)
    print(npdata.shape)   # 100x1x50x50x50
    label = hf.get('label')
    nplabel = np.array(label)
    print(nplabel.shape)  # 100x50x50x50

caffe.set_mode_cpu()
net = caffe.Net('conv.prototxt', caffe.TEST)

im = npdata[20, 0, :, :, :]                      # input is 50x50x50
im_input = im[np.newaxis, np.newaxis, :, :, :]   # reshape to 1x1x50x50x50
net.blobs['data'].reshape(*im_input.shape)
net.blobs['data'].data[...] = im_input

# pick first filter output ('conv' is the top/param name of the convolution layer in conv.prototxt)
conv0 = net.blobs['conv'].data[0, 0]
print("pre-surgery output mean {:.2f}".format(conv0.mean()))  # output is 0

# set first filter bias to 1
net.params['conv'][1].data[0] = 1.
net.forward()
conv_out = net.blobs['conv'].data[0, 0]
print(conv_out.shape)  # 50x50x50
Source: https://stackoverflow.com/questions/42310081/how-to-store-volumetric-patch-into-hdf5