In [ ]:
%pylab inline
In [ ]:
import os
import numpy as np
import pylab as plt
import h5py
import astra
import cv2
import json
from pprint import pprint
In [ ]:
# Source HDF5 file with the raw tomography data (empty/dark/data groups)
data_file = './8b3c11c2-371c-48a6-85ad-972b33151b43.h5'
In [ ]:
# Load empty-beam (flat-field) and dark-current frames.
# Each frame is transposed, flipped vertically and normalised by its exposure time.
# NOTE: python2-era h5py API (.iteritems(), .value, .attrs.items()[0]).
with h5py.File(data_file,'r') as h5f:
    empty_images_count = len(h5f['empty'])
    empty_images = None
    empty_file_number = 0
    for k,v in h5f['empty'].iteritems():
        if empty_images is None:
            # allocate on the first frame: (frames, height, width) after the axes swap
            empty_images = np.zeros(shape=(empty_images_count, v.shape[1], v.shape[0]), dtype='float32')
        # frame metadata is stored as a JSON string in the first HDF5 attribute
        attributes = json.loads(v.attrs.items()[0][1])[0]
        exposure = attributes['frame']['image_data']['exposure']
        empty_images[empty_file_number] = np.flipud(v.value.astype('float32').swapaxes(0,1)) / exposure
        empty_file_number = empty_file_number + 1
    dark_images_count = len(h5f['dark'])
    dark_images = None
    dark_file_number = 0
    for k,v in h5f['dark'].iteritems():
        if dark_images is None:
            dark_images = np.zeros(shape=(dark_images_count, v.shape[1], v.shape[0]), dtype='float32')
        attributes = json.loads(v.attrs.items()[0][1])[0]
        # NOTE(review): exposure is cast to int() here but not for the empty
        # frames above — confirm whether that asymmetry is intended
        exposure = int(attributes['frame']['image_data']['exposure'])
        dark_images[dark_file_number] = np.flipud(v.value.astype('float32').swapaxes(0,1)) / exposure
        dark_file_number = dark_file_number + 1
In [ ]:
# считаем среднее значение
empty_image = empty_images.sum(axis=0)/len(empty_images)
dark_image = dark_images.sum(axis=0)/len(empty_images)
%xdel empty_images
%xdel dark_images
In [ ]:
# Overview figure: flat field, dark frame, their difference, and the beam mask.
empty_beam = empty_image-dark_image
mask = empty_beam > 0.01
plt.figure(figsize=(10,10))
panels = [(empty_image, 'Empty beam'),
          (dark_image, 'Dark beam'),
          (empty_beam, 'Empty-Dark beam'),
          (mask, 'Mask')]
for panel_index, (panel_image, panel_title) in enumerate(panels):
    plt.subplot(2, 2, panel_index + 1)
    if panel_title == 'Mask':
        plt.imshow(panel_image)
    else:
        plt.imshow(panel_image, vmin=0)
    plt.colorbar(orientation='horizontal')
    plt.title(panel_title)
#TODO: Check thresshold automaticaly
# plt.plot([sum(empty_beam > i) for i in np.arange(100)])
In [ ]:
# Explore mask thresholds: fraction of pixels above each candidate threshold.
# FIX: the built-in sum() over a 2D boolean array reduces only along axis 0,
# so each list element was a per-column array and plt.plot drew one curve per
# detector column. Use the scalar fraction of masked pixels instead.
x_range = np.arange(0, 0.2, 0.01)
fractions = [(empty_beam > x).mean() for x in x_range]
plt.plot(x_range, fractions)
In [ ]:
#build mask
# Find the rectangular region where the beam is present: rows/columns whose
# mask profile exceeds `threshold` pixels, then shrink by hand-tuned margins.
# NOTE(review): the +200/-200/+300/-500 margins are dataset-specific — retune
# for other scans.
threshold = 100
mask_x_profile = mask.sum(axis=1)
mask_x_min = np.argwhere(mask_x_profile>threshold).min()+200 # up border
mask_x_max = np.argwhere(mask_x_profile>threshold).max()-200 # bottom border
print mask_x_min, mask_x_max
mask_y_profile = mask.sum(axis=0)
mask_y_min = np.argwhere(mask_y_profile>threshold).min()+300 # left border
mask_y_max = np.argwhere(mask_y_profile>threshold).max()-500 # right border
print mask_y_min, mask_y_max
# visual check of the chosen crop
plt.imshow(mask[mask_x_min:mask_x_max,mask_y_min:mask_y_max])
In [ ]:
# Crop the mask, flat field and dark frame to the detected region of interest.
roi = (slice(mask_x_min, mask_x_max), slice(mask_y_min, mask_y_max))
mask = mask[roi]
empty_beam = empty_beam[roi]
dark_image = dark_image[roi]
plt.imshow(empty_beam)
plt.colorbar(orientation='horizontal')
In [ ]:
# Load the projection frames (one frame per rotation angle).
#TODO: add support for multiple frames taken at the same angle
with h5py.File(data_file,'r') as h5f:
    data_images_count = len(h5f['data'])
    data_images = None
    data_file_number = 0
    data_angles = None
    for k,v in h5f['data'].iteritems():
        if data_images is None:
            # allocate the cropped stack: (frames, roi_height, roi_width)
            data_images = np.zeros(shape=(data_images_count,
                                          mask_x_max-mask_x_min,
                                          mask_y_max-mask_y_min
                                          ),
                                   dtype='float32')
        if data_angles is None:
            data_angles = np.zeros(shape=(data_images_count,), dtype='float32')
        attributes = json.loads(v.attrs.items()[0][1])[0]
        exposure = attributes['frame']['image_data']['exposure']
        data_angles[data_file_number] = attributes['frame']['object']['angle position']
        # crop to the ROI; the negative x-indices pick the slice that, after
        # swapaxes+flipud, matches the orientation of the flat/dark frames
        data_images[data_file_number] = np.flipud(
            v[mask_y_min:mask_y_max,-mask_x_max:-mask_x_min].astype('float32').swapaxes(0,1)) / exposure
        data_file_number = data_file_number + 1
In [ ]:
# Subtract the dark current and replace non-positive values with a small
# epsilon so the upcoming division and log cannot hit zero.
# (Values strictly between 0 and 1e-6 are deliberately left untouched here.)
data_beam = data_images - dark_image
data_beam[data_beam <= 0] = 1e-6
empty_beam[empty_beam <= 0] = 1e-6
# free the raw stack — only the corrected data_beam is used from here on
%xdel data_images
In [ ]:
# Sanity check: show one dark-corrected projection with a 3x3 median filter.
plt.figure(figsize=(10,10))
denoised_frame = cv2.medianBlur(data_beam[5], 3)
plt.imshow(denoised_frame)
plt.colorbar(orientation='horizontal')
In [ ]:
# Flat-field normalisation: transmission = (data - dark) / (empty - dark),
# masked and clamped into (1e-6, 1] so that -log() stays finite everywhere.
np.divide(data_beam, empty_beam, out=data_beam)
np.multiply(data_beam, mask, out=data_beam)
data_beam[data_beam > 1] = 1
data_beam[np.isclose(data_beam, 0)] = 1
data_beam[data_beam < 1e-6] = 1e-6
In [ ]:
# Beer-Lambert law: absorption sinogram from the normalised transmission.
sinogram = np.negative(np.log(data_beam))
In [ ]:
# %xdel data_beam
In [ ]:
# Sinogram of one detector row, with projections reordered by rotation angle.
angle_order = np.argsort(data_angles)
plt.figure(figsize=(10,10))
plt.imshow(sinogram[angle_order, 500, :])
plt.axis('tight')
plt.colorbar(orientation='horizontal')
In [ ]:
# Searching for the detector/rotation-axis tilt: pick two projections
# ~180 degrees apart (19 and 199 deg in this scan) to compare one against
# the mirrored other below.
position_0 = np.argwhere(np.isclose(data_angles, 19, atol=0.05))[0][0]
print position_0
position_180 = np.argwhere(np.isclose(data_angles, 199, atol=0.05))[0][0]
print position_180
def get_region(position):
    # Return the projection at `position`, denoised with a 3x3 median filter.
    return cv2.medianBlur(sinogram[position,:,:],3)
def cv_rotate(x, angle):
    """
    Rotate a 2D array around its centre using OpenCV.

    :param x: 2d numpy array
    :param angle: rotation angle in degrees
    :return: rotated array of the same shape
    """
    height, width = x.shape[0], x.shape[1]
    # pixel-centre convention: the centre of an N-pixel axis is N/2 - 0.5
    centre = (width / 2.0 - 0.5, height / 2.0 - 0.5)
    transform = cv2.getRotationMatrix2D(centre, angle, 1.0)
    return cv2.warpAffine(x, transform, (width, height), flags=cv2.INTER_LINEAR)
# Visual check of the tilt/shift estimate: with a good rotation angle `alfa`
# and column crop `shift`, the last panel (mirrored 180-deg projection minus
# the 0-deg projection) should be close to zero.
data_0 = get_region(position_0)
data_180 = get_region(position_180)
alfa = 1.21
shift = -126
plt.figure(figsize=(15,15))
difference = (np.fliplr(cv_rotate(data_180[:,:shift], alfa)) -
              cv_rotate(data_0[:,:shift], alfa))
panels = ((221, data_0),
          (222, data_180),
          (223, np.fliplr(data_180)),
          (224, difference))
for subplot_code, panel_image in panels:
    plt.subplot(subplot_code)
    plt.imshow(panel_image)
    plt.axis('tight')
    plt.colorbar(orientation='horizontal')
In [ ]:
# Estimated rotation-axis column: centre of the cropped detector width
print (data_0.shape[1]+shift)/2.
In [ ]:
import tomopy
In [ ]:
# Fix the rotation-axis tilt by rotating every projection in place.
# CONSISTENCY FIX: reuse `alfa` (the tilt angle estimated above) instead of
# repeating the magic constant 1.21 — one place to update when retuning.
for i in range(sinogram.shape[0]):
    sinogram[i] = cv_rotate(sinogram[i], alfa)
In [ ]:
# Trial reconstruction of two central slices with tomopy (ART algorithm).
# rot_center = tomopy.find_center(sinogram[:,1200:1400,:], data_angles/180*np.pi, ind = 0, emission=True)
# print rot_center
# hand-picked rotation centre — TODO confirm against tomopy.find_center above
rot_center = 467
rec = tomopy.recon(sinogram[:,1300:1302,:], data_angles/180*np.pi,
                   center=rot_center, algorithm='art', emission=True)
plt.figure(figsize=(10,10))
plt.imshow(rec[0],vmin=0)
plt.colorbar(orientation='horizontal')
plt.show()
In [ ]:
# Same reconstructed slice with a tight display range for low-contrast detail.
plt.figure(figsize=(15,15))
reconstructed_slice = rec[0]
plt.imshow(reconstructed_slice, vmin=0.0, vmax=0.03)
plt.colorbar(orientation='horizontal')
plt.show()
In [ ]:
# Quick look at the normalised transmission for one detector row.
plt.imshow(data_beam[:,500,:])
In [ ]:
import astra
def astra_tomo2d_parallel(sinogram, angles):
    """
    Reconstruct one slice from a parallel-beam sinogram with ASTRA (GPU SIRT).

    :param sinogram: 2d array, shape (n_angles, detector_size)
    :param angles: projection angles in radians
    :return: 2d reconstructed slice, shape (detector_size, detector_size)
    """
    angles = angles.astype('float64')
    detector_size = sinogram.shape[1]
    rec_size = detector_size

    vol_geom = astra.create_vol_geom(rec_size, rec_size)
    proj_geom = astra.create_proj_geom('parallel', 1.0, detector_size, angles)
    sinogram_id = astra.data2d.create('-sino', proj_geom, data=sinogram)
    # Create a data object for the reconstruction
    rec_id = astra.data2d.create('-vol', vol_geom)

    # GPU SIRT; other available algorithms: SART_CUDA, EM_CUDA, FBP_CUDA
    # (a CPU path would additionally need a 'ProjectorId' from create_projector)
    cfg = astra.astra_dict('SIRT_CUDA')
    cfg['ReconstructionDataId'] = rec_id
    cfg['ProjectionDataId'] = sinogram_id
    cfg['option'] = {}
    # cfg['option']['MinConstraint'] = 0
    # cfg['option']['MaxConstraint'] = 5

    alg_id = astra.algorithm.create(cfg)
    # FIX: cleanup moved into finally — GPU memory is tied up in the algorithm
    # object and host RAM in the data objects, and previously leaked if
    # run()/get() raised.
    try:
        astra.algorithm.run(alg_id, 150)  # 150 SIRT iterations
        rec = astra.data2d.get(rec_id)
    finally:
        astra.algorithm.delete(alg_id)
        astra.data2d.delete(rec_id)
        astra.data2d.delete(sinogram_id)
        astra.clear()
    return rec
def astra_tomo3d_parallel(sinogram, angles):
    """
    Reconstruct a stack of slices from a parallel-beam sinogram with ASTRA
    (GPU SIRT3D).

    :param sinogram: 3d array, shape (n_slices, n_angles, detector_size)
    :param angles: projection angles in radians
    :return: 3d volume, shape (n_slices, detector_size, detector_size)
    """
    angles = angles.astype('float64')
    detector_size = sinogram.shape[-1]
    slices_number = sinogram.shape[0]
    rec_size = detector_size

    vol_geom = astra.create_vol_geom(rec_size, rec_size, slices_number)
    proj_geom = astra.create_proj_geom('parallel3d', 1.0, 1.0, slices_number, detector_size, angles)
    sinogram_id = astra.data3d.create('-sino', proj_geom, data=sinogram)
    # Create a data object for the reconstruction
    rec_id = astra.data3d.create('-vol', vol_geom)

    # GPU SIRT3D; a CPU path would additionally need a 'ProjectorId'
    cfg = astra.astra_dict('SIRT3D_CUDA')
    cfg['ReconstructionDataId'] = rec_id
    cfg['ProjectionDataId'] = sinogram_id
    cfg['option'] = {}
    # cfg['option']['MinConstraint'] = 0
    # cfg['option']['MaxConstraint'] = 5

    alg_id = astra.algorithm.create(cfg)
    # FIX: cleanup moved into finally — GPU memory is tied up in the algorithm
    # object and host RAM in the data objects, and previously leaked if
    # run()/get() raised.
    try:
        astra.algorithm.run(alg_id, 150)  # 150 SIRT iterations
        rec = astra.data3d.get(rec_id)
    finally:
        astra.algorithm.delete(alg_id)
        astra.data3d.delete(rec_id)
        astra.data3d.delete(sinogram_id)
        astra.clear()
    return rec
In [ ]:
# C-contiguous float32 copy for ASTRA: rows 1300-1399, axes reordered to
# (slices, angles, detector)
s1 = np.require(sinogram[:,1300:1400,:shift].swapaxes(0,1),dtype=np.float32, requirements=['C'])
In [ ]:
# inspect the flags to confirm the array is C-contiguous
s1.flags
In [ ]:
# timed 3D reconstruction of the 100-slice sub-volume
%time rec_slice = astra_tomo3d_parallel(s1, data_angles*np.pi/180)
In [ ]:
from time import time
In [ ]:
res = None
t0 = time()
total_slices = sinogram.shape[1]
for i in range(total_slices):
if i%10==0:
print i, (time()-t0), (time()-t0)/(i+1), (time()-t0)/(i+1)*total_slices
rec_slice = astra_tomo2d_parallel(sinogram[:,i,:shift], data_angles*np.pi/180)
if res is None:
res = np.zeros(shape=(total_slices, rec_slice.shape[0], rec_slice.shape[1]),
dtype='float32')
res[i] = rec_slice
In [ ]:
# Save the reconstruction next to the input file as <name>_reconstruction.h5.
data_file_parts = os.path.splitext(data_file)
out_file = '{}_reconstruction.h5'.format(''.join(data_file_parts[:-1]))
print out_file
with h5py.File(out_file,'w') as h5f:
    # NOTE(review): the volume is decimated by 2 along every axis before
    # saving — confirm this downsampling is intended and not just a test hack
    h5f.create_dataset('Result', data=res[::2,::2,::2], dtype=np.float32)
In [ ]:
def save_amira(result_file):
    """
    Export a reconstructed volume to the Amira raw format.

    :param result_file: path to an HDF5 file whose 'Result' dataset holds the
        reconstructed volume as a 3D array

    Writes two files next to the input (existing files are overwritten):
      * amira.raw -- raw volume data, float32 little-endian, x-fastest order
      * tomo.hx   -- Amira loader script describing the raw file geometry
    """
    data_path = os.path.dirname(result_file)
    with open(os.path.join(data_path, 'amira.raw'), 'wb') as amira_file:
        with h5py.File(result_file, 'r') as h5f:
            x = h5f['Result']
            for i in range(x.shape[0]):
                # FIX: was numpy.array(...) — `numpy` was only in scope via the
                # %pylab magic; use the explicit `np` alias like the rest of
                # the notebook so the function also works without pylab
                np.array(x[i, :, :]).tofile(amira_file)
            file_shape = x.shape
    with open(os.path.join(data_path, 'tomo.hx'), 'w') as af:
        af.write('# Amira Script\n')
        af.write('remove -all\n')
        af.write(r'[ load -raw ${SCRIPTDIR}/amira.raw little xfastest float 1 '+
                 str(file_shape[1])+' '+str(file_shape[2])+' '+str(file_shape[0])+
                 ' 0 '+str(file_shape[1]-1)+' 0 '+str(file_shape[2]-1)+' 0 '+str(file_shape[0]-1)+
                 ' ] setLabel tomo.raw\n')
In [ ]:
# Write the Amira export for the reconstruction saved above
save_amira(out_file)
In [ ]:
!ls -la
In [ ]: