"This notebook corresponds to version {{ version }} of the pipeline tool: https://github.com/NSLS-II/pipelines"
This notebook begins with a raw time-series of images and ends with $g_2(t)$ for a range of $q$, fit to an exponential or stretched exponential, and a two-time correlation function.
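For reference, the one-time fits further down use the standard Siegert-relation form $g_2(q,t) = \mathrm{baseline} + \beta\, e^{-2(\Gamma t)^{\alpha}}$, where $\beta$ is the speckle contrast, $\Gamma$ the relaxation rate, and $\alpha = 1$ recovers a simple exponential; these correspond to the baseline, beta, relaxation_rate, and alpha parameters passed to fit_saxs_g2 below (the exact parameterization inside chxanalys may differ slightly).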
CHX Olog (https://logbook.nsls2.bnl.gov/11-ID/)
Import packages for I/O, visualization, and analysis.
In [1]:
from chxanalys.chx_libs import (np, roi, time, datetime, os, get_events,
        getpass, db, get_images, LogNorm, plt, tqdm, utils, Model)
from chxanalys.chx_generic_functions import (get_detector, get_fields, get_sid_filenames,
        load_data, load_mask, reverse_updown, ring_edges, get_avg_img, check_shutter_open,
        apply_mask, show_img, check_ROI_intensity, run_time, plot1D, get_each_frame_intensity,
        create_hot_pixel_mask, show_ROI_on_image, create_time_slice, save_lists,
        save_arrays, psave_obj, pload_obj, get_non_uniform_edges)
from chxanalys.XPCS_SAXS import (get_circular_average, save_lists, get_ring_mask,
        get_each_ring_mean_intensity, plot_qIq_with_ROI, save_saxs_g2, plot_saxs_g2,
        fit_saxs_g2, cal_g2, create_hot_pixel_mask, get_t_iq, fit_q2_rate,
        plot_saxs_two_g2, fit_q_rate, circular_average, plot_saxs_g4, get_t_iqc,
        multi_uids_saxs_xpcs_analysis)
from chxanalys.Two_Time_Correlation_Function import (show_C12, get_one_time_from_two_time,
        get_four_time_from_two_time, rotate_g12q_to_rectangle)
from chxanalys.chx_compress_analysis import (compress_eigerdata, read_compressed_eigerdata,
        Multifile, get_avg_imgc, get_each_frame_intensityc,
        get_each_ring_mean_intensityc, mean_intensityc, cal_waterfallc, plot_waterfallc)
from chxanalys.SAXS import fit_form_factor
from chxanalys.chx_correlationc import (cal_g2c, Get_Pixel_Arrayc, auto_two_Arrayc,
        get_pixelist_interp_iq)
from chxanalys.chx_correlationp import (cal_g2p, auto_two_Arrayp)
from chxanalys.Create_Report import (create_pdf_report,
        create_multi_pdf_reports_for_uids, create_one_pdf_reports_for_uids)
from chxanalys.XPCS_GiSAXS import (get_qedge, get_qmap_label, get_qr_tick_label, get_reflected_angles,
        convert_gisaxs_pixel_to_q, show_qzr_map, get_1d_qr, get_qzrmap, show_qzr_roi,
        get_each_box_mean_intensity, save_gisaxs_g2, plot_gisaxs_g2, fit_gisaxs_g2,
        plot_gisaxs_two_g2, plot_qr_1d_with_ROI, fit_qr_qz_rate,
        multi_uids_gisaxs_xpcs_analysis, plot_gisaxs_g4)
#%matplotlib notebook
%matplotlib inline
In [2]:
plt.rcParams.update({'figure.max_open_warning': 0})
In [3]:
#%%javascript
#var nb = IPython.notebook;
#var kernel = IPython.notebook.kernel;
#var command = "NOTEBOOK_FULL_PATH = '" + nb.base_url + nb.notebook_path + "'";
#kernel.execute(command);
In [4]:
#print("NOTEBOOK_FULL_PATH:\n", NOTEBOOK_FULL_PATH)
In [ ]:
#Test
In [5]:
CYCLE = '2016_3'
username = getpass.getuser()
#username = "commissioning"
username = "zhangz"  # override the login name for this analysis
date_path = datetime.now().strftime('%Y/%m/%d')  # e.g., '2016/03/01'
data_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/')
# Or define data_dir directly, e.g., data_dir = '/XF11ID/analysis/2016_2/rheadric/test/'
os.makedirs(data_dir, exist_ok=True)
print('Results from this analysis will be stashed in the directory %s' % data_dir)
In [6]:
# 50ms dose
# Each uid_/(start_time, stop_time) pair below overwrites the previous one;
# only the last assignment in this cell takes effect.
uid_ = 'PEO430k_AuNP20nm_25nM_Capillary_RT'
start_time, stop_time = '2016-11-10 21:16:59', '2016-11-10 21:20:08'  # 4ms --> 0.4s, 110 frames
uid_ = 'PEO430k_AuNP20nm_25nM_Capillary_RT'
start_time, stop_time = '2016-11-10 21:20:09', '2016-11-10 21:23:15'  # 40ms --> 4s, 110 frames
uid_ = 'PEO430k_AuNP20nm_25nM_Capillary_RT'
start_time, stop_time = '2016-11-10 21:23:16', '2016-11-10 21:33:52'  # 0.5s --> 50s, 110 frames
uid_ = 'dose_test'
start_time, stop_time = '2016-11-10 21:53:19', '2016-11-10 21:54:37'
# 12.5ms dose
uid_ = 'PEO430k_AuNP20nm_25nM_Capillary_RT'
start_time, stop_time = '2016-11-10 22:05:03', '2016-11-10 22:07:37'  # 4ms --> 0.4s, 110 frames
start_time, stop_time = '2016-11-10 22:07:38', '2016-11-10 22:10:18'  # 40ms --> 4s, 110 frames
start_time, stop_time = '2016-11-10 22:10:19', '2016-11-10 22:17:59'  # 0.5s --> 50s, 110 frames
uid_ = 'PEO430k_AuNP20nm_25nM_Capillary_70C'
start_time, stop_time = '2016-11-11 00:08:08', '2016-11-11 00:10:16'  # 4ms --> 0.4s, 110 frames
start_time, stop_time = '2016-11-11 00:10:16', '2016-11-11 00:12:31'  # 40ms --> 4s, 110 frames
start_time, stop_time = '2016-11-11 00:12:32', '2016-11-11 00:22:50'  # 0.5s --> 50s, 110 frames
uid_ = 'PEO430k_AuNP20nm_25nM_Capillary_85C'
start_time, stop_time = '2016-11-11 01:02:02', '2016-11-11 01:04:21'  # 4ms --> 0.4s, 110 frames
start_time, stop_time = '2016-11-11 01:04:22', '2016-11-11 01:06:37'  # 40ms --> 4s, 110 frames
start_time, stop_time = '2016-11-11 01:06:37', '2016-11-11 01:17:02'  # 0.5s --> 50s, 110 frames
bad_list = []
uid_ = 'PEO430k_AuNP20nm_25nM_Capillary_120C'
start_time, stop_time = '2016-11-11 02:02:36', '2016-11-11 02:04:48'  # 4ms --> 0.4s, 110 frames
start_time, stop_time = '2016-11-11 02:04:49', '2016-11-11 02:07:02'  # 40ms --> 4s, 110 frames
start_time, stop_time = '2016-11-11 02:07:03', '2016-11-11 02:17:24'  # 0.5s --> 50s, 110 frames
bad_list = []
hdrs = db(start_time=start_time, stop_time=stop_time)
hdrs = [x for x in hdrs if x['start']['scan_id'] not in bad_list]
print('Found %s uids in total' % len(hdrs))
sids = []
uids = []
for hdr in hdrs:
    s = get_sid_filenames(hdr)
    print(s)
    sids.append(s[0])  # scan ids
    uids.append(s[1])  # uids
sids = sids[::-1]  # reverse the returned order
uids = uids[::-1]
uid = uids[0]
In [7]:
data_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/%s/'%uid_)
os.makedirs(data_dir, exist_ok=True)
print('Results from this analysis will be stashed in the directory %s' % data_dir)
In [8]:
data_dir_ = os.path.join( data_dir, 'Average/')
os.makedirs(data_dir_, exist_ok=True)
print ( data_dir_)
In [9]:
detector = get_detector( db[uid ] )
print ('Detector is: %s'%detector )
sud = get_sid_filenames(db[uid])
print ('scan_id, full-uid, data path are: %s--%s--%s'%(sud[0], sud[1], sud[2][0] ))
In [10]:
imgs = load_data( uid, detector , reverse= True )
Nimg = len(imgs)
md = imgs.md
In [11]:
try:
    md['Measurement'] = db[uid]['start']['Measurement']
    md['sample'] = db[uid]['start']['sample']
    #md['sample'] = 'PS205000-PMMA-207000-SMMA3'
    print(md['sample'])
except:
    md['Measurement'] = 'Measurement'
    md['sample'] = 'sample'
In [12]:
imgs
Out[12]:
In [13]:
md
Out[13]:
In [14]:
# The physical size of the pixels
dpix = md['x_pixel_size'] * 1000.  # in mm; the Eiger 4M pixel is 0.075 mm
lambda_ = md['incident_wavelength']  # wavelength of the X-rays, in Angstroms
Ldet = 4.84 * 1000  # detector-to-sample distance (mm); currently *1000 for SAXS, *1 for GiSAXS
exposuretime = md['count_time']
acquisition_period = md['frame_time']
print('The sample is %s' % md['sample'])
print('Exposuretime=%s sec, Acquisition_period=%s sec' % (exposuretime, acquisition_period))
timeperframe = acquisition_period  # for g2
#timeperframe = exposuretime  # for visibility
#timeperframe = 2  # manual overwrite!!! we are apparently writing the wrong metadata...
center = [md['beam_center_x'], md['beam_center_y']]  # for 4M
center = [center[1], center[0]]  # swap to (row, col) order
print('Beam center=', center)
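This geometry fixes the usual SAXS pixel-to-$q$ conversion, $q = (4\pi/\lambda)\sin\theta$ with $2\theta = \arctan(r\,\mathrm{dpix}/L_{det})$. A minimal sketch (pixel_to_q is a hypothetical helper, not part of chxanalys):
import numpy as np
def pixel_to_q(r_pixels, dpix=dpix, Ldet=Ldet, wavelength=lambda_):
    # r_pixels: radial distance from the beam center, in pixels
    two_theta = np.arctan(r_pixels * dpix / Ldet)  # dpix and Ldet are both in mm
    return 4 * np.pi / wavelength * np.sin(two_theta / 2.)  # q in A^-1 (wavelength in A)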
In [15]:
setup_pargs=dict(uid=uid_, dpix= dpix, Ldet=Ldet, lambda_= lambda_,
timeperframe=timeperframe, center=center, path= data_dir_, md=md)
In [16]:
setup_pargs
Out[16]:
In [17]:
mask_path = '/XF11ID/analysis/2016_3/masks/'
mask_name = 'Nov10_4M_mask.npy'
In [18]:
mask = load_mask(mask_path, mask_name, plot_ = True, reverse=True, image_name = 'uid=%s-mask'%uid )
In [19]:
md['mask'] = mask
md['mask_file']= mask_path + mask_name
md['NOTEBOOK_FULL_PATH'] = None #NOTEBOOK_FULL_PATH
#psave_obj( md, data_dir + 'uid=%s-md'%uid ) #save the setup parameters
#md = pload_obj(data_dir + 'uid=%s-md'%uid )
In [20]:
imgsa = apply_mask( imgs, mask )
In [21]:
avg_img = get_avg_img( imgsa, sampling = int(Nimg/3), plot_ = True, uid =uid)
In [22]:
#avg_img = get_avg_imgc( FD, beg=0,end=10000,sampling = 1, plot_ = False )
show_img( avg_img, vmin=.1, vmax=3.0, logs=False, image_name= 'uid=%s--img-avg-'%uid_,
save=True, path=data_dir_)
md['avg_img'] = avg_img
In [23]:
hmask = create_hot_pixel_mask(avg_img, 2**15)  # mask pixels above 2**15 counts
mask = mask * hmask
hmask = create_hot_pixel_mask(avg_img, 1e8)  # very high threshold: effectively no extra masking
qp, iq, q = get_circular_average(avg_img, mask * hmask, pargs=setup_pargs, nx=None,
                plot_=True, show_pixel=False, xlim=[0.0001, .14], ylim=[0.00009, 1e0], save=True)
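The circular (azimuthal) average bins pixel intensities by radial distance from the beam center. A minimal numpy sketch of the idea, not the chxanalys implementation (which also converts the radial axis to q):
import numpy as np
yy, xx = np.indices(avg_img.shape)
r = np.hypot(xx - center[1], yy - center[0]).astype(int)  # radius in whole pixels
valid = (mask * hmask).astype(bool)                       # keep unmasked pixels only
iq_check = np.bincount(r[valid], weights=avg_img[valid]) / np.bincount(r[valid])
# radii with no unmasked pixels give nan here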
In [24]:
uniform = True
In [25]:
if not uniform:
    #width = 4  # in pixels
    width = 0.001
    number_rings = 1
    #centers = [31, 50, 67, 84, 102, 119]  # in pixels
    centers = [0.00235, 0.00379, 0.00508, 0.00636, 0.00773, 0.00902]  # in A-1
    centers = [0.0065, 0.0117, 0.021, 0.0336, 0.044, 0.057]  # in A-1
    edges = get_non_uniform_edges(centers, width, number_rings)
    inner_radius = None
    outer_radius = None
    width = None
    num_rings = None
In [26]:
if uniform:
    inner_radius = 0.0045
    outer_radius = 0.062
    width = 0.0035
    num_rings = 10
    edges = None
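With these settings, ten rings of width 0.0035 A$^{-1}$ cover 0.035 of the 0.0575 A$^{-1}$ span from 0.0045 to 0.062, leaving nine gaps of 0.0025 A$^{-1}$ each. The implied edges can be checked with ring_edges (imported above); this sketch assumes the scikit-beam-style signature, which the chxanalys wrapper may not match exactly:
spacing = (outer_radius - inner_radius - num_rings * width) / (num_rings - 1)  # 0.0025 A^-1
edges_check = ring_edges(inner_radius, width=width, spacing=spacing, num_rings=num_rings)
print(edges_check)  # ten (q_lo, q_hi) pairs in A^-1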
In [27]:
ring_mask, q_ring_center, q_ring_val = get_ring_mask(mask, inner_radius=inner_radius, unit='A',
                outer_radius=outer_radius, width=width, num_rings=num_rings,
                edges=edges, pargs=setup_pargs)
qind, pixelist = roi.extract_label_indices(ring_mask)
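Here qind[k] gives the ring label (1..num_rings) of the flattened pixel index pixelist[k]; a quick sanity check of the ring populations (a sketch):
print('pixels per ring:', np.bincount(qind)[1:])  # bin 0 is empty: only labeled pixels are returned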
In [28]:
md['center'] = center
md['ring_mask'] = ring_mask
md['q_ring_center']= q_ring_center
md['q_ring_val'] = q_ring_val
md['beam_center_x'] = center[1]
md['beam_center_y']= center[0]
md['data_dir'] = data_dir
psave_obj( md, data_dir_ + 'uid=%s-md'%uid_ ) #save the setup parameters
In [29]:
#pload_obj( data_dir + 'uid=%s-md'%uid_ )
In [30]:
show_ROI_on_image( avg_img, ring_mask, center, label_on = False, rwidth=800, alpha=.9,
vmax=30, save=True, path=data_dir_, uid=uid_)
In [31]:
plot_qIq_with_ROI( q, iq, q_ring_center, logs=True, uid=uid, xlim=[0.001,.12],
ylim = [1e-4, 1e3], save=True, path=data_dir)
In [33]:
run_num = 1
sub_num = len(uids) // run_num
print(run_num, sub_num)
In [34]:
data_dir
Out[34]:
In [35]:
g2s, taus, useful_uids = multi_uids_saxs_xpcs_analysis(uids, md, run_num, sub_num,
                fit=True, force_compress=False, compress=True)
In [36]:
useful_uids
Out[36]:
In [37]:
#%run /XF11ID/analysis/Analysis_Pipelines/Develop/chxanalys/chxanalys/Create_Report.py
In [38]:
create_multi_pdf_reports_for_uids( useful_uids, g2s, data_dir, append_name='_1' )
In [39]:
os.environ['HTTPS_PROXY'] = 'https://proxy:8888'
os.environ['no_proxy'] = 'cs.nsls2.local,localhost,127.0.0.1'
In [40]:
from chxanalys.chx_olog import LogEntry,Attachment, update_olog_uid, update_olog_id
In [41]:
for key in list(useful_uids.keys()):
    for k in list(useful_uids[key]):
        uid_k = useful_uids[key][k]
        filename = data_dir + 'XPCS_Analysis_Report_for_uid=%s_1.pdf' % uid_k
        atch = [Attachment(open(filename, 'rb'))]
        update_olog_uid(uid=uid_k, text='Add XPCS Analysis PDF Report', attachments=atch)
In [42]:
create_one_pdf_reports_for_uids( useful_uids, g2s, data_dir, filename=uid_ )
In [43]:
list( g2s[1].keys() )
Out[43]:
In [44]:
bad_uids =[ ]
In [45]:
g2s_average = {}  # g2s_average[run] = average g2 over the time series in that run
for key in list(g2s.keys()):
    g2s_average[key] = np.zeros_like(g2s[key][1])
    i = 1
    for sub_key in list(g2s[key].keys()):
        #print(g2s_average[key].shape, g2s[key][sub_key].shape)
        try:
            if sub_key not in bad_uids:
                g2s_average[key] += g2s[key][sub_key]
                i += 1
                print('Run= %s--sub_run= %s-- pass' % (key, sub_key))
            else:
                print('sub= %s, cannot be averaged here' % sub_key)
        except:
            print('sub= %s, cannot be averaged here' % sub_key)
            pass
    g2s_average[key] /= i - 1  # i started at 1, so i-1 sub-runs were summed
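When bad_uids is empty, the bookkeeping above reduces to a plain mean over sub-runs; an equivalent vectorized form, left commented out so it does not overwrite the loop's result:
#for key in list(g2s.keys()):
#    g2s_average[key] = np.mean(list(g2s[key].values()), axis=0)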
In [46]:
for key in list(g2s.keys()):
    res_pargs = dict(taus=taus, q_ring_center=q_ring_center,
                     path=data_dir_, uid=uid_ + '@run--%s' % key)
    save_saxs_g2(g2s_average[key], res_pargs)
In [47]:
if False:
    for key in list(g2s.keys()):
        for sub_key in list(g2s[key].keys()):
            res_pargs = dict(taus=taus, q_ring_center=q_ring_center,
                             path=data_dir_, uid=uid_ + '@run--%s--subrun--%s' % (key, sub_key))
            plot_saxs_g2(g2s[key][sub_key], taus, vlim=[0.95, 1.05], res_pargs=res_pargs)
In [48]:
if True:
    for key in list(g2s.keys()):
        res_pargs = dict(taus=taus, q_ring_center=q_ring_center,
                         path=data_dir_, uid=uid_ + '@run--%s' % key)
        plot_saxs_g2(g2s_average[key], taus, vlim=[0.95, 1.05], res_pargs=res_pargs)
In [49]:
fit = True
In [50]:
if fit:
    for key in list(g2s.keys()):
        res_pargs = dict(taus=taus, q_ring_center=q_ring_center,
                         path=data_dir_, uid=uid_ + '@run--%s' % key)
        fit_result = fit_saxs_g2(g2s_average[key], res_pargs, function='stretched', vlim=[0.95, 1.05],
                fit_variables={'baseline': True, 'beta': True, 'alpha': False, 'relaxation_rate': True},
                guess_values={'baseline': 1.45, 'beta': 0.08, 'alpha': 1.0, 'relaxation_rate': 0.01})
        #psave_obj(fit_result, data_dir + uid_ + '@run--%s' % key)
        fit_q_rate(q_ring_center, fit_result['rate'], power_variable=False,
                   uid=uid_ + '@run--%s' % key, path=data_dir_)
        save_lists([q_ring_center**2, fit_result['rate']], ['q2', 'rate'],
                   filename='Q2-rate-uid=%s' % (uid_ + '@run--%s' % key), path=data_dir)
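For diffusive dynamics the relaxation rate scales as $\Gamma = D q^2$, which is why $q^2$ and rate are saved together above; the apparent diffusion coefficient can be recovered by hand from the last run's fit (a sketch):
D_app, intercept = np.polyfit(q_ring_center**2, fit_result['rate'], 1)
print('apparent D = %.3g A^2/s' % D_app)  # q in A^-1, rate in s^-1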
In [51]:
create_report = True
In [52]:
#data_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/')
In [53]:
pdf_out_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/')
filename= "XPCS_Analysis_Report_for_uid=%s-average.pdf"%uid_
print (pdf_out_dir + filename)
In [54]:
uid_
Out[54]:
In [55]:
if create_report:
    c = create_pdf_report(data_dir_, uid_, pdf_out_dir, filename=filename)
    page = 1
    # Page one: meta-data / I(q)-q / ROI
    c.report_header(page=page)
    c.report_meta(top=730)
    c.report_static(top=560)
    c.report_ROI(top=300)
    Nk = len(list(g2s.keys()))
    Npages = Nk//2 + Nk % 2  # two runs per page
    for i, key in enumerate(list(g2s.keys())):
        if not i % 2:
            page += 1
            c.new_page()
            c.report_header(page=page)
        if i % 2:
            top = 350
        else:
            top = 720
        c.report_one_time(top=top, g2_fit_file='uid=%s@run--%s--g2--fit-.png' % (uid_, key),
                          q_rate_file='uid=%s@run--%s--Q-Rate--fit-.png' % (uid_, key))
    c.save_page()
    c.done()
In [56]:
last_uid_num = len( useful_uids[1] )
In [57]:
uid_last = useful_uids[1][ last_uid_num ]
print( uid_last )
In [58]:
#uid_last = useful_uids[1][-1]
In [59]:
c.filename
Out[59]:
In [60]:
#uid_ = uid_last  # useful_uids[key][k]
filename = c.filename  # 'XPCS_Analysis_Report_for_uid=%s.pdf' % uid_
atch = [Attachment(open(filename, 'rb'))]
update_olog_uid(uid=uid_last, text='Add XPCS Analysis PDF Report', attachments=atch)
In [ ]: