"This notebook corresponds to version {{ version }} of the pipeline tool: https://github.com/NSLS-II/pipelines"
This notebook begins with a raw time-series of images and ends with $g_2(t)$ for a range of $q$, fit to an exponential or stretched exponential, and a two-time correlation function.
CHX Olog (https://logbook.nsls2.bnl.gov/11-ID/)
Import packages for I/O, visualization, and analysis.
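For reference, the one-time correlation fits below use a stretched-exponential model. Here is a minimal sketch of that form (the parameter names mirror the fit_variables/guess_values dicts passed to fit_gisaxs_g2 later; this is an illustration, not the chxanalys implementation):
import numpy as np

def g2_stretched(tau, baseline=1.0, beta=0.3, relaxation_rate=0.1, alpha=1.0):
    # g2(tau) = baseline + beta * exp(-2 * (relaxation_rate * tau)**alpha);
    # alpha = 1 recovers a simple exponential.
    return baseline + beta * np.exp(-2 * (relaxation_rate * tau)**alpha)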
In [47]:
from chxanalys.chx_libs import (np, roi, time, datetime, os, get_events,
    getpass, db, get_images, LogNorm, plt, tqdm, utils, Model)
from chxanalys.chx_generic_functions import (get_detector, get_fields, get_sid_filenames,
    load_data, load_mask, reverse_updown, ring_edges, get_avg_img, check_shutter_open,
    apply_mask, show_img, check_ROI_intensity, run_time, plot1D, get_each_frame_intensity,
    create_hot_pixel_mask, show_ROI_on_image, create_time_slice, save_lists,
    save_arrays, psave_obj, pload_obj, get_non_uniform_edges)
from chxanalys.XPCS_SAXS import (get_circular_average, save_lists, get_ring_mask, get_each_ring_mean_intensity,
    plot_qIq_with_ROI, save_saxs_g2, plot_saxs_g2, fit_saxs_g2, cal_g2,
    create_hot_pixel_mask, get_t_iq, fit_q2_rate, plot_saxs_two_g2, fit_q_rate,
    circular_average, plot_saxs_g4, get_t_iqc, multi_uids_saxs_xpcs_analysis)
from chxanalys.Two_Time_Correlation_Function import (show_C12, get_one_time_from_two_time,
    get_four_time_from_two_time, rotate_g12q_to_rectangle)
from chxanalys.chx_compress_analysis import (compress_eigerdata, read_compressed_eigerdata,
    Multifile, get_avg_imgc, get_each_frame_intensityc,
    get_each_ring_mean_intensityc, mean_intensityc, cal_waterfallc, plot_waterfallc)
from chxanalys.SAXS import fit_form_factor
from chxanalys.chx_correlationc import (cal_g2c, Get_Pixel_Arrayc, auto_two_Arrayc, get_pixelist_interp_iq)
from chxanalys.chx_correlationp import (cal_g2p, auto_two_Arrayp)
from chxanalys.Create_Report import (create_pdf_report,
    create_multi_pdf_reports_for_uids, create_one_pdf_reports_for_uids)
from chxanalys.XPCS_GiSAXS import (get_qedge, get_qmap_label, get_qr_tick_label, get_reflected_angles,
    convert_gisaxs_pixel_to_q, show_qzr_map, get_1d_qr, get_qzrmap, show_qzr_roi, get_each_box_mean_intensity,
    save_gisaxs_g2, plot_gisaxs_g2, fit_gisaxs_g2, plot_gisaxs_two_g2, plot_qr_1d_with_ROI, fit_qr_qz_rate,
    multi_uids_gisaxs_xpcs_analysis, plot_gisaxs_g4)
%matplotlib inline
In [48]:
plt.rcParams.update({'figure.max_open_warning': 0})
In [49]:
#%reset
In [50]:
#%%javascript
#var nb = IPython.notebook;
#var kernel = IPython.notebook.kernel;
#var command = "NOTEBOOK_FULL_PATH = '" + nb.base_url + nb.notebook_path + "'";
#kernel.execute(command);
In [51]:
#print("NOTEBOOK_FULL_PATH:\n", NOTEBOOK_FULL_PATH)
In [52]:
CYCLE = '2016_3'
username = getpass.getuser()
#username = 'zhangz'  # uncomment to override the login name
date_path = datetime.now().strftime('%Y/%m/%d')  # e.g., '2016/03/01'
data_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/')
os.makedirs(data_dir, exist_ok=True)
print('Results from this analysis will be stashed in the directory %s' % data_dir)
In [53]:
#PEO39kLiTFSI0p085_C3Lines_Paralell_40C
incidence_angle = 0.24
# Dose too low
# uid_= 'PEO39kLiTFSI0p085_C3Lines_Paralell_40C'
# uid_= uid_ + 'Run1'
# start_time, stop_time = '2016-11-13 18:31:11', '2016-11-13 18:36:27'
# uid_= uid_ + 'Run2'
# start_time, stop_time = '2016-11-13 18:36:28', '2016-11-13 18:41:34'
# uid_= uid_ + 'Run3'
# start_time, stop_time = '2016-11-13 18:41:35', '2016-11-13 18:54:10'
# Dose too low
# uid_= 'PEO39kLiTFSI0p085_C3Lines_Orthogonal_40C'
# uid_= uid_ + 'Run1'
# start_time, stop_time = '2016-11-13 19:49:51', '2016-11-13 19:55:03'
# uid_= uid_ + 'Run2'
# start_time, stop_time = '2016-11-13 19:55:04', '2016-11-13 20:00:19'
# uid_= uid_ + 'Run3'
# start_time, stop_time = '2016-11-13 20:00:19', '2016-11-13 20:13:48'
uid_ = 'PEO39kLiTFSI0p085_C3Lines_Orthogonal_40C'
# Note: with both pairs below active, the label accumulates to '...Run1Run2' and only
# the Run2 time window takes effect; comment one pair out to analyze a single run.
uid_ = uid_ + 'Run1'
start_time, stop_time = '2016-11-13 21:02:25', '2016-11-13 21:09:49'
uid_ = uid_ + 'Run2'
start_time, stop_time = '2016-11-13 21:41:31', '2016-11-13 21:53:23'
hdrs = db(start_time=start_time, stop_time=stop_time)
print('Found %s uids in total' % len(hdrs))
sids = []
uids = []
for hdr in hdrs:
    s = get_sid_filenames(hdr)
    print(s)
    sids.append(s[0])
    uids.append(s[1])
sids = sids[::-1]
uids = uids[::-1]
uid = uids[0]
In [54]:
data_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/%s/'%uid_)
os.makedirs(data_dir, exist_ok=True)
print('Results from this analysis will be stashed in the directory %s' % data_dir)
In [55]:
data_dir_ = os.path.join( data_dir, 'Average/')
os.makedirs(data_dir_, exist_ok=True)
print( data_dir_)
In [56]:
detector = get_detector( db[uid ] )
print ('Detector is: %s'%detector )
sud = get_sid_filenames(db[uid])
print ('scan_id, full-uid, data path are: %s--%s--%s'%(sud[0], sud[1], sud[2][0] ))
In [57]:
imgs = load_data( uid, detector )
Nimg = len(imgs)
md = imgs.md
In [58]:
try:
    md['Measurement'] = db[uid]['start']['Measurement']
    md['sample'] = db[uid]['start']['sample']
    #md['sample'] = 'PS205000-PMMA-207000-SMMA3'
except Exception:
    md['Measurement'] = 'Measurement'
    md['sample'] = 'sample'
print(md['sample'])
In [59]:
imgs
Out[59]:
In [60]:
md
Out[60]:
In [61]:
# The physical size of the pixels
dpix = md['x_pixel_size'] * 1000. #in mm, eiger 4m is 0.075 mm
lambda_ =md['incident_wavelength'] # wavelegth of the X-rays in Angstroms
Ldet = 4.84 # detector to sample distance (mm), currently, *1000 for saxs, *1 for gisaxs
exposuretime= md['count_time']
acquisition_period = md['frame_time']
print( 'The sample is %s'%( md['sample'] ))
print( 'Exposuretime=%s sec, Acquisition_period=%s sec'%( exposuretime, acquisition_period ))
timeperframe = acquisition_period#for g2
#timeperframe = exposuretime#for visiblitly
#timeperframe = 2 ## manual overwrite!!!! we apparently writing the wrong metadata....
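Whichever value timeperframe takes, the lag axis for $g_2(t)$ is just the integer frame lags scaled by it. A one-line sketch (lag_frames is an illustrative multi-tau lag ladder, not a variable defined by chxanalys):
lag_frames = np.array([1, 2, 3, 4, 6, 8, 12, 16])  # example multi-tau lag steps, in frames
taus_sec = lag_frames * timeperframe  # lag times in seconds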
In [62]:
setup_pargs=dict(uid=uid_, dpix= dpix, Ldet=Ldet, lambda_= lambda_,
timeperframe=timeperframe, path= data_dir_, md=md)
In [63]:
mask_path = '/XF11ID/analysis/2016_3/masks/'
#mask_name = 'July30_mask.npy' #>= 160 C use this one
mask_name = 'Nov12_4Mpixel_mask.npy'
In [64]:
mask = load_mask(mask_path, mask_name, plot_ = True, image_name = 'uid= %s-mask'%uid )
In [65]:
md['mask'] = mask
md['mask_file']= mask_path + mask_name
md['NOTEBOOK_FULL_PATH'] = None#NOTEBOOK_FULL_PATH
In [66]:
maskr = mask[::-1,:]
imgsr = reverse_updown( imgs )
imgsra = apply_mask( imgsr, maskr )
In [67]:
avg_imgr = get_avg_img( imgsra, sampling = int(Nimg/3), plot_ = True, uid =uid)
In [68]:
show_img( avg_imgr, vmin=.1, vmax=3.0, logs=True, image_name= 'uid=%s--img-avg-'%uid_,
save=True, path=data_dir_)
md['avg_img'] = avg_imgr
In [69]:
inc_x0 = 1476
inc_y0 = 418
refl_x0 = 1476
if incidence_angle == 0.24:
    refl_y0 = 965
In [70]:
alphaf, thetaf, alphai, phi = get_reflected_angles(inc_x0, inc_y0, refl_x0, refl_y0, Lsd=Ldet)
qx, qy, qr, qz = convert_gisaxs_pixel_to_q(inc_x0, inc_y0, refl_x0, refl_y0, lamda=lambda_, Lsd=Ldet)
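For orientation, the standard GiSAXS kinematics behind this conversion, with $k_0 = 2\pi/\lambda$, are $q_x = k_0(\cos\alpha_f \cos 2\theta_f - \cos\alpha_i)$, $q_y = k_0 \cos\alpha_f \sin 2\theta_f$, $q_z = k_0(\sin\alpha_f + \sin\alpha_i)$, and $q_r = \sqrt{q_x^2 + q_y^2}$; the exact sign and axis conventions inside convert_gisaxs_pixel_to_q may differ.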
In [71]:
ticks = show_qzr_map( qr,qz, inc_x0, data = avg_imgr, Nzline=10, Nrline=10 )
In [72]:
qz_start = 0.035
qz_end = 0.041
qz_num= 2
qz_width = (qz_end - qz_start)/(qz_num +1)
qr_start = 0.002
qr_end = 0.025
qr_num = 6
qr_width = ( qr_end- qr_start)/(qr_num+1)
Qr = [qr_start , qr_end, qr_width, qr_num]
Qz= [qz_start, qz_end, qz_width , qz_num ]
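The four numbers in each of Qr and Qz describe equally spaced, fixed-width bins. One plausible construction, shown purely for illustration (an assumption; get_qedge in the next cell is the authoritative version):
qr_center = np.linspace(qr_start, qr_end, qr_num)  # evenly spaced ring centers
qr_edge = np.ravel([(c - qr_width / 2, c + qr_width / 2) for c in qr_center])  # inner/outer edge per ring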
In [73]:
qr_edge, qr_center = get_qedge(qr_start, qr_end, qr_width, qr_num )
qz_edge, qz_center = get_qedge(qz_start, qz_end, qz_width, qz_num )
label_array_qz = get_qmap_label(qz, qz_edge)
label_array_qr = get_qmap_label(qr, qr_edge)
label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr,
qz_center, qr_center)
labels_qzr, indices_qzr = roi.extract_label_indices(label_array_qzr)
labels_qz, indices_qz = roi.extract_label_indices(label_array_qz)
labels_qr, indices_qr = roi.extract_label_indices(label_array_qr)
num_qz = len(np.unique(labels_qz))
num_qr = len(np.unique(labels_qr))
num_qzr = len(np.unique(labels_qzr))
boxes = label_array_qzr
box_maskr = boxes*maskr
qind, pixelist = roi.extract_label_indices(box_maskr)
noqs = len(np.unique(qind))
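Here roi.extract_label_indices flattens the labeled mask and returns, for every nonzero pixel, its ROI label and its flat (raveled) index. A tiny self-contained illustration (assuming roi is scikit-beam's skbeam.core.roi, which chxanalys re-exports):
import numpy as np
from skbeam.core import roi

toy = np.array([[0, 1],
                [2, 2]])
labels, indices = roi.extract_label_indices(toy)
print(labels)   # [1 2 2] -- ROI label of each nonzero pixel
print(indices)  # [1 2 3] -- flat index of each nonzero pixel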
In [74]:
show_qzr_roi( avg_imgr, box_maskr, inc_x0, ticks, alpha=0.5, save=True, path=data_dir_, uid=uid_ )
In [75]:
qr_1d = get_1d_qr( avg_imgr, Qr, Qz, qr, qz, inc_x0, None, True, ticks, .8,
save= True, setup_pargs=setup_pargs )
In [76]:
plot_qr_1d_with_ROI( qr_1d, qr_center, loglog=False, save=True, setup_pargs=setup_pargs )
In [77]:
md['ring_mask'] = box_maskr
md['qr_center']= qr_center
md['qr_edge'] = qr_edge
md['qz_center']= qz_center
md['qz_edge'] = qz_edge
md['beam_center_x'] = inc_x0
md['beam_center_y']= inc_y0
md['refl_center_x'] = refl_x0
md['refl_center_y']= refl_y0
md['incident angle'] = alphai*180/np.pi
md['data_dir'] = data_dir
#psave_obj( md, data_dir + 'uid=%s-md'%uid ) #save the setup parameters
psave_obj( md, data_dir_ + 'uid=%s-md'%uid_ ) #save the setup parameters
In [78]:
frac = len(np.where(avg_imgr)[0]) / imgsra[0].size  # fraction of pixels with signal
print(frac)
compress = frac < .1  # compress if the photon occupation is below 0.1
print(compress)
In [79]:
run_num =1
sub_num = len(uids) //run_num
print( run_num, sub_num)
In [80]:
g2s, taus, useful_uids = multi_uids_gisaxs_xpcs_analysis(
    uids, md, run_num, sub_num, fit=True, compress=True)  # note: compress is hard-coded; the flag computed above is not passed
In [81]:
useful_uids
Out[81]:
In [82]:
#create_multi_pdf_reports_for_uids( useful_uids, g2s, data_dir )
create_multi_pdf_reports_for_uids( useful_uids, g2s, data_dir, append_name='_1' )
In [83]:
os.environ['HTTPS_PROXY'] = 'https://proxy:8888'
os.environ['no_proxy'] = 'cs.nsls2.local,localhost,127.0.0.1'
In [84]:
from chxanalys.chx_olog import LogEntry,Attachment, update_olog_uid, update_olog_id
In [85]:
for key in list(useful_uids.keys()):
    for k in list(useful_uids[key]):
        uid_k = useful_uids[key][k]
        filename = data_dir + 'XPCS_Analysis_Report_for_uid=%s_1.pdf' % uid_k
        atch = [Attachment(open(filename, 'rb'))]
        update_olog_uid(uid=uid_k, text='Add XPCS Analysis PDF Report', attachments=atch)
In [86]:
create_one_pdf_reports_for_uids( useful_uids, g2s, data_dir, filename=uid_, report_type='gisaxs' )
In [87]:
g2s_average = {}  # g2s_average[run] = g2 averaged over that run's sub-series
for key in list(g2s.keys()):
    g2s_average[key] = np.zeros_like(g2s[key][1])
    i = 1
    for sub_key in list(g2s[key].keys()):
        #print(g2s_average[key].shape, g2s[key][sub_key].shape)
        try:
            g2s_average[key] += g2s[key][sub_key]
            i += 1
            print('Run = %s -- sub_run = %s -- pass' % (key, sub_key))
        except Exception:
            print('sub = %s: cannot include it in the average' % sub_key)
    g2s_average[key] /= i - 1
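When every sub-run of a given run yields a g2 array of the same shape, the accumulate-and-divide loop above collapses to a dictionary comprehension; a sketch under that assumption:
g2s_average = {run: np.mean(list(subruns.values()), axis=0)
               for run, subruns in g2s.items()}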
In [88]:
for key in list(g2s.keys()):
    res_pargs = dict(taus=taus, qz_center=qz_center, qr_center=qr_center,
                     path=data_dir, uid='Average@run-%s' % key)
    save_gisaxs_g2(g2s_average[key], res_pargs)
In [89]:
if True:
    for key in list(g2s.keys()):
        res_pargs = dict(taus=taus, qz_center=qz_center, qr_center=qr_center,
                         path=data_dir, uid='Average@run-%s' % key)
        plot_gisaxs_g2(g2s_average[key], taus, vlim=[0.95, 1.15], res_pargs=res_pargs, one_plot=True)
In [ ]:
fit = False
In [ ]:
if fit:
    for key in list(g2s.keys()):
        res_pargs = dict(taus=taus, qz_center=qz_center, qr_center=qr_center,
                         path=data_dir_, uid=uid_ + '@run--%s' % key)
        fit_result = fit_gisaxs_g2(g2s_average[key], res_pargs, function='stretched', vlim=[0.95, 1.15],
                                   fit_variables={'baseline': True, 'beta': True, 'alpha': False, 'relaxation_rate': True},
                                   guess_values={'baseline': 1.45, 'beta': 0.34, 'alpha': 1.0, 'relaxation_rate': 0.1},
                                   one_plot=True)
        fit_qr_qz_rate(qr_center, qz_center, fit_result, power_variable=False,
                       uid=uid_ + '@run--%s' % key, path=data_dir_)
        psave_obj(fit_result, data_dir + 'uid=%s-g2-fit-para' % uid)
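For diffusive dynamics the fitted relaxation rates should scale as $\Gamma = D q^2$, which is what the rate-versus-q fit above checks. A standalone sketch of such a power-law fit (the qs and rates arrays are illustrative numbers, not outputs of fit_result):
import numpy as np
from scipy.optimize import curve_fit

qs = np.array([0.005, 0.010, 0.015, 0.020])  # q values, 1/Angstrom (illustrative)
rates = np.array([0.05, 0.20, 0.45, 0.80])   # relaxation rates Gamma, 1/s (illustrative)
popt, _ = curve_fit(lambda q, D: D * q**2, qs, rates)
print('diffusion-like coefficient D =', popt[0])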
In [ ]:
data_dir_
In [ ]:
create_report = True
In [ ]:
pdf_out_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/')
filename= "XPCS_Analysis_Report_for_uid=%s-average.pdf"%uid_
print (pdf_out_dir + filename)
In [ ]:
if create_report:
    c = create_pdf_report(data_dir_, uid_, out_dir=pdf_out_dir, filename=filename,
                          report_type='gisaxs')
    page = 1
    # Page one: metadata / I(q)-Q / ROI
    c.report_header(page=page)
    c.report_meta(top=730)
    c.report_static(top=560)
    c.report_ROI(top=300)
    Nk = len(list(g2s.keys()))
    Npages = Nk // 2 + Nk % 2  # two runs per page
    for i, key in enumerate(list(g2s.keys())):
        if not i % 2:
            page += 1
            c.new_page()
            c.report_header(page=page)
        top = 720 if not i % 2 else 350
        c.report_one_time(top=top, g2_fit_file='uid=%s@run--%s--g2--fit-.png' % (uid_, key),
                          q_rate_file='uid=%s@run--%s--Q-Rate--fit-.png' % (uid_, key))
    c.save_page()
    c.done()
In [ ]:
last_uid_num = len( useful_uids[1] )
In [ ]:
uid_last = useful_uids[1][ last_uid_num ]
print( uid_last )
In [ ]:
#uid_ = uid_last  # useful_uids[key][k]
filename = c.filename  # 'XPCS_Analysis_Report_for_uid=%s.pdf' % uid_
atch = [Attachment(open(filename, 'rb'))]
update_olog_uid(uid=uid_last, text='Add XPCS Analysis PDF Report', attachments=atch)