"This notebook corresponds to version {{ version }} of the pipeline tool: https://github.com/NSLS-II/pipelines"
This notebook begins with a raw time-series of images and ends with $g_2(t)$ for a range of $q$, fit to an exponential or stretched exponential, and a two-time correlation function.
CHX Olog (https://logbook.nsls2.bnl.gov/11-ID/)
Import packages for I/O, visualization, and analysis.
In [457]:
# CHX analysis toolkit: I/O, visualization and XPCS/XSVS analysis helpers.
# NOTE(review): star import — most otherwise-undefined names used below
# (load_data, get_avg_img, show_img, cmap_albula, ...) come from this package.
from chxanalys.chx_packages import *
from chxanalys.chx_xpcs_xsvs_jupyter_V1 import run_xpcs_xsvs_single
%matplotlib notebook
# Silence matplotlib's "too many open figures" warning; this notebook
# produces many figures across the per-uid loop.
plt.rcParams.update({'figure.max_open_warning': 0})
#%reset -f #for clean up things in the memory
In [458]:
# --- Analysis configuration switches -----------------------------------------
scat_geometry = 'saxs' #supported: 'saxs', 'gi_saxs', 'ang_saxs' (for anisotropic saxs or flow-xpcs)
#scat_geometry = 'saxs' #duplicate of the line above, kept commented
force_compress = False #True #force to (re-)compress data
para_compress = True #compress in parallel
run_fit_form = False #run the form-factor fit
run_waterfall = True #False #run waterfall analysis
run_t_ROI_Inten = True #run ROI intensity as a function of time
run_one_time = True #run one-time correlation g2(t)
#run_fit_g2 = True #run fit one-time, the default function is "stretched exponential"
fit_g2_func = 'stretched'
run_two_time = True #False #True #True #False #run two-time correlation
run_four_time = False #True #False #run four-time correlation
run_xsvs= False #True #False #run visibility (speckle statistics) analysis
att_pdf_report = True #attach the pdf report to the CHX olog
qth_interest = 1 #the single q-ring index of interest for detail plots
use_sqnorm = False #if True, use S(q) to normalize intensity
use_imgsum_norm=True #if True use imgsum to normalize intensity for the one-time calculation
pdf_version='_1' #suffix for the pdf report name
# XSVS is not run for grazing-incidence geometry.
if scat_geometry == 'gi_saxs':run_xsvs= False
In [459]:
# Pre-declare all result holders so later cells run even when the
# corresponding analysis switch is off.
# NOTE(review): `times_xsv` here vs `times_xsvs` used in later cells —
# confirm which spelling the downstream code actually relies on.
taus=None;g2=None;tausb=None;g2b=None;g12b=None;taus4=None;g4=None;times_xsv=None;contrast_factorL=None;
In [460]:
CYCLE= '2017_2' #change cycle here
username = getpass.getuser()
#username = 'jain'  # uncomment to write results under another user's folder
# Per-user analysis output folder for this cycle.
data_dir0 = create_user_folder(CYCLE, username)
print( data_dir0 )
In [461]:
#give a meaningful name for this multi-uid run
uid_average = 'IL17_AuNR_05_PNIPAM_T38degC'
# All averaged results go into a sub-folder named after this run.
data_dir = os.path.join( data_dir0, uid_average +'/')
os.makedirs(data_dir, exist_ok=True)
print ( data_dir )
# Prefix used for figure/file names produced below.
uid_average = 'uid=' + uid_average
In [462]:
# Detector mask location.
# NOTE(review): mask lives under the 2017_1 cycle folder while CYCLE above is
# '2017_2' — confirm this is the intended (reused) mask.
mask_path = '/XF11ID/analysis/2017_1/masks/'
mask_name = 'Apr7_4M_SAXS16m_mask.npy'
In [463]:
mask = load_mask(mask_path, mask_name, plot_ = False, image_name = '%s_mask'%mask_name, reverse=True )
#mask *= pixel_mask
mask[:,2069] =0 # False #mask a known bad detector column (concluded from the previous results)
#np.save( data_dir + 'mask', mask)
show_img(mask,image_name = '%s_mask'%uid_average, save=True, path=data_dir)
mask_load=mask.copy()  # keep a pristine copy; `mask` is modified later (hot-pixel removal)
In [464]:
# Time window used to collect all uids belonging to this multi-uid run.
start_time, stop_time = '2017-4-8 00:00:00', '2017-4-8 00:16:50'
#start_time, stop_time = '2016-11-18 16:20:00', '2016-11-18 16:35:50' # for gisaxs test purpose
sids, uids, fuids = find_uids(start_time, stop_time)  # scan ids, short uids, full uids
print( uids )
uid = uids[0]  # first uid is used for the setup/metadata cells below
uidstr = 'uid=%s'%uid
In [465]:
# Screen out runs with no/bad scattering using a few sample images per uid.
guids, buids = check_bad_uids( uids, mask, img_choice_N = 3 )
print( 'The good uids are: %s.\nThe good uids length is %s.'%(guids, len(guids)) )
In [466]:
# Full uids corresponding to the good (short) uids, in the same order.
gfuids = fuids[np.array( [ np.where( uids == s )[0][0] for s in guids ] )]
In [467]:
md = get_meta_data( uid )  # header metadata of the setup uid
In [468]:
imgs = load_data( uid, md['detector'], reverse= True )
md.update( imgs.md );Nimg = len(imgs);
# Invert the detector's pixel mask convention: 1 = good pixel, 0 = bad pixel.
pixel_mask = 1- np.int_( np.array( imgs.md['pixel_mask'], dtype= bool) )
print( 'The data are: %s' %imgs )
imgsa = apply_mask( imgs, mask )
In [469]:
# Show the most relevant metadata entries.
# NOTE(review): 'det_distanc' looks like a typo'd metadata key
# ('det_distance'?) — confirm against the keys actually present in md.
print_dict( md, ['suid', 'number of images', 'uid', 'scan_id', 'start_time', 'stop_time', 'sample', 'Measurement',
                 'acquire period', 'exposure time',
                 'det_distanc', 'beam_center_x', 'beam_center_y', ] )
In [470]:
inc_x0 = None  # beam-center x override (pixels); None -> use metadata value
inc_y0= None   # beam-center y override (pixels); None -> use metadata value
# Recover geometry/timing metadata that may be missing from the header.
# BUG FIX: pixelsize was written as 7.5*10*(-5), which evaluates to -375
# (the power operator ** was mistyped as *).  The detector pixel pitch is
# 75 um, i.e. 7.5e-5 m.
dpix, lambda_, Ldet, exposuretime, timeperframe, center = check_lost_metadata(
    md, Nimg, inc_x0 = inc_x0, inc_y0= inc_y0, pixelsize = 7.5*10**(-5) )
# Common parameter bundle passed to the downstream plotting/analysis helpers.
setup_pargs=dict(uid=uidstr, dpix= dpix, Ldet=Ldet, lambda_= lambda_, exposuretime=exposuretime,
                 timeperframe=timeperframe, center=center, path= data_dir)
In [471]:
img_choice_N = 10  # number of randomly chosen frames to average for a quick look
img_samp_index = random.sample( range(len(imgs)), img_choice_N)
avg_img = get_avg_img( imgsa, img_samp_index, plot_ = False, uid =uid)
if avg_img.max() == 0:
    # BUG FIX: the message text was garbled ("recorded fdata_diror this uid");
    # restored to "recorded for this uid".
    print('There are no photons recorded for this uid: %s'%uid)
    print('The data analysis should be terminated! Please try another uid.')
In [472]:
# Quick look at the averaged image on a log scale.
show_img( avg_img, vmin=.1, vmax= 1e1, logs=True, aspect=1,
          image_name= uidstr + '_img_avg', save=True, path=data_dir, cmap = cmap_albula )
In [473]:
# Load the pre-defined ROI (16 q-rings) mask and the q values per ring.
# NOTE(review): cpk.load unpickles arbitrary objects — only load files from
# trusted locations.
fp = data_dir0 + 'roi_mask_16rings_16m.pkl'
roi_mask,qval_dict = cpk.load( open(fp, 'rb' ) ) #for load the saved roi data
if scat_geometry =='gi_saxs':
    fp = data_dir0 + 'gisaxs_test.pkl'
    qr_map, qz_map, ticks, Qrs, Qzs, Qr, Qz, inc_x0 = cpk.load( open(fp, 'rb' ) )
In [ ]:
In [474]:
if scat_geometry =='saxs':
    ## Get circular average| * Do plot and save q~iq
    hmask = create_hot_pixel_mask( avg_img, threshold = 1e2, center=center, center_radius= 100)
    qp_saxs, iq_saxs, q_saxs = get_circular_average( avg_img, mask * hmask, pargs=setup_pargs )
    plot_circular_average( qp_saxs, iq_saxs, q_saxs, pargs=setup_pargs,
                           xlim=[q_saxs.min(), q_saxs.max()*1.1], ylim = [iq_saxs.min(), iq_saxs.max()] )
    # Fold the hot-pixel mask into the main mask for all later analysis.
    mask =np.array( mask * hmask, dtype=bool)
In [475]:
if scat_geometry =='saxs':
    if run_fit_form:
        # Fit the circularly averaged I(q) to a sphere form factor.
        # BUG FIX: fit_variables used the (undefined in this notebook) name `T`,
        # which would raise NameError when run_fit_form is enabled; use the
        # literal True.
        form_res = fit_form_factor( q_saxs,iq_saxs, guess_values={'radius': 2500, 'sigma':0.05,
             'delta_rho':1E-10 }, fit_range=[0.0001, 0.015], fit_variables={'radius': True, 'sigma':True,
             'delta_rho':True}, res_pargs=setup_pargs, xlim=[0.0001, 0.015])
In [476]:
if scat_geometry =='saxs':
    if run_fit_form:
        # NOTE(review): this fit duplicates the previous cell verbatim;
        # consider removing one copy.
        # BUG FIX: undefined name `T` in fit_variables -> True (see prior cell).
        form_res = fit_form_factor( q_saxs,iq_saxs, guess_values={'radius': 2500, 'sigma':0.05,
             'delta_rho':1E-10 }, fit_range=[0.0001, 0.015], fit_variables={'radius': True, 'sigma':True,
             'delta_rho':True}, res_pargs=setup_pargs, xlim=[0.0001, 0.015])
    # First q value of each ROI ring, used to overlay ROIs on I(q).
    qr = np.array( [qval_dict[k][0] for k in sorted( qval_dict.keys())] )
    show_ROI_on_image( avg_img, roi_mask, center, label_on = False, rwidth = 1340, alpha=.9,
                       save=True, path=data_dir, uid=uidstr, vmin= np.min(avg_img), vmax= np.max(avg_img),
                       aspect=1)
    plot_qIq_with_ROI( q_saxs, iq_saxs, qr, logs=True, uid=uidstr, xlim=[0.001,0.04],
                       ylim = [iq_saxs.min(), iq_saxs.max()], save=True, path=data_dir)
In [477]:
if scat_geometry =='gi_saxs':
    # Overlay the q_r/q_z map on the averaged image (grazing incidence only).
    plot_qzr_map( qr_map, qz_map, inc_x0, ticks = ticks, data= avg_img, uid= uidstr, path = data_dir )
In [478]:
if scat_geometry =='gi_saxs':
    # Build grazing-incidence ROI masks from the Qr/Qz selections and show them.
    roi_masks, qval_dicts = get_gisaxs_roi( Qrs, Qzs, qr_map, qz_map, mask= mask )
    show_qzr_roi( avg_img, roi_masks, inc_x0, ticks[:4], alpha=0.5, save=True, path=data_dir, uid=uidstr )
In [479]:
if scat_geometry =='gi_saxs':
    # Time-sliced q_r profiles for stability checks.
    # NOTE(review): `FD` is never defined in this notebook and `good_start` is
    # only set several cells below — this branch would raise NameError on a
    # fresh top-to-bottom run with gi_saxs data; confirm/reorder before use.
    Nimg = FD.end - FD.beg
    time_edge = create_time_slice( N= Nimg, slice_num= 3, slice_width= 2, edges = None )
    time_edge = np.array( time_edge ) + good_start
    print( time_edge )
    qrt_pds = get_t_qrc( FD, time_edge, Qrs, Qzs, qr_map, qz_map, path=data_dir, uid = uidstr )
    plot_qrt_pds( qrt_pds, time_edge, qz_index = 0, uid = uidstr, path = data_dir )
In [480]:
if scat_geometry =='gi_waxs':
    # NOTE(review): scat_geometry is only ever 'saxs'/'gi_saxs'/'ang_saxs' in
    # this notebook, so this branch is dead here; kept for template parity.
    badpixel = np.where( avg_img[:600,:] >=300 )
    roi_mask[badpixel] = 0
    show_ROI_on_image( avg_img, roi_mask, label_on = True, alpha=.5,
                       save=True, path=data_dir, uid=uidstr, vmin=0.1, vmax=5)
In [481]:
good_start = 5  # index of the first frame to analyze, passed via run_pargs below
In [482]:
# Bundle all switches/settings for the per-uid analysis function
# (run_xpcs_xsvs_single).
run_pargs= dict(
    scat_geometry = scat_geometry ,
    force_compress = force_compress, #force to compress data
    para_compress = para_compress, #parallel compress
    run_fit_form = run_fit_form, #run fit form factor
    run_waterfall = run_waterfall, #run waterfall analysis
    run_t_ROI_Inten = run_t_ROI_Inten, #run ROI intensity as a function of time
    run_one_time = run_one_time, #run one-time
    fit_g2_func = fit_g2_func,
    run_two_time = run_two_time, #run two-time
    run_four_time = run_four_time, #run four-time
    run_xsvs= run_xsvs, #run visibility analysis
    att_pdf_report = att_pdf_report, #attach the pdf report to CHX olog
    show_plot = False,
    CYCLE = CYCLE, mask_path = mask_path, mask_name = mask_name,
    good_start = good_start,
    qth_interest = qth_interest, #the intested single qth,
    use_sqnorm = use_sqnorm, #if True, use sq to normalize intensity
    use_imgsum_norm=use_imgsum_norm,#if True use imgsum to normalize intensity for one-time calculatoin
    #pdf_version = pdf_version, #for pdf report name
    # NOTE(review): this overrides the pdf_version='_1' defined at the top
    # for the per-uid reports only.
    pdf_version = '_test2',
    )
# Geometry-specific extras to pass along with the switches.
add_conf = dict( roi_mask = roi_mask, qval_dict = qval_dict )
if scat_geometry == 'gi_saxs':
    # NOTE(review): refl_x0/refl_y0 are not defined anywhere in this notebook —
    # this branch would raise NameError; confirm where they should come from.
    add_conf = dict( inc_x0=inc_x0,inc_y0=inc_y0,refl_x0=refl_x0,refl_y0=refl_y0,
                     Qr = Qr,Qz= Qz,qr_map=qr_map,qz_map= qz_map)
run_pargs.update( add_conf )
In [483]:
guids  # display the good uids about to be analyzed
Out[483]:
In [484]:
# Re-load the pipeline module so local edits take effect in this kernel.
# NOTE(review): hard-coded per-user path; adjust for other accounts.
%run /home/yuzhang/chxanalys_link/chxanalys/chx_xpcs_xsvs_jupyter_V1.py
In [485]:
do_loop = True #False
# Run the full XPCS/XSVS pipeline for every good uid and collect the results
# keyed by uid.
if do_loop:
    multi_res = {}
    t0 = time.time()
    for uid in guids:
        multi_res[uid] = run_xpcs_xsvs_single( uid, run_pargs= run_pargs, return_res= True )
    run_time(t0)
In [486]:
plt.close('all')
do_loop = True #False
# Alternative path: load previously exported per-uid results from HDF5 instead
# of re-running the pipeline.  Disabled while do_loop is True.
if not do_loop:
    multi_res = {}
    for uid, fuid in zip(guids,fuids):
        multi_res[uid] = extract_xpcs_results_from_h5( filename = 'uid=%s_Res.h5'%fuid, import_dir = data_dir0 + uid +'/' )
In [487]:
multi_res.keys()  # uids with collected results
Out[487]:
In [488]:
bad_uids = [ ] # put bad uid strings inside the list [] to exclude them from averaging
for k in bad_uids:
    x= multi_res.pop( k )
In [489]:
#multi_res.pop( '3faefacf' )  # example: drop one specific uid by key
In [490]:
multi_res.keys()
Out[490]:
In [ ]:
In [491]:
mkeys = list(multi_res.keys())
print(mkeys)
print( multi_res[mkeys[0]].keys())  # show which result arrays are available per uid
In [492]:
# Use the first run's metadata as representative; label everything that
# follows with the averaged-run uid.
md = multi_res[mkeys[0]]['md']
uid = uid_average
setup_pargs['uid'] = uid
In [493]:
# --- Average the per-uid results across all runs -----------------------------
avg_img = get_averaged_data_from_multi_res( multi_res, keystr='avg_img' )
imgsum = get_averaged_data_from_multi_res( multi_res, keystr='imgsum' )
if scat_geometry == 'saxs':
    q_saxs = get_averaged_data_from_multi_res( multi_res, keystr='q_saxs')
    iq_saxs = get_averaged_data_from_multi_res( multi_res, keystr='iq_saxs')
    qt = get_averaged_data_from_multi_res( multi_res, keystr='qt')
    iqst = get_averaged_data_from_multi_res( multi_res, keystr='iqst')
elif scat_geometry == 'gi_saxs':
    qr_1d_pds = get_averaged_data_from_multi_res( multi_res, keystr='qr_1d_pds')
    # NOTE(review): qr_1d_pds_label is not defined in this notebook — this
    # branch would raise NameError; confirm where the labels should come from.
    qr_1d_pds = trans_data_to_pd( qr_1d_pds, label= qr_1d_pds_label)
if run_waterfall:
    wat = get_averaged_data_from_multi_res( multi_res, keystr='wat')
if run_t_ROI_Inten:
    times_roi = get_averaged_data_from_multi_res( multi_res, keystr='times_roi')
    mean_int_sets = get_averaged_data_from_multi_res( multi_res, keystr='mean_int_sets')
if run_one_time:
    # Average g2(t), save to csv, then fit with the chosen model (alpha is
    # held fixed at 1.0, i.e. a simple-exponential fit despite 'stretched').
    g2 = get_averaged_data_from_multi_res( multi_res, keystr='g2' )
    taus = get_averaged_data_from_multi_res( multi_res, keystr='taus' )
    g2_pds = save_g2_general( g2, taus=taus,qr=np.array( list( qval_dict.values() ) )[:,0],
                              uid= uid +'_g2.csv', path= data_dir, return_res=True )
    g2_fit_result, taus_fit, g2_fit = get_g2_fit_general( g2, taus,
                    function = fit_g2_func, vlim=[0.95, 1.05], fit_range= None,
                    fit_variables={'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True},
                    guess_values={'baseline':1.0,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01,})
    g2_fit_paras = save_g2_fit_para_tocsv(g2_fit_result, filename= uid +'_g2_fit_paras.csv', path=data_dir )
if run_two_time:
    # Two-time matrices can have different lengths per run; average accordingly,
    # then fit the derived one-time curves the same way as above.
    g12b = get_averaged_data_from_multi_res( multi_res, keystr='g12b',different_length= True )
    g2b = get_averaged_data_from_multi_res( multi_res, keystr='g2b' )
    tausb = get_averaged_data_from_multi_res( multi_res, keystr='tausb' )
    g2b_pds = save_g2_general( g2b, taus=tausb, qr= np.array( list( qval_dict.values() ) )[:,0],
                               qz=None, uid=uid +'_g2b.csv', path= data_dir, return_res=True )
    g2_fit_resultb, taus_fitb, g2_fitb = get_g2_fit_general( g2b, tausb,
                    function = fit_g2_func, vlim=[0.95, 1.05], fit_range= None,
                    fit_variables={'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True},
                    guess_values={'baseline':1.0,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01,})
    g2b_fit_paras = save_g2_fit_para_tocsv(g2_fit_resultb,
                    filename= uid + '_g2b_fit_paras.csv', path=data_dir )
if run_four_time:
    g4 = get_averaged_data_from_multi_res( multi_res, keystr='g4' )
    taus4 = get_averaged_data_from_multi_res( multi_res, keystr='taus4' )
    g4_pds = save_g2_general( g4, taus=taus4, qr=np.array( list( qval_dict.values() ) )[:,0],
                              qz=None, uid=uid +'_g4.csv', path= data_dir, return_res=True )
if run_xsvs:
    contrast_factorL = get_averaged_data_from_multi_res( multi_res, keystr='contrast_factorL',different_length=False )
    times_xsvs = get_averaged_data_from_multi_res( multi_res, keystr='times_xsvs',different_length=False )
    cont_pds = save_arrays( contrast_factorL, label= times_xsvs, filename = '%s_contrast_factorL.csv'%uid,
                            path=data_dir,return_res=True )
In [494]:
# --- Summary plots of the averaged static data -------------------------------
if scat_geometry =='saxs':
    show_saxs_qmap( avg_img, setup_pargs, width=600,vmin=.1, vmax=np.max(avg_img*.1), logs=True,
                    image_name= '%s_img_avg'%uid, save=True)
    plot_circular_average( q_saxs, iq_saxs, q_saxs, pargs=setup_pargs,
                           xlim=[q_saxs.min(), q_saxs.max()], ylim = [iq_saxs.min(), iq_saxs.max()] )
    plot_qIq_with_ROI( q_saxs, iq_saxs, qr, logs=True, uid=uid, xlim=[q_saxs.min(), q_saxs.max()],
                       ylim = [iq_saxs.min(), iq_saxs.max()], save=True, path=data_dir)
    # Total scattered intensity per frame (beam/sample stability check).
    plot1D( y = imgsum, title ='%s_img_sum_t'%uid, xlabel='Frame', colors='b',
            ylabel='Total_Intensity', legend='imgsum', save=True, path=data_dir)
    plot_t_iqc( qt, iqst, frame_edge=None, pargs=setup_pargs, xlim=[qt.min(), qt.max()],
                ylim = [iqst.min(), iqst.max()], save=True )
    show_ROI_on_image( avg_img, roi_mask, center, label_on = False, rwidth =700, alpha=.9,
                       save=True, path=data_dir, uid=uid, vmin= np.min(avg_img), vmax= np.max(avg_img) )
elif scat_geometry =='gi_saxs':
    show_img( avg_img, vmin=.1, vmax=np.max(avg_img*.1), logs=True,image_name= uidstr + '_img_avg', save=True, path=data_dir)
    plot_qr_1d_with_ROI( qr_1d_pds, qr_center=np.unique( np.array(list( qval_dict.values() ) )[:,0] ),
                         loglog=False, save=True, uid=uidstr, path = data_dir)
    show_qzr_roi( avg_img, roi_mask, inc_x0, ticks, alpha=0.5, save=True, path=data_dir, uid=uidstr )
In [495]:
if run_waterfall:
    # Waterfall (pixel intensity vs frame) for the q-ring of interest.
    plot_waterfallc( wat, qth_interest, aspect=None,vmax= np.max(wat), uid=uid, save =True,
                     path=data_dir, beg= good_start)
if run_t_ROI_Inten:
    # Mean ROI intensity vs time for each q-ring.
    plot_each_ring_mean_intensityc( times_roi, mean_int_sets, uid = uid, save=True, path=data_dir )
In [496]:
if run_one_time:
    # Averaged g2 with its fit, then the q-dependence of the fitted
    # relaxation rate.
    plot_g2_general( g2_dict={1:g2, 2:g2_fit}, taus_dict={1:taus, 2:taus_fit},vlim=[0.95, 1.05],
                     qval_dict = qval_dict, fit_res= g2_fit_result, geometry= scat_geometry,filename= uid +'_g2',
                     path= data_dir, function= fit_g2_func, ylabel='g2', append_name= '_fit')
    D0, qrate_fit_res = get_q_rate_fit_general( qval_dict, g2_fit_paras['relaxation_rate'], geometry= scat_geometry )
    plot_q_rate_fit_general( qval_dict, g2_fit_paras['relaxation_rate'], qrate_fit_res,
                             geometry= scat_geometry,uid=uid, path= data_dir )
In [497]:
if run_two_time:
    # Two-time correlation map for the q-ring of interest (first <=1000 frames),
    # plus the one-time g2 derived from it, with its fit.
    show_C12(g12b, q_ind= qth_interest, N1= 0, N2=min( len(imgsa) ,1000), vmin=1.01, vmax=1.25,
             timeperframe=timeperframe,save=True,
             path= data_dir, uid = uid )
    plot_g2_general( g2_dict={1:g2b, 2:g2_fitb}, taus_dict={1:tausb, 2:taus_fitb},vlim=[0.95, 1.05],
                     qval_dict=qval_dict, fit_res= g2_fit_resultb, geometry=scat_geometry,filename=uid+'_g2',
                     path= data_dir, function= fit_g2_func, ylabel='g2', append_name= '_b_fit')
In [498]:
if run_two_time and run_one_time:
    # Consistency check: overlay g2 from the one-time and two-time analyses.
    plot_g2_general( g2_dict={1:g2, 2:g2b}, taus_dict={1:taus, 2:tausb},vlim=[0.95, 1.05],
                     qval_dict=qval_dict, g2_labels=['from_one_time', 'from_two_time'],
                     geometry=scat_geometry,filename=uid+'_g2_two_g2', path= data_dir, ylabel='g2', )
if run_four_time:
    plot_g2_general( g2_dict={1:g4}, taus_dict={1:taus4},vlim=[0.95, 1.05], qval_dict=qval_dict, fit_res= None,
                     geometry=scat_geometry,filename=uid+'_g4',path= data_dir, ylabel='g4')
In [499]:
if run_xsvs:
    # Speckle contrast vs time compared with g2: once for the q-ring of
    # interest, once for all rings.
    plot_g2_contrast( contrast_factorL, g2, times_xsvs, taus, qr,
                      vlim=[0.8,2.0], qth = qth_interest, uid=uid,path = data_dir, legend_size=14)
    plot_g2_contrast( contrast_factorL, g2, times_xsvs, taus, qr,
                      vlim=[0.8,1.2], qth = None, uid=uid,path = data_dir, legend_size=4)
In [500]:
# Tag the metadata with the averaged-run uid and persist it, then assemble
# the export dictionary Exdt for the HDF5 dump below.
md['uid'] = uid
md['suid'] = uid
md['Measurement'] = uid
md['beg'] = None
md['end'] = None
md['bad_frame_list'] = 'unknown'
md['metadata_file'] = data_dir + 'md.csv-&-md.pkl'
psave_obj( md, data_dir + '%s_md'%uid ) #save the setup parameters
save_dict_csv( md, data_dir + '%s_md.csv'%uid, 'w')
Exdt = {}
if scat_geometry == 'gi_saxs':
    for k,v in zip( ['md', 'roi_mask','qval_dict','avg_img','mask','pixel_mask', 'imgsum', 'qr_1d_pds'],
                [md, roi_mask, qval_dict, avg_img,mask,pixel_mask, imgsum, qr_1d_pds] ):
        Exdt[ k ] = v
elif scat_geometry == 'saxs':
    # BUG FIX: the value list was one element short, so zip() silently dropped
    # the trailing 'bad_frame_list' key from the export; supply
    # md['bad_frame_list'] ('unknown' for this averaged run) as its value.
    for k,v in zip( ['md', 'q_saxs', 'iq_saxs','iqst','qt','roi_mask','qval_dict','avg_img','mask','pixel_mask', 'imgsum', 'bad_frame_list'],
                [md, q_saxs, iq_saxs, iqst, qt,roi_mask, qval_dict, avg_img,mask,pixel_mask, imgsum, md['bad_frame_list'] ] ):
        Exdt[ k ] = v
if run_waterfall:Exdt['wat'] = wat
if run_t_ROI_Inten:Exdt['times_roi'] = times_roi;Exdt['mean_int_sets']=mean_int_sets
if run_one_time:
    for k,v in zip( ['taus','g2','g2_fit_paras'], [taus,g2,g2_fit_paras] ):Exdt[ k ] = v
if run_two_time:
    for k,v in zip( ['tausb','g2b','g2b_fit_paras', 'g12b'], [tausb,g2b,g2b_fit_paras,g12b] ):Exdt[ k ] = v
if run_four_time:
    for k,v in zip( ['taus4','g4'], [taus4,g4] ):Exdt[ k ] = v
if run_xsvs:
    # NOTE(review): spec_kmean/spec_pds/spec_km_pds are not defined anywhere in
    # this notebook — this branch would raise NameError if run_xsvs were True.
    for k,v in zip( ['spec_kmean','spec_pds','times_xsvs','spec_km_pds','contrast_factorL'],
                [ spec_kmean,spec_pds,times_xsvs,spec_km_pds,contrast_factorL] ):Exdt[ k ] = v
In [501]:
if run_xsvs:
    # Also export the averaged contrast factors as a standalone csv.
    contr_pds = save_arrays( Exdt['contrast_factorL'], label= Exdt['times_xsvs'],
                             filename = '%s_contr.csv'%uid, path=data_dir,return_res=True )
In [502]:
# Export every collected result to a single HDF5 file for later re-loading.
export_xpcs_results_to_h5( uid + '_Res.h5', data_dir, export_dict = Exdt )
#extract_dict = extract_xpcs_results_from_h5( filename = uid + '_Res.h5', import_dir = data_dir )
In [503]:
# Name of the averaged-run pdf report; XSVS runs get a distinct prefix.
# (Uses the top-level pdf_version '_1'; the '_test2' override in run_pargs
# applies only to the per-uid reports.)
pdf_out_dir = data_dir
pdf_filename = "XPCS_Analysis_Report_for_%s%s.pdf"%(uid_average,pdf_version)
if run_xsvs:
    pdf_filename = "XPCS_XSVS_Analysis_Report_for_%s%s.pdf"%(uid_average,pdf_version)
In [504]:
#%run /XF11ID/analysis/Analysis_Pipelines/Develop/chxanalys/chxanalys/Create_Report.py
In [505]:
# Build the averaged-run pdf report from the figures saved in data_dir.
make_pdf_report( data_dir, uid_average, pdf_out_dir, pdf_filename, username,
                run_fit_form, run_one_time, run_two_time, run_four_time, run_xsvs, report_type = scat_geometry
               )
In [506]:
# Attach the averaged-run report pdf to the olog entry of the last run.
if att_pdf_report:
    os.environ['HTTPS_PROXY'] = 'https://proxy:8888'
    os.environ['no_proxy'] = 'cs.nsls2.local,localhost,127.0.0.1'
    pname = pdf_out_dir + pdf_filename
    atch=[ Attachment(open(pname, 'rb')) ]
    try:
        update_olog_uid( uid= fuids[-1], text='Add XPCS Averaged Analysis PDF Report', attachments= atch )
    except Exception as e:
        # BUG FIX: was a bare `except:` which also swallows KeyboardInterrupt/
        # SystemExit and hides the real failure; catch Exception and report it
        # alongside the duplicate-filename hint.
        print("I can't attach this PDF: %s due to a duplicated filename. Please give a different PDF file."%pname)
        print('The attachment error was: %s'%e)
In [507]:
fuids[-1]  # full uid of the last run, to which the report was attached
Out[507]: