"This notebook corresponds to version {{ version }} of the pipeline tool: https://github.com/NSLS-II/pipelines"
This notebook begins with a raw time series of images and ends with $g_2(t)$ for a range of $q$, fit to an exponential or stretched exponential, and a two-time correlation function.
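For reference, the stretched-exponential model used by the one-time fits below is $g_2(q,t) = \mathrm{baseline} + \beta\, e^{-2(\Gamma t)^{\alpha}}$ (up to the package's exact convention), where $\Gamma$ is the relaxation rate, $\beta$ the contrast, and $\alpha = 1$ recovers a simple exponential.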
CHX Olog (https://logbook.nsls2.bnl.gov/11-ID/)
Import packages for I/O, visualization, and analysis.
In [2]:
from chxanalys.chx_packages import *
%matplotlib notebook
plt.rcParams.update({'figure.max_open_warning': 0})
#%reset -f #for clean up things in the memory
In [3]:
force_compress = True #force re-compression of the raw data
para_compress = True #compress in parallel
run_fit_form = False #fit the form factor
run_waterfall = False #run the waterfall analysis
run_t_ROI_Inten = True #run ROI intensity as a function of time
run_fit_g2 = True #fit the one-time correlation; the default model is a stretched exponential
run_two_time = False #run the two-time analysis
run_four_time = False #run the four-time analysis
run_xsvs = True #run the visibility (XSVS) analysis
att_pdf_report = True #attach the PDF report to the CHX Olog
In [4]:
CYCLE = '2016_3'
username = getpass.getuser()
#username = "colosqui" #provide the username to force the results to save in that username folder
data_dir0 = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/')
#Or define data_dir here directly, e.g., data_dir = '/XF11ID/analysis/2016_2/rheadric/test/'
os.makedirs(data_dir0, exist_ok=True)
print('Results from this analysis will be stashed in the directory %s' % data_dir0)
In [45]:
#uid = 'd851b9' #(scan num: 10871) (Measurement: visibility CoralPor lag time 320us #1/1 )
#uid = 'a87398be' #count : 1 ['a87398'] (scan num: 4162) (Measurement: 0.5um Si in PEG 750Hz 1k mbs: .1x.4 )
#uid = '641f9d'
#uid = 'bed981' #count : 1 ['bed981'] (scan num: 10864) (Measurement: CoralPor 750Hz 5k )
#uid = '88db8f' #xf11id, 11/28/16, 8:39 pm #count : 1 ['88db8f'] (scan num: 10382) (Measurement: CoralPor 1.34ms 10s period 100 )
#uid = '5486e6' #count : 1 ['5486e6'] (scan num: 10383) (Measurement: CoralPor 1.34ms 10s period 200 )
uid = '0e98c1d4' #count : 1 ['0e98c1'] (scan num: 10384) (Measurement: CoralPor 1.34ms 0s period 10k )
In [46]:
data_dir = os.path.join(data_dir0, '%s/'%uid)
os.makedirs(data_dir, exist_ok=True)
print('Results from this analysis will be stashed in the directory %s' % data_dir)
In [47]:
md = get_meta_data( uid )
In [48]:
imgs = load_data( uid, md['detector'], reverse= True )
md.update( imgs.md )
Nimg = len(imgs)
pixel_mask = 1 - np.int_( np.array( imgs.md['pixel_mask'], dtype=bool ) )
print( 'The data are: %s' %imgs )
In [52]:
print_dict( md, ['suid', 'number of images', 'uid', 'scan_id', 'start_time', 'stop_time', 'sample', 'Measurement',
                 'acquire period', 'exposure time',
                 'detector_distance', 'beam_center_x', 'beam_center_y', ] )
In [53]:
dpix = md['x_pixel_size'] * 1000.  #pixel size in mm; the Eiger 4M pixel is 0.075 mm
lambda_ = md['incident_wavelength']  #wavelength of the X-rays in Angstroms
Ldet = md['detector_distance'] *1000 # detector to sample distance (mm)
exposuretime= md['count_time'] #exposure time in sec
acquisition_period = md['frame_time'] #acquisition time in sec
timeperframe = acquisition_period #for g2
center = [ int(md['beam_center_y']),int( md['beam_center_x'] ) ] #beam center [y,x] for python image
#center = [ 2070-1381-0, 1475-4]
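Given dpix, Ldet, and lambda_, the momentum transfer of a pixel at radial distance r from the beam center follows the usual small-angle relation; a minimal sketch (the helper name is illustrative, not part of chxanalys):
In [ ]:
import numpy as np

def pixel_to_q( r_pixels, dpix_mm, Ldet_mm, wavelength_A ):
    '''q in inverse Angstroms for a pixel r_pixels from the beam center.'''
    two_theta = np.arctan2( r_pixels * dpix_mm, Ldet_mm )  #full scattering angle
    return ( 4 * np.pi / wavelength_A ) * np.sin( two_theta / 2. )

#e.g., pixel_to_q( 100, dpix, Ldet, lambda_ ) gives q of a pixel 100 pixels out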
In [54]:
setup_pargs=dict(uid=uid, dpix= dpix, Ldet=Ldet, lambda_= lambda_, exposuretime=exposuretime,
timeperframe=timeperframe, center=center, path= data_dir)
In [55]:
print_dict( setup_pargs )
In [56]:
mask_path = '/XF11ID/analysis/2016_3/masks/'
mask_name = 'Nov28_4M_SAXS_mask.npy'
In [57]:
mask = load_mask(mask_path, mask_name, plot_ = False, image_name = 'uid=%s-mask'%uid, reverse=True )
mask *= pixel_mask
mask[:,2069] = 0 #mask this bad column (concluded from previous results)
#np.save( data_dir + 'mask', mask)
show_img(mask,image_name = 'uid=%s-mask'%uid, save=True, path=data_dir)
mask_load=mask.copy()
imgsa = apply_mask( imgs, mask )
In [59]:
n= 5
avg_img = imgsa[n] #get_avg_img( imgsa, sampling = int(Nimg/3), plot_ = False, uid =uid)
In [60]:
show_saxs_qmap( avg_img, setup_pargs, width=600, show_pixel = True,
vmin=.1, vmax=np.max(avg_img), logs=True, image_name= uid + '--frame_%s'%n )
In [61]:
compress = True
photon_occ = len( np.where(avg_img)[0] ) / imgsa[0].size
#compress = photon_occ < .4 #compress if the photon occupation is below 0.4
print( "The non-zero photon occupation is %s."%photon_occ )
print( "Will " + ['NOT ', ''][compress] + "apply the compression process." )
In [62]:
good_start = 5 #first good frame to use; must be at least 0
In [33]:
%run /XF11ID/analysis/Analysis_Pipelines/Develop/chxanalys/chxanalys/chx_compress.py
In [63]:
t0= time.time()
filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%md['uid']
mask, avg_img, imgsum, bad_frame_list = compress_eigerdata(imgs, mask, md, filename,
force_compress= force_compress, para_compress= para_compress, bad_pixel_threshold= 1e14,
bins=1, num_sub= 100, num_max_para_process= 500 )
min_inten = 10
good_start = max(good_start, np.where( np.array(imgsum) > min_inten )[0][0] )
print ('The good_start frame number is: %s '%good_start)
FD = Multifile(filename, good_start, len(imgs))
#FD = Multifile(filename, good_start, 100)
plot1D( y = imgsum[ np.array( [i for i in np.arange(good_start, len(imgsum)) if i not in bad_frame_list])],
title ='Uid= %s--imgsum'%uid, xlabel='Frame', ylabel='Total_Intensity', legend='imgsum' )
run_time(t0)
The following cells repeat the same compression with larger num_sub values, and finally with para_compress=False, to compare run times.
In [65]:
t0= time.time()
filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%md['uid']
mask, avg_img, imgsum, bad_frame_list = compress_eigerdata(imgs, mask, md, filename,
force_compress= force_compress, para_compress= para_compress, bad_pixel_threshold= 1e14,
bins=1, num_sub= 150, num_max_para_process= 500 )
min_inten = 10
good_start = max(good_start, np.where( np.array(imgsum) > min_inten )[0][0] )
print ('The good_start frame number is: %s '%good_start)
FD = Multifile(filename, good_start, len(imgs))
#FD = Multifile(filename, good_start, 100)
plot1D( y = imgsum[ np.array( [i for i in np.arange(good_start, len(imgsum)) if i not in bad_frame_list])],
title ='Uid= %s--imgsum'%uid, xlabel='Frame', ylabel='Total_Intensity', legend='imgsum' )
run_time(t0)
In [66]:
t0= time.time()
filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%md['uid']
mask, avg_img, imgsum, bad_frame_list = compress_eigerdata(imgs, mask, md, filename,
force_compress= force_compress, para_compress= para_compress, bad_pixel_threshold= 1e14,
bins=1, num_sub= 300, num_max_para_process= 500 )
min_inten = 10
good_start = max(good_start, np.where( np.array(imgsum) > min_inten )[0][0] )
print ('The good_start frame number is: %s '%good_start)
FD = Multifile(filename, good_start, len(imgs))
#FD = Multifile(filename, good_start, 100)
plot1D( y = imgsum[ np.array( [i for i in np.arange(good_start, len(imgsum)) if i not in bad_frame_list])],
title ='Uid= %s--imgsum'%uid, xlabel='Frame', ylabel='Total_Intensity', legend='imgsum' )
run_time(t0)
In [ ]:
t0= time.time()
filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%md['uid']
mask, avg_img, imgsum, bad_frame_list = compress_eigerdata(imgs, mask, md, filename,
force_compress= force_compress, para_compress= para_compress, bad_pixel_threshold= 1e14,
bins=1, num_sub= 500, num_max_para_process= 500 )
min_inten = 10
good_start = max(good_start, np.where( np.array(imgsum) > min_inten )[0][0] )
print ('The good_start frame number is: %s '%good_start)
FD = Multifile(filename, good_start, len(imgs))
#FD = Multifile(filename, good_start, 100)
plot1D( y = imgsum[ np.array( [i for i in np.arange(good_start, len(imgsum)) if i not in bad_frame_list])],
title ='Uid= %s--imgsum'%uid, xlabel='Frame', ylabel='Total_Intensity', legend='imgsum' )
run_time(t0)
In [67]:
t0= time.time()
filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%md['uid']
mask, avg_img, imgsum, bad_frame_list = compress_eigerdata(imgs, mask, md, filename,
force_compress= force_compress, para_compress= False, bad_pixel_threshold= 1e14,
bins=1, num_sub= 100,num_max_para_process= 5000 )
min_inten = 10
good_start = max(good_start, np.where( np.array(imgsum) > min_inten )[0][0] )
print ('The good_start frame number is: %s '%good_start)
FD = Multifile(filename, good_start, len(imgs))
#FD = Multifile(filename, good_start, 100)
plot1D( y = imgsum[ np.array( [i for i in np.arange(good_start, len(imgsum)) if i not in bad_frame_list])],
title ='Uid= %s--imgsum'%uid, xlabel='Frame', ylabel='Total_Intensity', legend='imgsum' )
run_time(t0)
In [ ]:
#%system free && sync && echo 3 > /proc/sys/vm/drop_caches && free
In [ ]:
bad_frame_list = get_bad_frame_list( imgsum, fit=True, plot=True,
polyfit_order = 30, scale= 5.5, good_start = good_start)
print( 'The bad frame list length is: %s'%len(bad_frame_list) )
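The idea behind the fit-based rejection is to model the total intensity with a smooth polynomial trend and flag frames that deviate from it by more than scale standard deviations; a minimal sketch of that idea (illustrative only, not the chxanalys implementation):
In [ ]:
import numpy as np

def bad_frames_sketch( imgsum, polyfit_order=30, scale=5.5, good_start=0 ):
    '''Flag frames whose total intensity is a strong outlier from a smooth trend.'''
    y = np.asarray( imgsum, dtype=float )
    x = np.arange( len(y) )
    trend = np.polyval( np.polyfit( x, y, polyfit_order ), x )  #smooth baseline
    resid = y - trend
    bad = np.where( np.abs(resid) > scale * resid.std() )[0]
    return bad[ bad >= good_start ]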
In [ ]:
bp = find_bad_pixels( FD, bad_frame_list, md['uid'] )
In [ ]:
#bp
In [ ]:
bp.to_csv('/XF11ID/analysis/Commissioning/eiger4M_badpixel.csv', mode='a' )
In [ ]:
if False:
    mask = mask_exclude_badpixel( bp, mask, md['uid'] )
    avg_img = get_avg_imgc( FD, sampling=1, bad_frame_list=bad_frame_list )
In [ ]:
imgsum_y = imgsum[ np.array( [i for i in np.arange( len(imgsum)) if i not in bad_frame_list])]
imgsum_x = np.arange( len( imgsum_y))
save_lists( [imgsum_x, imgsum_y], label=['Frame', 'Total_Intensity'],
filename='uid=%s--img-sum-t'%uid, path= data_dir )
In [ ]:
plot1D( y = imgsum_y, title ='uid=%s--img-sum-t'%uid, xlabel='Frame',
ylabel='Total_Intensity', legend='imgsum', save=True, path=data_dir)
In [ ]:
show_saxs_qmap( avg_img, setup_pargs, width=600,vmin=.1, vmax=np.max(avg_img*.1), logs=True,
image_name= 'uid=%s--img-avg'%uid, save=True)
np.save( data_dir + 'uid=%s--img-avg'%uid, avg_img)
In [ ]:
hmask = create_hot_pixel_mask( avg_img, threshold = 100, center=center, center_radius= 400)
qp, iq, q = get_circular_average( avg_img, mask * hmask, pargs=setup_pargs, nx=None,
plot_ = True, show_pixel= False, xlim=[0.0001,.14], ylim = [0.00009, 1e3], save=True)
In [ ]:
pd = trans_data_to_pd( np.where( hmask !=1),
label=[md['uid']+'_hmask'+'x', md['uid']+'_hmask'+'y' ], dtype='list')
pd.to_csv('/XF11ID/analysis/Commissioning/eiger4M_badpixel.csv', mode='a' )
In [ ]:
mask =np.array( mask * hmask, dtype=bool)
#show_img( mask )
In [ ]:
if run_fit_form:
    form_res = fit_form_factor( q, iq, guess_values={'radius': 2500, 'sigma': 0.05,
        'delta_rho': 1E-10}, fit_range=[0.0001, 0.015], fit_variables={'radius': True, 'sigma': True,
        'delta_rho': True}, res_pargs=setup_pargs, xlim=[0.0001, 0.015] )
In [ ]:
uniform = True #False
In [ ]:
if not uniform:
    width = 0.0002
    number_rings = 1
    centers = [ 0.00235, 0.00379, 0.00508, 0.00636, 0.00773, 0.00902 ] #in A-1
    edges = get_non_uniform_edges( centers, width, number_rings )
    inner_radius = None
    outer_radius = None
    width = None
    num_rings = None
In [ ]:
if uniform:
    inner_radius = 0.006 #16
    outer_radius = 0.05  #112
    num_rings = 12
    width = ( outer_radius - inner_radius )/( num_rings + 6 )
    edges = None
In [ ]:
ring_mask, q_ring_center, q_ring_val = get_ring_mask( mask, inner_radius=inner_radius,
outer_radius = outer_radius , width = width, num_rings = num_rings, edges=edges,
unit='A', pargs=setup_pargs )
qind, pixelist = roi.extract_label_indices( ring_mask )
q_ring_center = np.round( q_ring_center, 4)
In [ ]:
show_ROI_on_image( avg_img, ring_mask, center, label_on = False, rwidth =700, alpha=.9,
save=True, path=data_dir, uid=uid, vmin=.09, vmax=np.max(avg_img))
In [ ]:
plot_qIq_with_ROI( q, iq, q_ring_center, logs=True, uid=uid, xlim=[0.001,.08],
ylim = [iq.min(), iq.max()], save=True, path=data_dir)
In [ ]:
roi_inten = check_ROI_intensity( avg_img, ring_mask, ring_number= 4, uid =uid, save=True, path=data_dir )
In [ ]:
Nimg = FD.end - FD.beg
time_edge = create_time_slice( N= Nimg, slice_num= 3, slice_width= 1, edges = None )
time_edge = np.array( time_edge ) + good_start
print( time_edge )
In [ ]:
qpt, iqst, qt = get_t_iqc( FD, time_edge, mask, pargs=setup_pargs, nx=1500 )
In [ ]:
plot_t_iqc( qt, iqst, time_edge, pargs=setup_pargs, xlim=[0.0001,.08],
ylim = [0.0001, 10], save=True )
In [ ]:
if run_waterfall:
    qindex = 3
    wat = cal_waterfallc( FD, ring_mask, qindex=qindex, save=True, path=data_dir, uid=uid )
In [ ]:
if run_waterfall:
    plot_waterfallc( wat, qindex, aspect=None,
                     vmax=10, uid=uid, save=True,
                     path=data_dir, beg=FD.beg )
In [ ]:
if run_t_ROI_Inten:
    times, mean_int_sets = cal_each_ring_mean_intensityc( FD, ring_mask, timeperframe=None )
    plot_each_ring_mean_intensityc( times, mean_int_sets, uid=uid, save=True, path=data_dir )
    ring_avg = np.average( mean_int_sets, axis=0 )
Note: enter the number of buffers for the multi-tau one-time correlation; the number of buffers has to be even. More details in https://github.com/scikit-beam/scikit-beam/blob/master/skbeam/core/correlation.py
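For example, with num_buf = 8 the multi-tau scheme keeps 8 unit-spaced lags and then doubles the lag spacing at each level; the resulting lag steps can be inspected directly (mirroring the multi_tau_lags call used further below; the printed pattern is 0-7, then 8,10,12,14, then 16,20,24,28, and so on):
In [ ]:
from skbeam.core.correlation import multi_tau_lags
num_bufs = 8 #must be even
num_levels = 6
tot_channels, lag_steps, dict_lag = multi_tau_lags( num_levels, num_bufs )
print( lag_steps ) #frame lags grow geometrically after the first level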
In [ ]:
define_good_series = False
if define_good_series:
    FD = Multifile( filename, beg=good_start, end=Nimg )
In [ ]:
uid_ = uid + '--fra-%s-%s'%(FD.beg, FD.end)
print( uid_ )
In [ ]:
lag_steps = None
In [ ]:
norm = get_pixelist_interp_iq( qp, iq, ring_mask, center )
norm = None #disable the I(q)-based normalization; comment this line out to use the norm computed above
In [ ]:
bad_frame_list
In [ ]:
t0 = time.time()
g2, lag_steps = cal_g2p( FD, ring_mask, bad_frame_list,good_start, num_buf = 8, num_lev= None,
imgsum= None, norm=norm )
run_time(t0)
In [ ]:
taus = lag_steps * timeperframe
res_pargs = dict(taus=taus, q_ring_center=q_ring_center, path=data_dir, uid=uid_ )
In [ ]:
save_g2( g2, taus=taus, qr= q_ring_center, qz=None, uid=uid_, path= data_dir )
In [ ]:
if not run_fit_g2:
    plot_g2( g2, res_pargs=res_pargs, master_plot='qz', vlim=[0.95, 1.05], geometry='saxs', append_name='' )
In [ ]:
if run_fit_g2:
    g2_fit_result, taus_fit, g2_fit = get_g2_fit( g2, res_pargs=res_pargs,
        function='stretched', vlim=[0.95, 1.05], fit_range=None,
        fit_variables={'baseline': True, 'beta': True, 'alpha': False, 'relaxation_rate': True},
        guess_values={'baseline': 1.0, 'beta': 0.05, 'alpha': 1.0, 'relaxation_rate': 0.01} )
    res_pargs_fit = dict( taus=taus_fit, q_ring_center=q_ring_center,
        path=data_dir, uid=uid + '_fra-%s-%s'%(FD.beg, FD.end) + '_g2_fit' )
In [ ]:
if run_fit_g2:
    dfv = save_g2_fit_para_tocsv( g2_fit_result,
        filename='uid=%s'%uid_ + '_g2_fit_paras', path=data_dir )
In [ ]:
if run_fit_g2:
    plot_g2( g2, res_pargs=res_pargs, tau_2=taus_fit, g2_2=g2_fit,
        fit_res=g2_fit_result, function='stretched', master_plot='qz',
        vlim=[0.95, 1.05], geometry='saxs', append_name='-fit' )
In [ ]:
if run_fit_g2:
    fit_q_rate( q_ring_center[:], dfv['relaxation_rate'], power_variable=False,
        uid=uid_, path=data_dir )
In [ ]:
data_pixel = None
if run_two_time:
    data_pixel = Get_Pixel_Arrayc( FD, pixelist, norm=norm ).get_data()
In [ ]:
t0 = time.time()
if run_two_time:
    g12b = auto_two_Arrayc( data_pixel, ring_mask, index=None )
    if lag_steps is None:
        num_bufs = 8
        noframes = FD.end - FD.beg
        num_levels = int( np.log( noframes/(num_bufs-1) )/np.log(2) + 1 ) + 1
        tot_channels, lag_steps, dict_lag = multi_tau_lags( num_levels, num_bufs )
        max_taus = lag_steps.max()
run_time( t0 )
In [ ]:
if run_two_time:
    show_C12( g12b, q_ind=0, N1=FD.beg, N2=min(FD.end, 1000), vmin=1.1, vmax=1.25,
        timeperframe=timeperframe, save=True,
        path=data_dir, uid=uid_ )
In [ ]:
if run_two_time:
    if lag_steps is None:
        num_bufs = 8
        noframes = FD.end - FD.beg
        num_levels = int( np.log( noframes/(num_bufs-1) )/np.log(2) + 1 ) + 1
        tot_channels, lag_steps, dict_lag = multi_tau_lags( num_levels, num_bufs )
        max_taus = lag_steps.max()
    #max_taus = lag_steps.max()
    max_taus = Nimg
    t0 = time.time()
    g2b = get_one_time_from_two_time( g12b )[:max_taus]
    run_time( t0 )
    tausb = np.arange( g2b.shape[0] )[:max_taus] * timeperframe
    res_pargsb = dict( taus=tausb, q_ring_center=q_ring_center, path=data_dir, uid=uid_ )
    save_g2( g2b, taus=tausb, qr=q_ring_center, qz=None, uid=uid_ + '-fromTwotime', path=data_dir )
    #plot_saxs_g2( g2b, tausb, vlim=[0.95, 1.05], res_pargs=res_pargsb )
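Conceptually, the one-time $g_2$ is recovered by averaging the two-time matrix along the diagonals of constant time difference $t_2 - t_1$; a minimal numpy sketch of that idea for a single q bin (illustrative only, not the chxanalys implementation):
In [ ]:
import numpy as np

def one_time_from_two_time_sketch( g12 ):
    '''Average a (frames x frames) two-time matrix along its diagonals.'''
    n = g12.shape[0]
    return np.array( [ np.diagonal( g12, offset=tau ).mean() for tau in range(n) ] )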
In [ ]:
if run_two_time:
    g2_fit_resultb, taus_fitb, g2_fitb = get_g2_fit( g2b, res_pargs=res_pargsb,
        function='stretched', vlim=[0.95, 1.05], fit_range=None,
        fit_variables={'baseline': True, 'beta': True, 'alpha': False, 'relaxation_rate': True},
        guess_values={'baseline': 1.0, 'beta': 0.05, 'alpha': 1.0, 'relaxation_rate': 0.01} )
    res_pargs_fitb = dict( taus=taus_fitb, q_ring_center=q_ring_center,
        path=data_dir, uid=uid_ + '_g2b_fit' )
    dfvb = save_g2_fit_para_tocsv( g2_fit_resultb,
        filename='uid=%s'%uid_ + '_g2b_fit_paras', path=data_dir )
In [ ]:
if run_two_time:
    plot_g2( g2b, res_pargs=res_pargsb, tau_2=taus_fitb, g2_2=g2_fitb,
        fit_res=g2_fit_resultb, function='stretched', master_plot='qz',
        vlim=[0.95, 1.05], geometry='saxs', append_name='-b-fit' )
In [ ]:
if run_two_time:
    #fit_q_rate( q_ring_center[:], dfvb['relaxation_rate'], power_variable=False, uid=uid + '-g2b', path=data_dir )
    plot_saxs_two_g2( g2, taus,
        g2b, tausb, res_pargs=res_pargs, vlim=[.95, 1.05], uid=uid_ )
In [ ]:
if run_four_time:
    t0 = time.time()
    g4 = get_four_time_from_two_time( g12b, g2=g2b )[:max_taus]
    run_time( t0 )
In [ ]:
if run_four_time:
    taus4 = np.arange( g4.shape[0] ) * timeperframe
    res_pargs4 = dict( taus=taus4, q_ring_center=q_ring_center, path=data_dir, uid=uid_ )
    save_saxs_g2( g4, res_pargs4, taus=taus4, filename='uid=%s'%uid_ + '--g4.csv' )
In [ ]:
if run_four_time:
    plot_saxs_g4( g4, taus4, vlim=[0.95, 1.05], logx=True, res_pargs=res_pargs4 )
In [ ]:
if run_xsvs:
    max_cts = get_max_countc( FD, ring_mask )
    qind, pixelist = roi.extract_label_indices( ring_mask )
    noqs = len( np.unique(qind) )
    nopr = np.bincount( qind, minlength=(noqs+1) )[1:]
    time_steps = np.array( utils.geometric_series( 2, len(imgs) ) )
    num_times = len( time_steps )
    #times = time_steps * exposuretime
    times = exposuretime + ( 2**( np.arange( len(time_steps) ) ) - 1 ) * acquisition_period
    #print( 'The time steps are: %s'%time_steps )
    print( 'The max counts are: %s'%max_cts )
    #print( times )
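In other words, the effective integration time at binning level $j$ is $t_j = t_{\mathrm{exp}} + (2^{j}-1)\, t_{\mathrm{acq}}$: summing $2^j$ consecutive frames spans $2^j - 1$ acquisition periods plus one exposure.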
In [ ]:
if run_xsvs:
    t0 = time.time()
    spec_bins, spec_his, spec_std = xsvsp( FD, np.int_(ring_mask), norm=None,
        max_cts=int(max_cts+2), bad_images=bad_frame_list, only_two_levels=True )
    spec_kmean = np.array( [ ring_avg * 2**j for j in range( spec_his.shape[0] ) ] )
    run_time( t0 )
In [ ]:
run_xsvs_all_lags = False
if run_xsvs_all_lags:
    times = exposuretime + lag_steps * acquisition_period
    if data_pixel is None:
        data_pixel = Get_Pixel_Arrayc( FD, pixelist, norm=norm ).get_data()
if run_xsvs_all_lags:
    t0 = time.time()
    spec_bins, spec_his, spec_std, spec_kmean = get_binned_his_std( data_pixel, np.int_(ring_mask), lag_steps )
    run_time( t0 )
In [ ]:
if run_xsvs:
    save_arrays( spec_his, label=q_ring_center, filename='uid=%s--spec_his'%uid_, path=data_dir )
    save_arrays( spec_std, label=q_ring_center, filename='uid=%s--spec_std'%uid_, path=data_dir )
    np.save( data_dir + 'uid=%s--spec_his'%uid_, spec_his )
    np.save( data_dir + 'uid=%s--spec_std'%uid_, spec_std )
    np.save( data_dir + 'uid=%s--K_mean'%uid_, spec_kmean )
In [ ]:
if run_xsvs:
    qth = None
    #max_bins = 1
    hist_err = spec_std #or None
    ML_val, KL_val, K_ = get_xsvs_fit( spec_his, spec_kmean, spec_std=hist_err, max_bins=2,
        varyK=False, qth=qth, spec_bins=spec_bins, lag_steps=lag_steps, rois_lowthres=None )
In [ ]:
if run_xsvs:
    #print( 'The observed average photon counts are: %s'%np.round(K_mean, 4) )
    #print( 'The fitted average photon counts are: %s'%np.round(K_, 4) )
    print( 'The summed difference of the average photon counts between fit and data is: %s'%np.round(
        abs( np.sum( spec_kmean[0,:] - K_ ) ), 4) )
    print( '#'*30 )
    qth = 10
    print( 'The fitted M for qth= %s is: %s'%(qth, ML_val[qth]) )
    print( K_[qth] )
    print( '#'*30 )
In [ ]:
if run_xsvs:
    qth = 3
    plot_xsvs_fit( spec_his, ML_val, KL_val, K_mean=spec_kmean, spec_std=spec_std,
        xlim=[0, 10], vlim=[.9, 1.1],
        uid=uid_, qth=qth, logy=True, times=times, q_ring_center=q_ring_center, path=data_dir )
In [ ]:
if run_xsvs:
    qth = None
    plot_xsvs_fit( spec_his, ML_val, KL_val, K_mean=spec_kmean, spec_std=spec_std,
        xlim=[0, 15], vlim=[.9, 1.1],
        uid=uid_, qth=qth, logy=True, times=times, q_ring_center=q_ring_center, path=data_dir )
In [ ]:
if run_xsvs:
    contrast_factorL = get_contrast( ML_val )
    df = save_KM( spec_kmean, KL_val, ML_val, qs=q_ring_center, uid=uid, path=data_dir )
    #print( df )
In [ ]:
if run_xsvs:
    qth = 3
    plot_g2_contrast( contrast_factorL, g2, times, taus, q_ring_center,
        vlim=[0.8, 1.2], qth=qth, uid=uid_, path=data_dir, legend_size=14 )
In [ ]:
if run_xsvs:
    plot_g2_contrast( contrast_factorL, g2, times, taus, q_ring_center,
        vlim=[0.8, 1.2], qth=None, uid=uid_, path=data_dir, legend_size=4 )
In [ ]:
pdf_out_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/')
ver='_1'
pdf_filename = "XPCS_Analysis_Report_for_uid=%s%s.pdf"%(uid,ver)
if run_xsvs:
    pdf_filename = "XPCS_XSVS_Analysis_Report_for_uid=%s%s.pdf"%(uid, ver)
In [ ]:
pdf_filename
In [ ]:
md['mask_file']= mask_path + mask_name
md['mask'] = mask
md['NOTEBOOK_FULL_PATH'] = None
md['good_start'] = good_start
md['bad_frame_list'] = bad_frame_list
md['avg_img'] = avg_img
md['ring_mask'] = ring_mask
md['q_ring_center']= q_ring_center
md['q_ring_val'] = q_ring_val
md['beam_center_x'] = center[1]
md['beam_center_y']= center[0]
md['beg'] = FD.beg
md['end'] = FD.end
md['metadata_file'] = data_dir + 'md.csv-&-md.pkl'
psave_obj( md, data_dir + 'uid=%s-md'%uid ) #save the setup parameters
save_dict_csv( md, data_dir + 'uid=%s-md.csv'%uid, 'w')
In [ ]:
#%run /XF11ID/analysis/Analysis_Pipelines/Develop/chxanalys/chxanalys/Create_Report.py
In [ ]:
c= create_pdf_report( data_dir, uid, pdf_out_dir, filename= pdf_filename, user= username)
#Page one: Meta-data/Iq-Q/ROI
c.report_header(page=1)
c.report_meta( top=730)
c.report_static( top=550, iq_fit = run_fit_form )
c.report_ROI( top= 300)
#Page Two: img~t/iq~t/waterfall/mean~t/g2/rate~q
c.new_page()
c.report_header(page=2)
c.report_time_analysis( top= 720)
c.report_one_time( top= 350)
#Page Three: two-time/two g2
page = 2
if run_two_time:
    c.new_page()
    page += 1
    c.report_header(page=page)
    c.report_two_time( top=720 )
if run_four_time:
    c.new_page()
    page += 1
    c.report_header(page=page)
    c.report_four_time( top=720 )
if run_xsvs:
    c.new_page()
    page += 1
    c.report_header(page=page)
    c.report_xsvs( top=720 )
c.save_page()
c.done()
In [ ]:
#att_pdf_report = True
In [ ]:
if att_pdf_report:
    os.environ['HTTPS_PROXY'] = 'https://proxy:8888'
    os.environ['no_proxy'] = 'cs.nsls2.local,localhost,127.0.0.1'
    filename = c.filename
    atch = [ Attachment( open(filename, 'rb') ) ]
    try:
        update_olog_uid( uid=md['uid'], text='Add XPCS Analysis PDF Report', attachments=atch )
    except Exception:
        print( "Cannot attach the PDF %s, likely due to a duplicated filename. Please use a different file name."%c.filename )
In [ ]: