"This notebook corresponds to version {{ version }} of the pipeline tool: https://github.com/NSLS-II/pipelines"
This notebook begins with a raw time-series of images and ends with $g_2(t)$ for a range of $q$, fit to an exponential or stretched exponential, and a two-time correlation function.
CHX Olog (https://logbook.nsls2.bnl.gov/11-ID/)
Import packages for I/O, visualization, and analysis.
In [6]:
from chxanalys.chx_libs import (np, roi, time, datetime, os, get_events,
getpass, db, get_images,LogNorm, plt,tqdm, utils, Model)
from chxanalys.chx_generic_functions import (get_detector, get_fields, get_sid_filenames,
load_data, load_mask,get_fields, reverse_updown, ring_edges,get_avg_img,check_shutter_open,
apply_mask, show_img,check_ROI_intensity,run_time, plot1D, get_each_frame_intensity,
create_hot_pixel_mask,show_ROI_on_image,create_time_slice,save_lists,
save_arrays, psave_obj,pload_obj, get_non_uniform_edges )
from chxanalys.XPCS_SAXS import (get_circular_average,save_lists,get_ring_mask, get_each_ring_mean_intensity,
plot_qIq_with_ROI,save_saxs_g2,plot_saxs_g2,fit_saxs_g2,cal_g2,
create_hot_pixel_mask,get_circular_average,get_t_iq,save_saxs_g2,
plot_saxs_g2,fit_saxs_g2,fit_q2_rate,plot_saxs_two_g2,fit_q_rate,
circular_average,plot_saxs_g4, get_t_iqc,multi_uids_saxs_xpcs_analysis)
from chxanalys.Two_Time_Correlation_Function import (show_C12, get_one_time_from_two_time,
get_four_time_from_two_time,rotate_g12q_to_rectangle)
from chxanalys.chx_compress_analysis import ( compress_eigerdata, read_compressed_eigerdata,
Multifile,get_avg_imgc, get_each_frame_intensityc,
get_each_ring_mean_intensityc, mean_intensityc,cal_waterfallc,plot_waterfallc)
from chxanalys.SAXS import fit_form_factor
from chxanalys.chx_correlationc import ( cal_g2c,Get_Pixel_Arrayc,auto_two_Arrayc,get_pixelist_interp_iq,)
from chxanalys.chx_correlationp import (cal_g2p, auto_two_Arrayp)
from chxanalys.Create_Report import (create_pdf_report,
create_multi_pdf_reports_for_uids,create_one_pdf_reports_for_uids)
from chxanalys.XPCS_GiSAXS import (get_qedge,get_qmap_label,get_qr_tick_label, get_reflected_angles,
convert_gisaxs_pixel_to_q, show_qzr_map, get_1d_qr, get_qzrmap, show_qzr_roi,get_each_box_mean_intensity,
save_gisaxs_g2,plot_gisaxs_g2, fit_gisaxs_g2,plot_gisaxs_two_g2,plot_qr_1d_with_ROI,fit_qr_qz_rate,
multi_uids_gisaxs_xpcs_analysis,plot_gisaxs_g4)
%matplotlib notebook
In [7]:
# Silence matplotlib's "too many open figures" warning; this notebook
# creates many figures (one per q-ring / per run).
plt.rcParams['figure.max_open_warning'] = 0
In [8]:
%%javascript
// Capture this notebook's server-side path into the Python kernel as
// NOTEBOOK_FULL_PATH (used later to record provenance in the metadata dict).
// NOTE(review): kernel.execute is asynchronous — the variable only exists
// after this cell's round-trip completes.
var nb = IPython.notebook;
var kernel = IPython.notebook.kernel;
var command = "NOTEBOOK_FULL_PATH = '" + nb.base_url + nb.notebook_path + "'";
kernel.execute(command);
In [9]:
# Sanity-check that the %%javascript cell above populated NOTEBOOK_FULL_PATH.
print("NOTEBOOK_FULL_PATH:\n", NOTEBOOK_FULL_PATH)
In [10]:
# Beamline cycle and user configuration for the output directory.
CYCLE = '2016_3'
username = getpass.getuser()
#username = "commissioning"
# NOTE(review): the getpass.getuser() value above is immediately overridden
# by this hardcoded name — results go into Dursch's analysis tree.
username = "Dursch"
date_path = datetime.now().strftime('%Y/%m/%d')  # e.g., '2016/03/01' (currently unused below)
data_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/')
##Or define data_dir here, e.g.,#data_dir = '/XF11ID/analysis/2016_2/rheadric/test/'
os.makedirs(data_dir, exist_ok=True)
print('Results from this analysis will be stashed in the directory %s' % data_dir)
In [72]:
# Query the databroker for every run in this time window and collect the
# scan ids / uids, reordered oldest-first.
start_time, stop_time = '2016-11-3 15:59:00', '2016-11-3 16:05:00'
#count : 1 ['8c9112'] (scan num: 126) (Measurement: PSPMMA1b 200C 50 x.002s 2s period_run_0_0 )
hdrs = db(start_time= start_time, stop_time = stop_time)
print('Totally %s uids are found'%(len(hdrs)))
sids = []
uids = []
for hdr in hdrs:
    sid_uid = get_sid_filenames(hdr)
    print(sid_uid)
    sids.append(sid_uid[0])
    uids.append(sid_uid[1])
# db() returns newest-first; flip both lists so index 0 is the earliest run.
sids.reverse()
uids.reverse()
uid = uids[0]
In [12]:
# Spot-check one of the collected uids.
uids[4]
Out[12]:
In [ ]:
In [ ]:
In [122]:
uid_= 'MKSAG_20-T=50C-Nov3-8scan' #give a meaningful folder name
# Per-sample results directory under the user's Results/ tree.
data_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/%s/'%uid_)
os.makedirs(data_dir, exist_ok=True)
print('Results from this analysis will be stashed in the directory %s' % data_dir)
In [123]:
# Subdirectory for results averaged over the repeated time series.
data_dir_ = os.path.join( data_dir, 'Average/')
os.makedirs(data_dir_, exist_ok=True)
print ( data_dir_)
In [124]:
# Identify the detector used for this run and locate the raw data files.
detector = get_detector( db[uid ] )
print ('Detector is: %s'%detector )
# sud = (scan_id, full_uid, [data file paths]) per get_sid_filenames.
sud = get_sid_filenames(db[uid])
print ('scan_id, full-uid, data path are: %s--%s--%s'%(sud[0], sud[1], sud[2][0] ))
In [125]:
# Load the image time series; reverse=True flips the images up/down
# (detector orientation) — confirm against the beamline convention.
imgs = load_data( uid, detector , reverse= True )
Nimg = len(imgs)
# Detector/acquisition metadata attached to the image sequence.
md = imgs.md
In [126]:
# Copy the measurement description and sample name from the databroker start
# document into md, falling back to placeholders if the fields are absent.
try:
    md['Measurement'] = db[uid]['start']['Measurement']
    md['sample'] = db[uid]['start']['sample']
    # md['sample'] = 'PS205000-PMMA-207000-SMMA3'   # manual override, kept for reference
    # BUG FIX: the override line above was "commented" as `m#d[...]`, leaving a
    # bare `m` expression that raised NameError — so the except branch always
    # ran and clobbered the metadata just loaded from the start document.
    print( md['sample'])
except Exception:
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
    md['Measurement'] = 'Measurement'
    md['sample'] = 'sample'
In [127]:
# Show the image-sequence repr (frame count / shape) as a sanity check.
imgs
Out[127]:
In [128]:
# Inspect the accumulated metadata dictionary.
md
Out[128]:
In [129]:
# Experiment geometry and timing pulled from the image metadata.
# The physical size of the pixels
dpix = md['x_pixel_size'] * 1000. #in mm, eiger 4m is 0.075 mm
lambda_ =md['incident_wavelength'] # wavelength of the X-rays in Angstroms
Ldet = 4.84 * 1000 # detector to sample distance (mm), currently, *1000 for saxs, *1 for gisaxs
exposuretime= md['count_time']
acquisition_period = md['frame_time']
print( 'The sample is %s'%( md['sample'] ))
print( 'Exposuretime=%s sec, Acquisition_period=%s sec'%( exposuretime, acquisition_period ))
timeperframe = acquisition_period#for g2
#timeperframe = exposuretime#for visibility
#timeperframe = 2 ## manual overwrite!!!! we apparently writing the wrong metadata....
center = [ md['beam_center_x'], md['beam_center_y'] ] #for 4M
# Swap to (y, x) ordering — presumably row/column indexing for the ROI code;
# TODO(review): confirm against get_ring_mask's expected center convention.
center=[center[1], center[0]]
print ('Beam center=', center)
In [ ]:
In [130]:
# Bundle the geometry/timing parameters passed to the SAXS analysis helpers.
setup_pargs=dict(uid=uid_, dpix= dpix, Ldet=Ldet, lambda_= lambda_,
                 timeperframe=timeperframe, center=center, path= data_dir_, md=md)
In [131]:
# Display the parameter bundle for the record.
setup_pargs
Out[131]:
In [132]:
# Location of the pre-made detector mask (shared masks directory for the cycle).
mask_path = '/XF11ID/analysis/2016_3/masks/'
mask_name = 'Nov3_4M_mask.npy'
In [133]:
# Load and plot the mask; reverse=True matches the flip applied to the images.
# NOTE(review): image_name uses the raw `uid` while most later plots use `uid_`.
mask = load_mask(mask_path, mask_name, plot_ = True, reverse=True, image_name = 'uid=%s-mask'%uid )
In [134]:
# Record the mask and notebook provenance in the metadata dict.
md['mask'] = mask
md['mask_file']= mask_path + mask_name
md['NOTEBOOK_FULL_PATH'] = NOTEBOOK_FULL_PATH
#psave_obj( md, data_dir + 'uid=%s-md'%uid ) #save the setup parameters
#md = pload_obj(data_dir + 'uid=%s-md'%uid )
In [135]:
# Masked view of the image series; downstream analysis uses imgsa, not imgs.
imgsa = apply_mask( imgs, mask )
In [136]:
# Average image from ~3 sampled frames (sampling step = Nimg/3) — quick look,
# not a full-series average.
avg_img = get_avg_img( imgsa, sampling = int(Nimg/3), plot_ = True, uid =uid)
In [137]:
#avg_img = get_avg_imgc( FD, beg=0,end=10000,sampling = 1, plot_ = False )
# Plot and save the averaged image with a linear color scale.
show_img( avg_img, vmin=.1, vmax=3.0, logs=False, image_name= 'uid=%s--img-avg-'%uid_,
          save=True, path=data_dir_)
md['avg_img'] = avg_img
In [138]:
# Fold hot pixels (counts >= 2**15) into the working mask ...
hmask = create_hot_pixel_mask( avg_img, 2**15 )
mask = mask * hmask
# ... then rebuild hmask with an effectively-infinite threshold (1e8) so the
# circular average below is computed with the already-updated `mask` only.
hmask = create_hot_pixel_mask( avg_img, 1e8)
# Azimuthal (circular) average: qp in pixels, iq = I(q), q in A^-1.
qp, iq, q = get_circular_average( avg_img, mask * hmask, pargs=setup_pargs, nx=None,
        plot_ = True, show_pixel= False, xlim=[0.0001,.14], ylim = [0.00009, 1e0], save=True)
In [139]:
# Choose between uniformly spaced q-rings (True) and hand-picked ring centers.
uniform = True
In [140]:
# Hand-picked ring definition (only used when uniform is False).
if not uniform:
    #width = 4 # in pixel
    width = 0.001
    number_rings=1
    #centers = [ 31, 50, 67, 84, 102, 119] #in pixel
    # NOTE(review): the first `centers` assignment is dead — it is immediately
    # overwritten by the next line; kept here as an alternative ring set.
    centers = [ 0.00235,0.00379,0.00508,0.00636,0.00773, 0.00902] #in A-1
    centers = [ 0.0065,0.0117,0.021,0.0336,0.044, 0.057] #in A-1
    edges = get_non_uniform_edges( centers, width, number_rings )
    inner_radius= None
    outer_radius = None
    width = None
    num_rings = None
In [141]:
# Uniform ring definition: ten rings of width 0.0035 A^-1 spanning
# 0.0115 -> 0.069 A^-1; edges=None tells get_ring_mask to build them itself.
if uniform:
    inner_radius, outer_radius = 0.0115, 0.069
    width, num_rings = 0.0035, 10
    edges = None
In [142]:
# Build the labelled ring ROI mask and the per-ring q centers/bounds.
ring_mask, q_ring_center, q_ring_val = get_ring_mask( mask, inner_radius=inner_radius, unit='A',
        outer_radius = outer_radius , width = width, num_rings = num_rings, edges=edges, pargs=setup_pargs )
# Flattened (label, pixel-index) arrays used by the correlation code.
qind, pixelist = roi.extract_label_indices( ring_mask )
In [143]:
# Record the ROI definition in md and persist the setup for later reloading.
md['center'] = center
md['ring_mask'] = ring_mask
md['q_ring_center']= q_ring_center
md['q_ring_val'] = q_ring_val
# center was swapped to (y, x) earlier, so center[1]/center[0] restore the
# original beam_center_x / beam_center_y — TODO(review): confirm ordering.
md['beam_center_x'] = center[1]
md['beam_center_y']= center[0]
md['data_dir'] = data_dir
psave_obj( md, data_dir_ + 'uid=%s-md'%uid_ ) #save the setup parameters
In [144]:
#pload_obj( data_dir + 'uid=%s-md'%uid_ )
In [186]:
# Overlay the ring ROIs on the averaged image and save to the Average/ dir.
show_ROI_on_image( avg_img, ring_mask, center, label_on = False, rwidth=800, alpha=.9,
                   vmax=30, save=True, path=data_dir_, uid=uid_)
In [146]:
# Mark the ring positions on the circular-average I(q) curve.
# Consistency fix: save under the per-sample uid_/data_dir_ like the other
# averaged-result plots in this notebook (the original used the raw `uid`
# and the parent data_dir).
plot_qIq_with_ROI( q, iq, q_ring_center, logs=True, uid=uid_, xlim=[0.001,.12],
                   ylim = [1e-4, 1e0], save=True, path=data_dir_)
In [147]:
# Split the uid list into run_num groups of sub_num time series each.
run_num = 1
sub_num = len(uids) //run_num
In [148]:
# Confirm the output directory in use.
data_dir
Out[148]:
In [149]:
# Main XPCS pass: compress each series, compute g2 per ring, and fit.
# Returns nested dicts keyed by run then sub-run, plus the lag times and
# the uids that processed successfully.
g2s, taus, useful_uids = multi_uids_saxs_xpcs_analysis( uids, md, run_num, sub_num,
        fit= True, force_compress=False, compress=True )
In [152]:
# Inspect which uids survived the analysis.
useful_uids
Out[152]:
In [155]:
%run /XF11ID/analysis/Analysis_Pipelines/Develop/chxanalys/chxanalys/Create_Report.py
In [158]:
# One PDF report per analyzed uid, suffixed '_2'.
create_multi_pdf_reports_for_uids( useful_uids, g2s, data_dir, append_name='_2' )
In [159]:
# Facility proxy settings needed to reach the Olog service from the beamline.
os.environ['HTTPS_PROXY'] = 'https://proxy:8888'
os.environ['no_proxy'] = 'cs.nsls2.local,localhost,127.0.0.1'
In [160]:
from chxanalys.chx_olog import LogEntry,Attachment, update_olog_uid, update_olog_id
In [163]:
# Attach each per-uid PDF report to its Olog entry.
for key in list( useful_uids.keys()):
    for k in list( useful_uids[key]):
        uid_k = useful_uids[key][k]
        filename = data_dir + 'XPCS_Analysis_Report_for_uid=%s_2.pdf'%uid_k
        # Use a context manager so each PDF handle is closed after upload
        # (the original leaked one open file handle per report).
        with open(filename, 'rb') as f:
            atch = [ Attachment(f) ]
            update_olog_uid( uid=uid_k, text='Add XPCS Analysis PDF Report', attachments= atch )
In [165]:
# Single combined PDF covering all useful uids, named after uid_.
create_one_pdf_reports_for_uids( useful_uids, g2s, data_dir, filename=uid_ )
In [166]:
# Sub-run keys available for run 1.
list( g2s[1].keys() )
Out[166]:
In [167]:
# Sub-run keys to exclude from the average below (none flagged for this run).
bad_uids =[ ]
In [168]:
# Average g2 over each run's good sub-series, skipping keys listed in bad_uids.
g2s_average = {}  # g2s_average[run] = g2 averaged over that run's sub-series
for key in list( g2s.keys()):
    # Accumulator shaped like sub-series 1 (assumed present for every run).
    g2s_average[key] = np.zeros_like( g2s[key][1])
    n_good = 0  # number of sub-series actually accumulated
    for sub_key in list( g2s[key].keys() ):
        if sub_key in bad_uids:
            print( 'sub= %s,Can not Doing average here'%sub_key)
            continue
        try:
            g2s_average[key] += g2s[key][sub_key]
            n_good += 1
            print( 'Run= %s--sub_run= %s-- pass'%(key, sub_key) )
        except Exception:
            # Shape mismatch or similar (e.g. a truncated series): skip this
            # sub-run. Narrowed from a bare `except:` so interrupts propagate.
            print( 'sub= %s,Can not Doing average here'%sub_key)
    # Guard the normalization: the original divided by (i-1) unconditionally,
    # which is a divide-by-zero when no sub-run passes.
    if n_good:
        g2s_average[key] /= n_good
In [169]:
# Persist the run-averaged g2 curves, one file per run, into the Average/ dir.
for run_key in g2s:
    res_pargs = dict( taus=taus, q_ring_center=q_ring_center,
                      path=data_dir_, uid= uid_ + '@run--%s'%run_key )
    save_saxs_g2( g2s_average[run_key], res_pargs )
In [170]:
# Per-sub-run g2 plots; disabled by design (flip False -> True to enable).
if False:
    for key in list( g2s.keys()):
        for sub_key in list( g2s[key].keys()):
            res_pargs = dict(taus=taus, q_ring_center=q_ring_center,
                             path=data_dir_, uid= uid_ + '@run--%s--subrun--%s'%(key, sub_key) )
            plot_saxs_g2( g2s[key][sub_key], taus, vlim=[0.95, 1.05], res_pargs=res_pargs)
In [171]:
# Run-averaged g2 plots; disabled by design (flip False -> True to enable).
if False:
    for key in list( g2s.keys()):
        res_pargs = dict(taus=taus, q_ring_center=q_ring_center,
                         path=data_dir_, uid= uid_ + '@run--%s'%key )
        plot_saxs_g2( g2s_average[key], taus, vlim=[0.95, 1.05], res_pargs=res_pargs)
In [172]:
# Toggle for the stretched-exponential fitting cell below.
fit = True
In [174]:
# Fit each run-averaged g2 with a stretched exponential (alpha fixed at 1.0,
# i.e. effectively a simple exponential), then fit rate vs q.
if fit:
    for key in list( g2s.keys()):
        res_pargs = dict(taus=taus, q_ring_center=q_ring_center,
                         path=data_dir_, uid= uid_ + '@run--%s'%key )
        fit_result = fit_saxs_g2( g2s_average[key], res_pargs, function = 'stretched', vlim=[0.95, 1.05],
            fit_variables={'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True},
            guess_values={'baseline':1.45,'beta':0.08,'alpha':1.0,'relaxation_rate':0.01})
        #psave_obj( fit_result, data_dir + uid_ + '@run--%s'%key )
        fit_q_rate( q_ring_center, fit_result['rate'], power_variable= False,
                    uid=uid_ + '@run--%s'%key, path= data_dir_ )
        # NOTE(review): saves to data_dir while fit_q_rate above uses
        # data_dir_ — confirm whether the parent directory is intended here.
        save_lists( [q_ring_center**2,fit_result['rate']], ['q2','rate'],
                    filename= 'Q2-rate-uid=%s'%(uid_ + '@run--%s'%key), path= data_dir)
In [175]:
# Toggle for the averaged-results PDF report below.
create_report = True
In [176]:
#data_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/')
In [177]:
# The averaged-results report goes into the top-level Results/ directory.
pdf_out_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/')
filename= "XPCS_Analysis_Report_for_uid=%s-average.pdf"%uid_
print (pdf_out_dir + filename)
In [178]:
# Confirm the sample label used for file naming.
uid_
Out[178]:
In [179]:
# Assemble the averaged-results PDF: metadata / I(q) / ROI on page one,
# then two g2-fit panels (top and bottom) per subsequent page.
if create_report:
    c = create_pdf_report( data_dir_, uid_, pdf_out_dir, filename=filename )
    # Page one: meta-data / Iq-Q / ROI
    page = 1
    c.report_header(page=page)
    c.report_meta( top=730 )
    c.report_static( top=560 )
    c.report_ROI( top=300 )
    for i, key in enumerate( list( g2s.keys()) ):
        if i % 2 == 0:
            # Start a fresh page for every pair of runs.
            page += 1
            c.new_page()
            c.report_header(page=page)
        # Even-indexed runs go in the upper panel, odd-indexed in the lower.
        top = 350 if i % 2 else 720
        c.report_one_time( top=top,
                           g2_fit_file = 'uid=%s@run--%s--g2--fit-.png'%(uid_, key),
                           q_rate_file = 'uid=%s@run--%s--Q-Rate--fit-.png'%(uid_, key) )
    c.save_page()
    c.done()
In [180]:
# Number of sub-runs in run 1; used to index the last sub-run below.
last_uid_num = len( useful_uids[1] )
In [185]:
# Pick the last sub-run's uid — assumes useful_uids[1] is keyed 1..N
# (not 0-based); TODO(review): confirm against multi_uids_saxs_xpcs_analysis.
uid_last = useful_uids[1][ last_uid_num ]
print( uid_last )
In [182]:
#uid_last = useful_uids[1][-1]
In [183]:
# Path of the PDF just written by the report builder.
c.filename
Out[183]:
In [184]:
#uid_ = uid_last #useful_uids[key][k]
# Attach the averaged-results PDF to the Olog entry of the last sub-run.
filename = c.filename #XPCS_Analysis_Report_for_uid=%s.pdf'%uid_
# Use a context manager so the PDF file handle is closed after upload
# (the original left it open).
with open(filename, 'rb') as f:
    atch = [ Attachment(f) ]
    update_olog_uid( uid=uid_last, text='Add XPCS Analysis PDF Report', attachments= atch )
In [ ]:
In [ ]:
In [ ]:
In [ ]:
In [ ]:
In [ ]:
In [ ]:
In [ ]:
In [ ]:
In [ ]:
In [ ]: