XPCS Pipeline for GiSAXS

"This notebook corresponds to version {{ version }} of the pipeline tool: https://github.com/NSLS-II/pipelines"

This notebook begins with a raw time-series of images and ends with $g_2(t)$ for a range of $q$, fit to an exponential or stretched exponential, and a two-time correlation function.
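
For reference, the stretched-exponential model used in the Fitting section has the conventional form $g_2(q,t) = \mathrm{baseline} + \beta\, e^{-2(\Gamma t)^{\alpha}}$, where $\beta$ is the speckle contrast, $\Gamma$ the relaxation rate, and $\alpha$ the stretching exponent ($\alpha = 1$ recovers a simple exponential); the exact parameterization inside fit_gisaxs_g2 may differ in detail.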

Overview

  • Setup: load packages/setup path
  • Load Metadata & Image Data
  • Apply Mask
  • Clean Data: shutter open/bad frames
  • Get Q-Map
  • Get 1D curve
  • Define Q-ROI (qr, qz)
  • Check beam damage
  • One-time Correlation
  • Fitting
  • Two-time Correlation

The important scientific code is imported from the chxanalys and scikit-beam projects. Refer to chxanalys and scikit-beam for additional documentation and citation information.

CHX Olog NoteBook

CHX Olog (https://logbook.nsls2.bnl.gov/11-ID/)

Setup

Import packages for I/O, visualization, and analysis.


In [577]:
from chxanalys.chx_libs import (np, roi, time, datetime, os, get_events, 
                                getpass, db, get_images,LogNorm, plt,tqdm, utils, Model,
                               multi_tau_lags)

from chxanalys.chx_generic_functions import (get_detector, get_fields, get_sid_filenames,
     load_data, load_mask, reverse_updown, ring_edges, get_avg_img, check_shutter_open,
     apply_mask, show_img, check_ROI_intensity, run_time, plot1D, get_each_frame_intensity,
     create_hot_pixel_mask, show_ROI_on_image, create_time_slice, save_lists,
     save_arrays, psave_obj, pload_obj, get_non_uniform_edges)
 

from chxanalys.XPCS_SAXS import (get_circular_average, save_lists, get_ring_mask,
                                 get_each_ring_mean_intensity, plot_qIq_with_ROI,
                                 save_saxs_g2, plot_saxs_g2, fit_saxs_g2, cal_g2,
                                 create_hot_pixel_mask, get_t_iq, fit_q2_rate,
                                 plot_saxs_two_g2, fit_q_rate, circular_average,
                                 plot_saxs_g4, get_t_iqc, multi_uids_saxs_xpcs_analysis)


from chxanalys.Two_Time_Correlation_Function import (show_C12, get_one_time_from_two_time,
                                                get_four_time_from_two_time,rotate_g12q_to_rectangle)

from chxanalys.chx_compress_analysis import ( compress_eigerdata, read_compressed_eigerdata,
                                             Multifile,get_avg_imgc, get_each_frame_intensityc,
                get_each_ring_mean_intensityc, mean_intensityc,cal_waterfallc,plot_waterfallc)

from chxanalys.SAXS import fit_form_factor
from chxanalys.chx_correlationc import (cal_g2c, Get_Pixel_Arrayc, auto_two_Arrayc, get_pixelist_interp_iq)
from chxanalys.chx_correlationp import (cal_g2p, auto_two_Arrayp)

from chxanalys.Create_Report import (create_pdf_report, 
                                create_multi_pdf_reports_for_uids,create_one_pdf_reports_for_uids)


from chxanalys.XPCS_GiSAXS import (get_qedge,get_qmap_label,get_qr_tick_label, get_reflected_angles,
    convert_gisaxs_pixel_to_q, show_qzr_map, get_1d_qr, get_qzrmap, show_qzr_roi,get_each_box_mean_intensity,
    save_gisaxs_g2,plot_gisaxs_g2, fit_gisaxs_g2,plot_gisaxs_two_g2,plot_qr_1d_with_ROI,fit_qr_qz_rate,
                                  multi_uids_gisaxs_xpcs_analysis,plot_gisaxs_g4,
                                  get_t_qrc, plot_t_qrc)

%matplotlib notebook

In [10]:
from chxtools.handlers import LazyEigerHandler
db.fs.register_handler('AD_EIGER', LazyEigerHandler)

In [11]:
plt.rcParams.update({'figure.max_open_warning': 0})

In [12]:
#%reset

In [13]:
%%javascript
var nb = IPython.notebook;
var kernel = IPython.notebook.kernel;
var command = "NOTEBOOK_FULL_PATH = '" + nb.base_url + nb.notebook_path + "'";
kernel.execute(command);



In [14]:
print("NOTEBOOK_FULL_PATH:\n", NOTEBOOK_FULL_PATH)


NOTEBOOK_FULL_PATH:
 /user/yuzhang/analysis/2016_3/yuzhang/XPCS_GiSAXS_Single_Run_Oct11.ipynb

Make a directory for saving results


In [15]:
CYCLE = '2016_3'

username = getpass.getuser()

date_path = datetime.now().strftime('%Y/%m/%d')  # e.g., '2016/03/01'
data_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/')



# Or define data_dir here, e.g.:
# data_dir = '/XF11ID/analysis/2016_2/rheadric/test/'


os.makedirs(data_dir, exist_ok=True)
print('Results from this analysis will be stashed in the directory %s' % data_dir)


Results from this analysis will be stashed in the directory /XF11ID/analysis/2016_3/yuzhang/Results/

Load Metadata & Image Data

Print the detector, scan id, uid, and data path of the collected data.

Change these lines


In [16]:
uid = 'a3e323'  #['a3e323'] (scan num: 2626) (Measurement: XPCS: C60 on Si, 18k frames, 10Hz, incidence angle 0.7 deg )

In [17]:
data_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/%s/'%uid)

os.makedirs(data_dir, exist_ok=True)
print('Results from this analysis will be stashed in the directory %s' % data_dir)


Results from this analysis will be stashed in the directory /XF11ID/analysis/2016_3/yuzhang/Results/a3e323/

Don't Change these lines below here


In [18]:
detector = get_detector( db[uid ] )
print ('Detector is:  %s'%detector  )
sud = get_sid_filenames(db[uid])
print ('scan_id, full-uid, data path are:  %s--%s--%s'%(sud[0], sud[1], sud[2][0] ))


Detector is:  eiger4m_single_image
scan_id, full-uid, data path are:  2626--a3e323b4-d2aa-428f-8a1d-5d87b4fa30f1--/XF11ID/data/2016/10/11/b0aadcff-7922-4559-bda6_584

In [19]:
imgs = load_data( uid, detector  )
Nimg = len(imgs)
md = imgs.md


hdf5 path = /XF11ID/data/2016/10/11/b0aadcff-7922-4559-bda6_584_master.h5

In [20]:
try:
    md['Measurement']= db[uid]['start']['Measurement']
    md['sample']=db[uid]['start']['sample']  
    #md['sample']= 'SiO2 Colloidal'  #change the sample name if the md['sample'] is wrong
    print( 'The sample is %s' %md['sample'])
    
except:
    md['Measurement']= 'Measurement'
    md['sample']='sample'


The sample is OTS Si substrate 1

In [21]:
print( 'The data are: %s' %imgs )


The data are: <Frames>
Length: 18000 frames
Frame Shape: 2167 x 2070
Pixel Datatype: uint32

Overwrite Some Metadata Due to Wrong Input


In [22]:
print( 'The Metadata are: \n%s' %md )


The Metadata are: 
{'sample': 'OTS Si substrate 1', 'incident_wavelength': 0.9686265, 'frame_time': 0.1, 'x_pixel_size': 7.5000004e-05, 'count_time': 0.099990003, 'pixel_mask': array([[0, 0, 0, ..., 0, 0, 4],
       [0, 0, 0, ..., 0, 0, 0],
       [0, 0, 0, ..., 0, 0, 0],
       ..., 
       [0, 0, 0, ..., 0, 0, 0],
       [0, 0, 0, ..., 0, 0, 0],
       [0, 0, 0, ..., 0, 0, 0]], dtype=uint32), 'beam_center_y': 64.0, 'beam_center_x': 1572.0, 'detector_distance': 3990.0, 'y_pixel_size': 7.5000004e-05, 'Measurement': 'XPCS: C60 on Si, 18k frames, 10Hz, incidence angle 0.7 deg'}

In [23]:
# The physical size of the pixels
dpix = md['x_pixel_size'] * 1000.  #in mm, eiger 4m is 0.075 mm
lambda_ =md['incident_wavelength']    # wavelegth of the X-rays in Angstroms
Ldet = md['detector_distance']/1000.  
# detector to sample distance (m), 
exposuretime= md['count_time']
acquisition_period = md['frame_time']
print( 'The sample is %s'%(  md['sample']  ))
print( 'Exposuretime=%s sec, Acquisition_period=%s sec'%( exposuretime, acquisition_period  ))

timeperframe = acquisition_period#for g2
#timeperframe = exposuretime#for visiblitly
#timeperframe = 2  ## manual overwrite!!!! we apparently writing the wrong metadata....


The sample is OTS Si substrate 1
Exposuretime=0.09999 sec, Acquisition_period=0.1 sec

In [24]:
setup_pargs=dict(uid=uid, dpix= dpix, Ldet=Ldet, lambda_= lambda_, 
        timeperframe=timeperframe,  path= data_dir)

In [25]:
setup_pargs


Out[25]:
{'Ldet': 3.9900000000000002,
 'dpix': 0.075000003562308848,
 'lambda_': 0.9686265,
 'path': '/XF11ID/analysis/2016_3/yuzhang/Results/a3e323/',
 'timeperframe': 0.1,
 'uid': 'a3e323'}

Apply Mask

  • Load and plot the mask if it exists
  • Otherwise create a mask using the Mask pipeline
  • Reverse the mask in the y-direction, due to the coordinate convention difference between python and the Eiger software
  • Reverse the images in the y-direction
  • Apply the mask

Change these lines


In [26]:
mask_path = '/XF11ID/analysis/2016_3/masks/'
mask_name = 'Octo_11_mask.npy' #>= 160 C use this one

In [27]:
mask = load_mask(mask_path, mask_name, plot_ =  True, image_name = 'uid= %s-mask'%uid )
mask = np.array(mask, dtype = np.int32)



In [28]:
md['mask'] = mask
md['mask_file']= mask_path + mask_name 
md['NOTEBOOK_FULL_PATH'] = NOTEBOOK_FULL_PATH

In [29]:
maskr = mask[::-1,:]
imgsr = reverse_updown( imgs )
imgsra = apply_mask( imgsr, maskr )

In [30]:
#show_img( imgsra[0],  vmin=0.1, vmax=100, logs=True, image_name= 'uid= %s'%uid)

In [31]:
#show_img( imgsa[0],  vmin= .01, vmax=50, logs= True, image_name= 'uid= %s'%uid)

In [32]:
avg_imgr =  get_avg_img( imgsra, sampling = int(Nimg/3), plot_ = True, uid =uid)


Determine Compress Or Not

  • For the sake of simplicity, we always compress the data

In [33]:
print (len( np.where(avg_imgr)[0] ) / ( imgsra[0].size))
compress =  len( np.where(avg_imgr)[0] ) / ( imgsra[0].size) < 1  # compress if the photon occupation is below the threshold (set to 1 here, so the data are always compressed)

print (compress)
#compress = False


0.014493645347761437
True
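
The compressed format exploits the low photon occupation (about 1.4% here): a sparse frame is fully described by the indices and counts of its nonzero pixels. A generic sketch of that idea (this is not the actual .cmp layout, which is defined in chxanalys.chx_compress_analysis):

In [ ]:
frame = np.asarray( imgsra[0] )
pos = np.where( np.ravel(frame) > 0 )[0]       # indices of pixels with counts
vals = np.ravel(frame)[pos].astype(np.int32)   # the counts themselves
print( 'sparse fraction: %.4f'%( len(pos)/frame.size ) )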

Compress Data

  • Generate a compressed data file with the given filename
  • Replace the old mask with a new mask with hot pixels removed
  • Compute the average image
  • Compute the sum of each image
  • Find the bad_frame_list (frames whose image sum is above bad_pixel_threshold)
  • Check the shutter-open frame to get a good time series

In [34]:
good_start = 0  # make good_start at least 2 if the first frames are bad

In [35]:
bin_frame = True

In [36]:
if bin_frame:
    bins=10
    timeperframe *= bins
else:
    bins =1

In [37]:
if compress:
    if bins==1:
        filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%sud[1]
    else:
        filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s_bined--%s.cmp'%(sud[1],bins)    
    
    maskr, avg_imgr, imgsum, bad_frame_list = compress_eigerdata(imgsr, maskr, md, 
                        filename, 
                force_compress= False, bad_pixel_threshold= 5e10,nobytes=4, bins=bins)    
    
    min_inten = 0    
    good_start = max(good_start, np.where( np.array(imgsum) > min_inten )[0][0] )    
    print ('The good_start frame number is: %s '%good_start)
    
    FD = Multifile(filename, good_start, len(imgsr)//bins)
    
    plot1D( y = imgsum[ np.array( [i for i in np.arange( len(imgsum)) if i not in bad_frame_list])],
           title ='Uid= %s--imgsum'%uid, xlabel='Frame', ylabel='Total_Intensity',
           legend='imgsum'   )
    Nimg = Nimg//bins  # integer number of binned frames


Averaging images:   1%|          | 20/1800 [00:00<00:08, 198.29it/s]
Using already created compressed file with filename as :/XF11ID/analysis/Compressed_Data/uid_a3e323b4-d2aa-428f-8a1d-5d87b4fa30f1_bined--10.cmp.
Averaging images: 100%|██████████| 1800/1800 [00:08<00:00, 224.16it/s]
Get each frame intensity: 100%|██████████| 1800/1800 [00:01<00:00, 1649.85it/s]
No bad frames are involved.
The good_start frame number is: 0 


In [ ]:
#%system ls -lh {sud[2][0]+"*"}|tail -2 ; ls -lh {filename}

In [38]:
bad_pixel_threshold= 1e15  # re-define a bad-pixel threshold here if needed
#bad_pixel_low_threshold= 7.5e5  # re-define a bad-pixel low threshold here if needed

In [39]:
if bad_pixel_threshold<1e14:
    mask, avg_img, imgsum, bad_frame_list = compress_eigerdata(imgs, mask, md, filename, 
                    force_compress=False, bad_pixel_threshold= bad_pixel_threshold,
                bad_pixel_low_threshold=bad_pixel_low_threshold, nobytes=4)
    min_inten = 10
    good_start = max(good_start, np.where( np.array(imgsum) > min_inten )[0][0] )   
    
    print ('The good_start frame number is: %s '%good_start)

Define a good time series by choosing a good start and a good end


In [40]:
if False:
        good_start =0  #0
        min_inten = 10
        good_start =  max(good_start, np.where( np.array(imgsum) > min_inten )[0][0])    
        good_end =     len(imgsr)
        filename = '/XF11ID/analysis/Compressed_Data' +'/uid_%s.cmp'%sud[1] 
        FD = Multifile(filename, good_start, good_end)
        avg_img= get_avg_imgc( FD,  beg= None,end=None, plot_=False )
        imgsum,bad_frame_list = get_each_frame_intensityc( FD, bad_pixel_threshold= bad_pixel_threshold,
                    bad_pixel_low_threshold=bad_pixel_low_threshold, plot_=False )

In [41]:
if not compress:   
    #sampling = 1   # ideally, sampling should be 1
    sampling = 100  # a larger sampling interval speeds up this check
    
    good_start = check_shutter_open( imgsra,  min_inten=5, time_edge = [0,10], plot_ = False )
    print ('The good_start frame number is: %s '%good_start)
    good_series = max(good_start, apply_mask( imgsra[good_start:], maskr ))
    avg_imgr =  get_avg_img( good_series, sampling = sampling, plot_ = False, uid =uid)
    imgsum, bad_frame_list = get_each_frame_intensity(good_series ,sampling = sampling, 
                                bad_pixel_threshold=1.2e8,  plot_ = False, uid=uid)

In [42]:
#print ('The bad frame list is: %s'% bad_frame_list)
print ('The number of bad frames is : %s '%len(bad_frame_list))
print ('The good_start frame number is: %s '%good_start)
md['good_start'] = good_start
md['bad_frame_list'] = bad_frame_list


The number of bad frames is : 0 
The good_start frame number is: 0 

In [43]:
imgsum_y = imgsum[ np.array( [i for i in np.arange( len(imgsum)) if i not in bad_frame_list])]
imgsum_x = np.arange( len( imgsum_y))
save_lists(  [imgsum_x, imgsum_y], label=['Frame', 'Total_Intensity'], filename='uid=%s-imgsum'%uid, path= data_dir  )

In [44]:
plot1D( y = imgsum_y, title ='uid=%s--img-sum-t'%uid, xlabel='Frame',
       ylabel='Total_Intensity', legend='imgsum', save=True, path=data_dir)


Plot the time-averaged intensity image


In [552]:
#avg_img = get_avg_imgc( FD,  beg=0,end=10000,sampling = 1, plot_ = False )
show_img( avg_imgr+1e-6,  vmin=.002, vmax= 1, logs= True, image_name= 'uid=%s--img-avg-'%uid,
        save=True, path=data_dir) 
md['avg_img'] = avg_imgr



In [46]:
#plot1D(  np.array(avg_imgr)[1405,] )

In [47]:
#plot1D(  np.array(avg_imgr)[:,1576] )

In [ ]:

Get Q-Map (Qz and Qr)

  • Users: enter the incident-beam and reflected-beam centers here!

Change these lines


In [48]:
inc_x0 =  1572
inc_y0 =  64

refl_x0 =  1575

refl_y0 =  1405

Don't Change these lines below here


In [49]:
alphaf,thetaf, alphai, phi = get_reflected_angles( inc_x0, inc_y0,refl_x0 , refl_y0, Lsd=Ldet )
qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lambda_, Lsd=Ldet )


The incident_angle (alphai) is: 0.721967797434
The incident_angle (alphai) is: 0.721967797434
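
For reference, convert_gisaxs_pixel_to_q presumably follows the standard GiSAXS geometry. With $k_0 = 2\pi/\lambda$, incident angle $\alpha_i$, exit angle $\alpha_f$, and in-plane angle $2\theta_f$, the wave-vector transfer is $q_x = k_0(\cos 2\theta_f \cos\alpha_f - \cos\alpha_i)$, $q_y = k_0 \sin 2\theta_f \cos\alpha_f$, $q_z = k_0(\sin\alpha_f + \sin\alpha_i)$, and $q_r = \sqrt{q_x^2 + q_y^2}$; see chxanalys for the exact conventions.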

In [50]:
#show_img( qr +qz)

In [51]:
#ticks = show_qzr_map(  qr,qz, inc_x0, data = None, Nzline=10,  Nrline=10   )

In [52]:
ticks = show_qzr_map(  qr,qz, inc_x0, data = avg_imgr, Nzline=10,  Nrline=10   )


Define Q-ROI

  • Users provide the Qz and Qr regions of interest here for XPCS analysis

Get 1D Curve (Q||-intensity)

  • Users define the cuts here for static analysis

Change these lines


In [53]:
qz_start = 0.093
qz_end = 0.12
qz_num= 1
qz_width = 0.004


qr_start =  0.002
qr_end = 0.14
qr_num = 1
qr_width = 0.14

Qr = [qr_start , qr_end, qr_width, qr_num]
Qz=  [qz_start,   qz_end,  qz_width , qz_num ]
qr_edge, qr_center = get_qedge(qr_start, qr_end, qr_width, qr_num )
qz_edge, qz_center = get_qedge(qz_start, qz_end, qz_width, qz_num )      
label_array_qz = get_qmap_label(qz, qz_edge)
label_array_qr = get_qmap_label(qr, qr_edge)
label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, 
                                       qz_center, qr_center)
labels_qzr, indices_qzr = roi.extract_label_indices(label_array_qzr)
labels_qz, indices_qz = roi.extract_label_indices(label_array_qz)
labels_qr, indices_qr = roi.extract_label_indices(label_array_qr)
num_qz = len(np.unique(labels_qz))
num_qr = len(np.unique(labels_qr))
num_qzr = len(np.unique(labels_qzr))

In [54]:
boxes_qr = label_array_qzr  
box_maskr_qr = boxes_qr*maskr
#show_qzr_roi( avg_imgr, box_maskr_qr, refl_x0, ticks, alpha=0.5, save=True, path=data_dir, uid=uid   )

In [55]:
qr_1d = get_1d_qr( avg_imgr, Qr, Qz, qr, qz, inc_x0,  None,  True, ticks, .8,
                  save= True, setup_pargs=setup_pargs )


The qr_edge is:  [ 0.002  0.142]
The qr_center is:  [0.072000000000000008]
The qz_edge is:  [ 0.093  0.097]
The qz_center is:  [0.095000000000000001]
The qr_1d is saved in /XF11ID/analysis/2016_3/yuzhang/Results/a3e323/ with filename as uid=a3e323--qr_1d.csv
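
The printed edges and centers follow directly from the Qr/Qz inputs. A hypothetical re-implementation of the get_qedge logic, consistent with the values printed above (qedge_sketch is an illustrative name, not the chxanalys API):

In [ ]:
def qedge_sketch( q_start, q_end, q_width, q_num ):
    # q_num windows of width q_width; the lower edges are spread
    # evenly from q_start toward q_end (a single window starts at q_start)
    if q_num == 1:
        lowers = np.array( [q_start] )
    else:
        lowers = np.linspace( q_start, q_end - q_width, q_num )
    edges = np.ravel( [ (lo, lo + q_width) for lo in lowers ] )
    centers = lowers + q_width/2.
    return edges, centers

print( qedge_sketch( 0.002, 0.14, 0.14, 1 ) )   # edges [0.002, 0.142], center [0.072]
print( qedge_sketch( 0.093, 0.12, 0.004, 1 ) )  # edges [0.093, 0.097], center [0.095]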

Get qr as a function of time


In [73]:
time_edge = create_time_slice( N= Nimg, slice_num= 10, slice_width= 18, edges = None )
time_edge =  np.array( time_edge, dtype=np.int_ ) + good_start

In [74]:
time_edge


Out[74]:
array([[   0,   18],
       [ 261,  279],
       [ 441,  459],
       [ 621,  639],
       [ 801,  819],
       [ 981,  999],
       [1161, 1179],
       [1341, 1359],
       [1521, 1539],
       [1782, 1800]])

In [76]:
qr_t = get_t_qrc( FD, time_edge, Qr, Qz, qr, qz)


Averaging images: 100%|██████████| 18/18 [00:00<00:00, 181.59it/s]
Averaging images: 100%|██████████| 18/18 [00:00<00:00, 311.01it/s]
Averaging images: 100%|██████████| 18/18 [00:00<00:00, 293.69it/s]
Averaging images: 100%|██████████| 18/18 [00:00<00:00, 276.22it/s]
Averaging images: 100%|██████████| 18/18 [00:00<00:00, 247.38it/s]
Averaging images: 100%|██████████| 18/18 [00:00<00:00, 239.39it/s]
Averaging images: 100%|██████████| 18/18 [00:00<00:00, 223.51it/s]
Averaging images: 100%|██████████| 18/18 [00:00<00:00, 211.51it/s]
Averaging images: 100%|██████████| 18/18 [00:00<00:00, 202.59it/s]
Averaging images: 100%|██████████| 18/18 [00:00<00:00, 186.06it/s]

In [79]:
plot_t_qrc( qr_t, time_edge, pargs=setup_pargs, xlim = [0,0.05], ylim=[1e-3, .15])



In [ ]:

Create X boxes along each of Y qz lines to generate X*Y ROIs


In [339]:
qz_start = 0.093
qz_end = 0.11
qz_num= 2
qz_width = 0.004

qr_start =  0.002
qr_end = 0.14
qr_num = 10
qr_width = 0.012
Qr = [qr_start , qr_end, qr_width, qr_num]
Qz=  [qz_start,   qz_end,  qz_width , qz_num ]

In [338]:
ref_box=True
cum_box=True

Create one box to inspect the reflected beam


In [347]:
if ref_box:
    qz_start = 0.163 - 0.004
    qz_end = 0.17
    qz_num= 1
    qz_width = 0.004


    qr_start =  -0.002
    qr_end = 0.1
    qr_num = 1
    qr_width = 0.004
    
    Qr_ref = [qr_start , qr_end, qr_width, qr_num]
    Qz_ref =  [qz_start,   qz_end,  qz_width , qz_num ]

Create separate 'customized' boxes


In [435]:
if cum_box:
    qz_start = 0.11
    qz_end = 0.16
    qz_num= 1
    qz_width = 0.05
    
    qr_start =  0.01
    qr_end = 0.1
    qr_num = 1
    qr_width = 0.008

    Qr_cum = [qr_start, qr_end, qr_width, qr_num]
    Qz_cum=  [qz_start,   qz_end,  qz_width, qz_num ]

In [ ]:

Don't Change these lines below here

  • Create label array (Qz, Qr, Qzr boxes)

In [466]:
[qr_start , qr_end, qr_width, qr_num] = Qr
[qz_start,   qz_end,  qz_width , qz_num ] = Qz
qr_edge, qr_center = get_qedge(qr_start, qr_end, qr_width, qr_num )
qz_edge, qz_center = get_qedge(qz_start, qz_end, qz_width, qz_num ) 

label_array_qz = get_qmap_label(qz, qz_edge)
label_array_qr = get_qmap_label(qr, qr_edge)
label_array_qzr, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, 
                                       qz_center, qr_center)
labels_qzr, indices_qzr = roi.extract_label_indices(label_array_qzr)
labels_qz, indices_qz = roi.extract_label_indices(label_array_qz)
labels_qr, indices_qr = roi.extract_label_indices(label_array_qr)
num_qz = len(np.unique(labels_qz))
num_qr = len(np.unique(labels_qr))
num_qzr = len(np.unique(labels_qzr))

In [467]:
if ref_box:
    
    [qr_start , qr_end, qr_width, qr_num] = Qr_ref     
    [qz_start,   qz_end,  qz_width , qz_num ] = Qz_ref
    qr_edge, qr_center_ref = get_qedge(qr_start, qr_end, qr_width, qr_num )
    qz_edge, qz_center_ref = get_qedge(qz_start, qz_end, qz_width, qz_num )

    label_array_qz = get_qmap_label(qz, qz_edge)
    label_array_qr = get_qmap_label(qr, qr_edge)
    label_array_qzr_ref, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, 
                                           qz_center_ref, qr_center_ref)
    labels_qzr_ref, indices_qzr = roi.extract_label_indices(label_array_qzr_ref)
    labels_qz, indices_qz = roi.extract_label_indices(label_array_qz)
    labels_qr, indices_qr = roi.extract_label_indices(label_array_qr)
    num_qz = len(np.unique(labels_qz))
    num_qr = len(np.unique(labels_qr))
    num_qzr_ref = len(np.unique(labels_qzr_ref))

In [468]:
if cum_box:
    [qr_start , qr_end, qr_width, qr_num] = Qr_cum     
    [qz_start,   qz_end,  qz_width , qz_num ] = Qz_cum
    qr_edge, qr_center_cum  = get_qedge(qr_start, qr_end, qr_width, qr_num )
    qz_edge, qz_center_cum  = get_qedge(qz_start, qz_end, qz_width, qz_num )

    label_array_qz = get_qmap_label(qz, qz_edge)
    label_array_qr = get_qmap_label(qr, qr_edge)
    label_array_qzr_cum, qzc, qrc = get_qzrmap(label_array_qz, label_array_qr, 
                                           qz_center_cum, qr_center_cum)
    labels_qzr_cum, indices_qzr = roi.extract_label_indices(label_array_qzr_cum)
    labels_qz, indices_qz = roi.extract_label_indices(label_array_qz)
    labels_qr, indices_qr = roi.extract_label_indices(label_array_qr)
    num_qz = len(np.unique(labels_qz))
    num_qr = len(np.unique(labels_qr))
    num_qzr_cum = len(np.unique(labels_qzr_cum))

In [444]:
#show_img( label_array_qzr_cum  )

In [469]:
if ref_box:
    for i in range(1, 1+ num_qzr_ref):
        label_array_qzr_ref[np.where( label_array_qzr_ref ==i ) ] = num_qzr + i 
    if cum_box:
        for i in range(1, 1+ num_qzr_cum):
            label_array_qzr_cum[np.where( label_array_qzr_cum ==i ) ] = num_qzr + num_qzr_ref + i
  • Extract the labeled array

In [470]:
boxes = label_array_qzr 
if ref_box:
    boxes = label_array_qzr + label_array_qzr_ref 
    if cum_box:
        boxes = label_array_qzr + label_array_qzr_ref   +  label_array_qzr_cum
box_maskr = boxes*maskr

qind, pixelist = roi.extract_label_indices(box_maskr)
noqs = len(np.unique(qind))

In [471]:
md['ring_mask'] = box_maskr
md['qr_center']= qr_center
md['qr_edge'] = qr_edge
md['qz_center']= qz_center
md['qz_edge'] = qz_edge
md['beam_center_x'] = inc_x0
md['beam_center_y']=  inc_y0
md['refl_center_x'] = refl_x0
md['refl_center_y']=  refl_y0
md['incident angle'] = alphai*180/np.pi
md['data_dir'] = data_dir

psave_obj(  md, data_dir + 'uid=%s-md'%uid ) #save the setup parameters

In [472]:
show_qzr_roi( avg_imgr, box_maskr, refl_x0, ticks, alpha=0.5,
             save=True, path=data_dir, uid=uid   )


  • Plot the average image with the Q-regions (boxes) of interest

In [445]:
plot_qr_1d_with_ROI( qr_1d, qr_center, loglog=False, save=True, setup_pargs=setup_pargs )



In [ ]:

  • Number of pixels in each q box

In [446]:
nopr = np.bincount(qind, minlength=(noqs+1))[1:]
nopr


Out[446]:
array([ 6496,  6459,  6429,  6499,  3565,  3237,  3267,  3234,  3185,
        3217,  6534,  6402,  6399,  6534,  3597,  3250,  3161,  3234,
        3234,  3251,  1089, 48975])
  • Check the intensity of one box

In [447]:
roi_inten = check_ROI_intensity( avg_imgr, box_maskr, ring_number= 1, uid =uid )


Check beam damage

  • Do a waterfall analysis

In [448]:
#if compress:
#    qindex = 1
#    wat = cal_waterfallc( FD, box_maskr, qindex= qindex, save =True, path=data_dir, uid=uid)

In [449]:
#if compress:
#    plot_waterfallc( wat, qindex, aspect=None,  vmax= 2, uid=uid, save =True, 
#                    path=data_dir, beg= FD.beg)
  • Check the mean intensity of each box as a function of time

In [450]:
if compress:
    times, mean_int_sets = get_each_ring_mean_intensityc(FD, box_maskr,
                        timeperframe = None, plot_ = True, uid = uid , save=True, path=data_dir  )
    ring_avg = np.average( mean_int_sets, axis=0)
    
else:
    
    mean_int_sets = get_each_ring_mean_intensity(good_series, box_maskr, sampling = sampling,
                                timeperframe = md['frame_time']*sampling, plot_ = True, uid = uid  )


Get ROI intensity of each frame: 100%|██████████| 1800/1800 [00:06<00:00, 278.43it/s]

In [451]:
fig, ax = plt.subplots()
for i in range(num_qzr):
    #plot1D( mean_int_sets[:,i], ax=ax, legend='box_%d'%i )
    ax.plot( mean_int_sets[:,i], marker = 'o', ls='-',label= 'box_%d'%i)
    ax.legend(loc = 'best', fontsize=6)



In [452]:
#plot1D( x= range( len(mean_int_sets)), y= mean_int_sets[:,1])

One-time Correlation

Note: Enter the number of buffers for the multi-tau one-time correlation; the number of buffers has to be even. More details in https://github.com/scikit-beam/scikit-beam/blob/master/skbeam/core/correlation.py
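
As a reference for the lag structure, the multi-tau scheme uses num_buf linearly spaced lags per level and doubles the lag spacing at each subsequent level. A quick check with the multi_tau_lags helper imported above (the three-value return follows its usage later in this notebook):

In [ ]:
num_bufs_demo = 8    # must be even
num_levels_demo = 8
tot_channels, lag_steps_demo, dict_lag = multi_tau_lags( num_levels_demo, num_bufs_demo )
print( lag_steps_demo )                  # frame lags: 0..7, then 8,10,12,14, then 16,20,24,28, ...
print( lag_steps_demo * timeperframe )   # the same lags in seconds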

If defining another good series


In [562]:
if True:
    if compress:
        FD = Multifile(filename, 1300, 1800)
    else:
        good_start = 1
        good_end =  1000
        good_series = apply_mask( imgs[good_start:good_end-1], mask )

In [563]:
lag_steps = None
bad_frame_list


Out[563]:
array([], dtype=int64)

In [564]:
para_cal = False  #True   #if True to use the parallel calculation

In [565]:
#FD = Multifile(filename, good_start, len(imgs))

In [566]:
#show_img(FD_bin.rdframe(0), logs=True, vmin=.01, vmax=1)

Do one-time


In [567]:
t0 = time.time()
if compress:
    if para_cal:
        g2, lag_steps  =cal_g2p( FD,  box_maskr,
                bad_frame_list, good_start, num_buf = 8, 
                        imgsum= None, norm=None )    
    else:
        g2, lag_steps  =cal_g2c( FD,  box_maskr, bad_frame_list, good_start, num_buf = 8, 
                        imgsum= None, norm=None )        
else:
    bad_image_process = False
    if  len(bad_frame_list):
        bad_image_process = True
    print( bad_image_process  )    

    g2, lag_steps  =cal_g2( good_series,  box_maskr, bad_image_process,
                       bad_frame_list, good_start, num_buf = 8 )
run_time(t0)


In this g2 calculation, the buf and lev number are: 8--8--
500 frames will be processed...
100%|██████████| 500/500 [00:13<00:00, 38.74it/s]
G2 calculation DONE!
Total time: 0.23 min


In [568]:
lag_steps


Out[568]:
array([  0,   1,   2,   3,   4,   5,   6,   7,   8,  10,  12,  14,  16,
        20,  24,  28,  32,  40,  48,  56,  64,  80,  96, 112, 128, 160,
       192, 224, 256, 320, 384])

In [569]:
taus = lag_steps * timeperframe
res_pargs = dict(taus=taus, qz_center=qz_center,
        qr_center=  qr_center,  path=data_dir, uid=uid )

In [480]:
save_gisaxs_g2(  g2[:,:num_qzr], res_pargs )


The correlation function of uid= a3e323 is saved with filename as /XF11ID/analysis/2016_3/yuzhang/Results/a3e323/g2-a3e323-20161013-1756-.csv

Plot the one-time correlation functions


In [570]:
plot_gisaxs_g2( g2[:,:num_qzr], taus,  vlim=[0.95, 1.1], 
               res_pargs=res_pargs, one_plot=True)



In [571]:
if cum_box:
    res_pargs_cum = dict(taus=taus, qz_center= np.array(qz_center_cum),
            qr_center=  np.array(qr_center_cum),  path=data_dir, uid=uid )

    plot_gisaxs_g2( g2[:,num_qzr + num_qzr_ref:], taus,  vlim=[0.99, 1.02], 
                   res_pargs=res_pargs_cum, one_plot=True)


Fit g2


In [518]:
fit= True

In [519]:
if fit:
    fit_result = fit_gisaxs_g2( g2[:,:num_qzr], res_pargs, function = 'stretched',  vlim=[0.95, 1.1], 
                fit_variables={'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True},
                guess_values={'baseline':1.229,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01},
                              one_plot= True)
    if cum_box: 
        fit_result_cum = fit_gisaxs_g2( g2[:,num_qzr + num_qzr_ref:], 
                            res_pargs_cum, function = 'stretched',  vlim=[0.99, 1.02], 
                fit_variables={'baseline':True, 'beta':True, 'alpha':False,'relaxation_rate':True},
                guess_values={'baseline':1.229,'beta':0.05,'alpha':1.0,'relaxation_rate':0.01},
                              one_plot= True)
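
For reference, a minimal sketch of the model behind the 'stretched' option, assuming the Model class imported above is lmfit's Model (stretched_g2 and the seed values are illustrative, not the fit_gisaxs_g2 internals):

In [ ]:
def stretched_g2( x, baseline, beta, relaxation_rate, alpha ):
    # g2(t) = baseline + beta * exp( -2*(Gamma*t)**alpha )
    return baseline + beta * np.exp( -2*( relaxation_rate * x )**alpha )

g2_model = Model( stretched_g2 )
pars = g2_model.make_params( baseline=1.0, beta=0.05, relaxation_rate=0.01, alpha=1.0 )
pars['alpha'].vary = False   # fix alpha, as in fit_variables above
#result = g2_model.fit( g2[1:, 0], pars, x=taus[1:] )   # e.g., fit the first ROI, skipping tau=0
#print( result.best_values )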



In [486]:
psave_obj( fit_result, data_dir + 'uid=%s-g2-fit-para'%uid )

Plot the relaxation time versus qr for different qz


In [487]:
fit_qr_qz_rate(  qr_center, qz_center, fit_result, power_variable= False,
           uid=uid, path= data_dir )


The fitted diffusion coefficient D0 is:  5.463e+00   A^2S-1
The fitted diffusion coefficient D0 is:  3.814e+00   A^2S-1
Out[487]:
array([ 5.46290752,  3.81357846])
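
With power_variable=False, the rates for each qz line are fit to $\Gamma = D_0 q_r^2$. A minimal numpy sketch of that one-parameter fit (fit_D0 and the demo values are illustrative, not the fit_qr_qz_rate internals):

In [ ]:
def fit_D0( qr_vals, rates ):
    # least-squares slope through the origin for Gamma = D0 * q^2:
    # D0 = sum( q^2 * Gamma ) / sum( q^4 )
    q2 = np.asarray( qr_vals )**2
    return np.sum( q2 * np.asarray(rates) )/np.sum( q2**2 )

qr_demo = np.array( [0.01, 0.02, 0.04] )   # A^-1
rates_demo = 5.0 * qr_demo**2              # s^-1
print( fit_D0( qr_demo, rates_demo ) )     # recovers D0 = 5.0 A^2 s^-1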

For two-time
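
Per ROI, the two-time correlation computed below has the standard normalization $C(t_1, t_2) = \langle I(t_1)\, I(t_2) \rangle_q / (\langle I(t_1) \rangle_q \langle I(t_2) \rangle_q)$, where $\langle \cdot \rangle_q$ averages over the pixels in the ROI; equilibrium dynamics appear as a band of constant width along the $t_1 = t_2$ diagonal.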


In [ ]:
#if compress:
#    FD = Multifile(filename, 0, Nimg)

In [520]:
run_two_time =  True

In [521]:
para_cal = False

In [517]:
%run /XF11ID/analysis/Analysis_Pipelines/Develop/chxanalys/chxanalys/chx_compress.py
%run /XF11ID/analysis/Analysis_Pipelines/Develop/chxanalys/chxanalys/XPCS_GiSAXS.py

In [522]:
if run_two_time:
    
    if compress:
        norm = None
        data_pixel =   Get_Pixel_Arrayc( FD, pixelist,  norm=norm ).get_data()
        if para_cal:
            g12b = auto_two_Arrayp(  data_pixel,  box_maskr, index = None   )    
        else:    
            g12b = auto_two_Arrayc(  data_pixel,  box_maskr, index = None   ) 
           
        if lag_steps is None:
            num_bufs=8
            noframes = FD.end - FD.beg
            num_levels = int(np.log( noframes/(num_bufs-1))/np.log(2) +1) +1
            tot_channels, lag_steps, dict_lag = multi_tau_lags(num_levels, num_bufs)
            max_taus= lag_steps.max()
    else:
        qind, pixelist = roi.extract_label_indices(   box_maskr  )
        t0 = time.time()
        data_pixel =   Get_Pixel_Array( good_series , pixelist).get_data()
        run_time(t0)
        g12b = auto_two_Array( good_series, box_maskr, data_pixel = data_pixel )


100%|██████████| 1800/1800 [00:12<00:00, 148.40it/s]
100%|██████████| 22/22 [00:09<00:00,  1.14it/s]

In [ ]:


In [535]:
if run_two_time:
    show_C12(g12b, q_ind= 21, N1=0, N2=1800, vmin=0.99, vmax=1.15,
             timeperframe=timeperframe,save=True, path= data_dir, uid = uid )



In [575]:
if run_two_time:
    if lag_steps is None:
        num_bufs=8
        noframes = FD.end - FD.beg
        num_levels = int(np.log( noframes/(num_bufs-1))/np.log(2) +1) +1
        tot_channels, lag_steps, dict_lag = multi_tau_lags(num_levels, num_bufs)
        max_taus= lag_steps.max()
    
    max_taus= lag_steps.max()
    t0=time.time()
    g2b = get_one_time_from_two_time(g12b[1300:1800,1300:1800,:]  )[:max_taus]
    run_time(t0)
    taus2 = np.arange( g2b.shape[0])[:max_taus] *timeperframe

    res_pargs2 = dict(taus=taus2, qz_center=qz_center, qr_center=qr_center,
             path=data_dir, uid=uid + 'g2_from_two-time'        )        
    #save_gisaxs_g2(  g2b[:,:num_qzr], res_pargs2,  taus=np.arange( g2b.shape[0]) *timeperframe,
    #             filename='g2_from_two-time')


Total time: 0.00 min
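
Conceptually, the one-time $g_2$ above is recovered by averaging the two-time matrix along diagonals of constant lag $\tau = t_2 - t_1$. A minimal numpy sketch of that idea for a single q index (one_time_sketch is illustrative; the chxanalys routine handles all ROIs and the proper normalization at once):

In [ ]:
def one_time_sketch( C ):
    # average C(t1, t2) along each diagonal with fixed lag tau = t2 - t1
    N = C.shape[0]
    return np.array( [ np.diagonal( C, offset=tau ).mean() for tau in range(N) ] )

# e.g., for the q-th ROI:  g2b_q = one_time_sketch( g12b[:, :, q] )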

In [576]:
plot_gisaxs_g2( g2b[:,:num_qzr], np.arange( g2b.shape[0]) *timeperframe,
               vlim=[0.95, 1.1], 
               res_pargs=res_pargs2, one_plot=True)



In [ ]:



In [ ]:


In [573]:
if run_two_time:
    plot_gisaxs_two_g2( g2[:,:num_qzr], taus, 
                 g2b[:,:num_qzr], np.arange( g2b.shape[0]) *timeperframe,
                 res_pargs=res_pargs, vlim=[.9, 1.1],one_plot= True, uid =uid )


Four-time Correlation


In [538]:
run_four_time = True

In [539]:
if run_four_time:
    t0=time.time()
    g4 = get_four_time_from_two_time(g12b, g2=g2b)[:max_taus]
    run_time(t0)


Total time: 0.01 min

In [540]:
if run_four_time:
    taus4 = np.arange( g4.shape[0])*timeperframe
    res_pargs4 = dict(taus=taus4, qz_center=qz_center, qr_center=qr_center, path=data_dir, uid=uid )
    save_gisaxs_g2(   g4[:,:num_qzr],  res_pargs4,   filename='uid=%s--g4.csv' % (uid) )


The correlation function of uid= a3e323 is saved with filename as /XF11ID/analysis/2016_3/yuzhang/Results/a3e323/uid=a3e323--g4.csv

In [541]:
if run_four_time:
        plot_gisaxs_g4( g4[:,:num_qzr], taus4,  vlim=[0.95, 1.05], res_pargs=res_pargs, one_plot= True)


Create a PDF report


In [542]:
create_report = True

In [543]:
pdf_out_dir = os.path.join('/XF11ID/analysis/', CYCLE, username, 'Results/')

In [553]:
if create_report:
    c= create_pdf_report(  data_dir, uid, pdf_out_dir,
                    filename= "XPCS_Analysis_Report_for_uid=%s.pdf"%uid, report_type='gisaxs')
    
    #Page one: Meta-data/Iq-Q/ROI
    c.report_header(page=1)
    c.report_meta( top=730)
    c.report_static( top=560, iq_fit =None )
    c.report_ROI( top= 300)
    
    #Page Two: img~t/iq~t/waterfall/mean~t/g2/rate~q
    c.new_page()
    c.report_header(page=2)
    c.report_time_analysis( top= 720)
    c.report_one_time( top= 350)  
    
    #Page Three: two-time/two g2
    if run_two_time:
        c.new_page()
        c.report_header(page=3)
        c.report_two_time(  top= 720 )      

    if run_four_time:
        c.new_page()
        c.report_header(page=4)
        c.report_four_time(  top= 720 ) 
        
        
    c.save_page()
    c.done()


****************************************
The pdf report is created with filename as: /XF11ID/analysis/2016_3/yuzhang/Results/XPCS_Analysis_Report_for_uid=a3e323.pdf
****************************************

The End!

Attach the PDF report to Olog


In [ ]:
from chxanalys.chx_olog import LogEntry,Attachment, update_olog_uid, update_olog_id

In [ ]:
os.environ['HTTPS_PROXY'] = 'https://proxy:8888'
os.environ['no_proxy'] = 'cs.nsls2.local,localhost,127.0.0.1'

In [ ]:
c.filename

In [ ]:
filename = c.filename
atch=[  Attachment(open(filename, 'rb')) ] 

update_olog_uid( uid=uid, text='Add XPCS Analysis PDF Report', attachments= atch )

Attach the Analysis Notebook to Olog


In [ ]:
NOTEBOOK_FULL_PATH

In [ ]:
note_path = '2016_2/yuzhang/August/XPCS_GiSAXS_Single_Run_Sep.ipynb'

In [ ]:
filename = '/XF11ID/analysis/'+ note_path #NOTEBOOK_FULL_PATH

atch=[  Attachment(open(filename, 'rb')) ] 
update_olog_uid( uid=uid, text='Add XPCS Analysis notebook', attachments= atch )

In [ ]:


In [ ]: