In [30]:
from chxanalys.chx_packages import *
%matplotlib notebook
plt.rcParams.update({'figure.max_open_warning': 0})
import pandas as pds
#%reset -f #for clean up things in the memory
In [31]:
# Output/result directory on the beamline analysis share.
# NOTE(review): hardcoded absolute path — only valid on the CHX analysis machines.
data_dir = '/XF11ID/analysis/2017_1/manisen/Results/'
In [ ]:
In [32]:
# Short (truncated) uid for each measurement; the key encodes the sample
# temperature (175 C) and the run index.
suid_dict = {
    'Temp=175C_c_1': '2f781b',
    'Temp=175C_c_2': 'b57f7d',
    'Temp=175C_c_3': '9846cb',
    'Temp=175C_c_4': '1cca34',
    'Temp=175C_c_5': '8a6c63',
    'Temp=175C_c_6': 'b5e2ec',
    'Temp=175C_c_7': 'cff9b6',
}
In [33]:
# Resolve each short uid to the full uid via the run metadata.
# IMPROVED: replaced the manual `for u in list(d.keys())` accumulation loop
# with an idiomatic dict comprehension over .items() (same keys, same values,
# same insertion order).
uid_dict = {label: get_meta_data(short_uid)['uid']
            for label, short_uid in suid_dict.items()}
print(uid_dict)
In [34]:
# Accumulator: sample label -> XPCS results dict loaded from the per-uid HDF5 file.
total_res = {}
In [35]:
# Load the saved XPCS results for every sample from its per-uid HDF5 file.
# The result folder is named after the short uid; the file after the full uid.
for sample in uid_dict:
    result_dir = data_dir + suid_dict[sample] + '/'
    total_res[sample] = extract_xpcs_results_from_h5(
        filename='uid=%s_Res.h5' % uid_dict[sample],
        import_dir=result_dir)
In [36]:
# Show which result arrays are available, using the first sample as reference.
first_sample = list(uid_dict)[0]
print(total_res[first_sample].keys())
In [ ]:
In [23]:
# Index of the single q-bin of interest for all plots below (previously 3).
qth_interest = 9
In [37]:
# Overlay the fourth-order time correlation g4(t) of every sample at the
# chosen q-bin, on a log time axis, and save the figure to data_dir.
fig, ax = plt.subplots()
for u in sorted(uid_dict):
    print(u)
    res = total_res[u]
    plot1D(x=res['taus4'], y=res['g4'][:, qth_interest],
           ax=ax, legend=u, ylim=[0, 0.0001], legend_size=8,
           xlabel='t (sec)', ylabel='g4',
           title='four_order_time_correlation-->q=%s' % res['qval_dict'][qth_interest][0],
           logx=True)
plt.savefig(data_dir + 'g4_101k44pc_175_new_7.png')
In [38]:
# Re-fit the one-time correlation g2(t) of every sample with a
# stretched-exponential model (baseline and alpha held fixed) and save the
# fitted parameters to a per-sample CSV in data_dir.
fit_g2_func = 'stretched'
for u in sorted(uid_dict):
    g2 = total_res[u]['g2']
    taus = total_res[u]['taus']
    g2_fit_result, taus_fit, g2_fit = get_g2_fit_general(
        g2, taus,
        function=fit_g2_func, vlim=[0.95, 1.05], fit_range=None,
        fit_variables={'baseline': False, 'beta': True,
                       'alpha': False, 'relaxation_rate': True},
        guess_values={'baseline': 1.0, 'beta': 0.05,
                      'alpha': 1.0, 'relaxation_rate': 0.01, })
    g2_fit_paras = save_g2_fit_para_tocsv(
        g2_fit_result, filename=u + '_g2_fit_paras.csv', path=data_dir)
In [40]:
# Normalized one-time correlation: subtract the fitted baseline and divide by
# the fitted beta (contrast) at the chosen q-bin, using the re-fit parameters
# reloaded from the per-sample CSV files written by the previous cell.
fig, ax = plt.subplots()
for u in sorted(uid_dict):
    g2_curve = total_res[u]['g2'][1:, qth_interest]
    fits = pds.read_csv(data_dir + u + '_g2_fit_paras.csv')  # re-fit results
    g1 = (g2_curve - fits['baseline'][qth_interest]) / fits['beta'][qth_interest]
    plot1D(x=total_res[u]['taus'][1:], y=g1,
           ax=ax, legend=u, ylim=[-0.1, 1.15], legend_size=10,
           xlabel='tau (sec)', ylabel='g1',
           title='normalized' + 'one_time_correlation-->q=%s' % total_res[u]['qval_dict'][qth_interest][0],
           logx=True)
plt.savefig(data_dir + 'normalized' + 'g__101k44pc_175_new_7.png')
In [41]:
# Overlay the raw one-time correlation g2(t) of every sample at the chosen
# q-bin and save the figure to data_dir.
fig, ax = plt.subplots()
for u in sorted(uid_dict):
    res = total_res[u]
    plot1D(x=res['taus'], y=res['g2'][:, qth_interest],
           ax=ax, legend=u, ylim=[0.99, 1.08], legend_size=8,
           xlabel='tau (sec)', ylabel='g2',
           title='one_time_correlation-->q=%s' % res['qval_dict'][qth_interest][0],
           logx=True)
plt.savefig(data_dir + 'g2_101k44pc__175_new_7.png')
In [42]:
# Build the array of q values from one sample's qval_dict.
# BUG FIX: the original relied on the loop variable `u` leaking from a previous
# cell (hidden notebook state; breaks on a fresh kernel running this cell alone).
# Use an explicit reference key instead; sorted(...)[-1] is exactly the value
# the leaked `u` held after the sorted loops above, so the result is unchanged.
# Assumes qval_dict is keyed 0..n-1 and identical across samples — TODO confirm.
ref_sample = sorted(uid_dict)[-1]
qval_dict = total_res[ref_sample]['qval_dict']
qs = np.array([qval_dict[i][0] for i in range(len(qval_dict))])
In [43]:
# Relaxation time (1 / fitted relaxation rate) vs q for every sample, from the
# re-fit parameters saved in the per-sample CSV files; log-log axes.
# FIX: removed the unused local `x = qs` (plot1D is already passed qs directly).
fig, ax = plt.subplots()
for u in sorted(uid_dict):
    relaxation_time = 1 / (pds.read_csv(data_dir + u + '_g2_fit_paras.csv')['relaxation_rate'])
    plot1D(x=qs, y=relaxation_time, ax=ax, ylim=[0, 200], legend=u, legend_size=8, ls='--',
           xlabel='q, (A-1)', ylabel='relaxation_time, (s)',
           title='q~relaxation_time', logx=True, logy=True)
plt.savefig(data_dir + 'q~relaxation_time.png')
In [ ]:
In [ ]: