In [1]:
# header for 2018-1 kernel
from pyCHX.chx_packages import *
%matplotlib notebook
plt.rcParams.update({'figure.max_open_warning': 0})
plt.rcParams.update({ 'image.origin': 'lower' })
plt.rcParams.update({ 'image.interpolation': 'none' })
import pickle as cpk
from pyCHX.chx_xpcs_xsvs_jupyter_V1 import *
#%run /home/yuzhang/pyCHX_link/pyCHX/chx_generic_functions.py
import papermill as pm
# note: 'inline' here overrides the 'notebook' backend selected above
%matplotlib inline
In [2]:
# import database -> should be hidden from the user inside the package
import datetime
import time  # used by the compression polling loop below
import pymongo
from tqdm import tqdm
from bson import ObjectId
import matplotlib.patches as mpatches
from IPython.display import clear_output
cli = pymongo.MongoClient('xf11id-ca')
samples_2 = cli.get_database('samples').get_collection('samples_2')
data_acquisition_collection = cli.get_database('samples').get_collection('data_acquisition_collection')
beamline_pos = cli.get_database('samples').get_collection('beamline_pos')
from databroker import Broker
db = Broker.named('temp') # for real applications, 'temp' would be 'chx'
print('available databases:')
print(cli.list_database_names())
print('\navailable collections in database samples:')
print(cli.samples.list_collection_names())
In [ ]:
In [3]:
#uid = '2555d366'  # (scan num: 24810) (Measurement: Test images from reference 8CB planar cell detectorx=154.9712, detectory=-132.6920 T=33.9C, real T=33.1 )
def _chx_compress_data(uid, force_compress=False,
                       template_pipeline='/nsls2/xf11id1/analysis/2018_3/commissioning/Template_CHX_CompressData_V0.ipynb',
                       outDir='/nsls2/xf11id1/analysis/2018_3/commissioning/ResPipes/',
                       ):
    '''YG, Oct 6, 2018: compress an Eiger dataset by executing a papermill pipeline
    Input:
        uid: string, the unique data id
        force_compress: if True, compress the data even if it was compressed already
        template_pipeline: str, the filename of the template compression pipeline
        outDir: str, the path for the executed output pipeline
    Output:
        saves the executed pipeline as output_pipeline
    '''
    output_pipeline = outDir + 'CHX_CompressData_V0.ipynb'
    pm.execute_notebook(
        template_pipeline, output_pipeline,
        parameters=dict(uid=uid, force_compress=force_compress),
        kernel_name=None, report_mode=True)
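A minimal usage sketch (the uid is the example from the comment above; note that every call writes the executed pipeline to the same outDir + 'CHX_CompressData_V0.ipynb', so successive runs overwrite it):
In [ ]:
# usage sketch: compress a single run by uid; the executed papermill
# pipeline is saved to outDir + 'CHX_CompressData_V0.ipynb'
_chx_compress_data('2555d366', force_compress=False)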
In [4]:
if True:
    temp1 = data_acquisition_collection.find_one({'_id':'general_list'})['uid_list']
    temp2 = data_acquisition_collection.find_one({'_id':'general_list'})['compression_completed']
    temp3 = data_acquisition_collection.find_one({'_id':'general_list'})['compression_failed_X']
    #data_acquisition_collection.find_one({'_id':'general_list'})['compression_failed']
    s2 = set(temp2)
    s3 = set(temp3)
    # uids still awaiting compression, in submission order
    uids = [x for x in temp1 if x not in s2 and x not in s3]
    print(uids)
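The same three-list bookkeeping is repeated in the polling loop below; a small helper (hypothetical, not in the original notebook) keeps the read in one place while preserving the submission order of uid_list:
In [ ]:
# hypothetical helper: read the bookkeeping document once and return the
# uids still awaiting compression, preserving their order in uid_list
def get_pending_uids(collection):
    doc = collection.find_one({'_id': 'general_list'})
    done = set(doc['compression_completed']) | set(doc['compression_failed_X'])
    return [u for u in doc['uid_list'] if u not in done]

uids = get_pending_uids(data_acquisition_collection)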
In [5]:
#data_acquisition_collection.update_one({'_id':'general_list'},{'$set':{'compression_failed_X': temp1[:-10] }})
In [6]:
temp1[-3:]
Out[6]:
In [7]:
temp2[-3:]
Out[7]:
In [8]:
temp3[-3:]
Out[8]:
In [9]:
#temp3
In [ ]:
In [6]:
#temp1[-10:]
In [11]:
def get_masked_analysis_database(start_uid):
    '''Given the uid of the first run to analyze, mask all earlier uids in
    compression_completed by writing them to analysis_failed_userX.'''
    temp1 = data_acquisition_collection.find_one({'_id':'general_list'})['compression_completed']
    for i, t in enumerate(temp1):
        if t == start_uid:
            print(i, t)
            start_id = i
            break
    else:
        raise ValueError('start_uid %s not found in compression_completed' % start_uid)
    data_acquisition_collection.update_one(
        {'_id':'general_list'}, {'$set': {'analysis_failed_userX': temp1[:start_id]}})
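A quick read-back to confirm the mask was written (a sketch, using the field name set above):
In [ ]:
# sketch: check how many uids were masked out of the analysis queue
masked_list = data_acquisition_collection.find_one({'_id': 'general_list'}).get('analysis_failed_userX', [])
print('%d uids masked from analysis' % len(masked_list))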
In [12]:
#get_masked_analysis_database( start_uid = '0a5989df-5859-4ce2-a5f9-ef611d5ab166' )
In [9]:
#import pymongo
#from IPython.display import clear_output
#cli = pymongo.MongoClient('xf11id-ca')
#samples_2 = cli.get_database('samples').get_collection('samples_2')
#data_acquisition_collection = cli.get_database('samples').get_collection('data_acquisition_collection')
# How to find/list one entry
#data_acquisition_collection.find_one({'_id':'general_list'})['uid_list']
# How to find and delete one entry
#samples_2.find_one_and_delete( {'info.owner':'chx'} )
# How to create/update an entry
#temp1=['uid1', 'uid2']
#data_acquisition_collection.update_one({'_id':'general_list'},{'$set':{ 'uid_list': temp1}})
#data_acquisition_collection.update_one({'_id':'general_list'},{'$set':{'analysis_failed':[]}})
#data_acquisition_collection.update_one({'_id':'general_list'},{'$set':{'compression_failed': temp1 }})
# How to delete an entry
#data_acquisition_collection.delete_one( {'_id':'general_list/compression_completed'} )
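The read-append-$set pattern used in this notebook is not atomic: if two workers update the same list concurrently, one append can be lost. MongoDB's $addToSet and $pull operators modify the array server-side in a single step; a sketch with the same collection and field names ('some-uid' is a placeholder):
In [ ]:
# sketch: atomic list updates as an alternative to read-modify-$set
# append a uid to the completed list (only if not already present)
data_acquisition_collection.update_one(
    {'_id': 'general_list'},
    {'$addToSet': {'compression_completed': 'some-uid'}})
# remove a uid from the failed list
data_acquisition_collection.update_one(
    {'_id': 'general_list'},
    {'$pull': {'compression_failed_X': 'some-uid'}})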
In [18]:
uids
Out[18]:
In [ ]:
In [ ]:
# poll the database for uids awaiting compression
stop_key = 'none'  # stop once this key is the next uid in uid_list; 'none': no stop key, rely on the empty-list timeout
empty_list_timeout = 3600 * 24 * 6  # [s] stop if uid_list stays empty for this long
end_of_compression_key = 'none'  # if not 'none': append this key to the list of compressed uids to mark the end
time_count = 0
run_condition = True
while run_condition:
    clear_output()
    temp1 = data_acquisition_collection.find_one({'_id':'general_list'})['uid_list']
    temp2 = data_acquisition_collection.find_one({'_id':'general_list'})['compression_completed']
    temp3 = data_acquisition_collection.find_one({'_id':'general_list'})['compression_failed_X']
    #data_acquisition_collection.find_one({'_id':'general_list'})['compression_failed']
    s2 = set(temp2)
    s3 = set(temp3)
    #######################################
    ####### Get uids to be compressed #######
    uids = [x for x in temp1 if x not in s2 and x not in s3]
    ######################################
    if uids:  # list of uids is not empty
        print('uid list for compression is NOT empty, found ' + str(len(uids)) + ' uids awaiting compression.')
        time_count = 0
        if stop_key != 'none' and uids[0] == stop_key:  # the next uid in the queue is the stop key
            run_condition = False
            print('Stop key for compression detected!')
        else:
            print('Doing data file compression for uid ' + uids[0])
            uid = uids[0]
            try:
                _chx_compress_data(uid)
                # update the list of compressed uids:
                temp2.append(uids[0])
                data_acquisition_collection.update_one({'_id':'general_list'}, {'$set': {'compression_completed': temp2}})
            except Exception:
                print('This uid=%s cannot be compressed.' % uid)
                # update the list of failed compressions:
                temp3.append(uids[0])
                data_acquisition_collection.update_one({'_id':'general_list'}, {'$set': {'compression_failed_X': temp3}})
    ######################################
    else:
        if time_count > empty_list_timeout:
            print('uid list for compression was empty for > ' + str(empty_list_timeout) + 's -> stop looking for new uids')
            run_condition = False
        else:
            time_count = time_count + 5
            print('list of uids for compression is empty...going to look again in 5 s.')
            time.sleep(5)
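end_of_compression_key is declared above but never consumed by the loop; per its comment, it is meant to be appended to the compressed-uid list once compression ends. A sketch of that final step, run after the loop exits:
In [ ]:
# sketch: publish the end-of-compression marker declared above, so that
# downstream analysis workers can tell that compression has finished
if end_of_compression_key != 'none':
    temp2.append(end_of_compression_key)
    data_acquisition_collection.update_one(
        {'_id': 'general_list'},
        {'$set': {'compression_completed': temp2}})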
In [ ]:
In [ ]:
In [ ]:
data_acquisition_collection.find_one({'_id':'general_list'})
In [ ]: