In [3]:
# header for 2018-1 kernel
from pyCHX.chx_packages import *
%matplotlib notebook
plt.rcParams.update({'figure.max_open_warning': 0})
plt.rcParams.update({ 'image.origin': 'lower'   })
plt.rcParams.update({ 'image.interpolation': 'none'   })
import pickle as cpk
#from pyCHX.chx_xpcs_xsvs_jupyter_V1 import *
#%run /home/yuzhang/pyCHX_link/pyCHX/chx_generic_functions.py
#%matplotlib inline
import papermill as pm

In [4]:
# database access -> should eventually be hidden from the user inside the package
import datetime
import pymongo 
from tqdm import tqdm
from bson import ObjectId
import matplotlib.patches as mpatches
from IPython.display import clear_output
cli = pymongo.MongoClient('xf11id-ca')    
samples_2 = cli.get_database('samples').get_collection('samples_2')
data_acquisition_collection = cli.get_database('samples').get_collection('data_acquisition_collection')
beamline_pos = cli.get_database('samples').get_collection('beamline_pos')
from databroker import Broker                                                   
db = Broker.named('temp')  # for real applications, 'temp' would be 'chx' 
print('available databases:')
print(cli.list_database_names())
print('\navailable collections in database samples:')
print(cli.samples.list_collection_names())


available databases:
['amostra', 'chx-simulation-assetstore', 'chx-simulation-metadatastore', 'datastore', 'filestore', 'local', 'metadatastore', 'metadatastore-production-v1', 'samples']

available collections in database samples:
['samples', 'data_acquisition_collection', 'samples_2', 'debug', 'beamline_pos']
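
A sanity-check sketch (not part of the original workflow): all bookkeeping in this notebook lives in the single 'general_list' document of data_acquisition_collection, whose keys compression_completed, analysis_completed, analysis_failed_userX and analysis_in_progress are used in the cells below; listing the keys confirms the connection works.

In [ ]:
# Sketch: inspect the bookkeeping document used throughout this notebook.
general_list = data_acquisition_collection.find_one({'_id': 'general_list'})
print(sorted(general_list.keys()))  # expect the compression/analysis lists among the keys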

In [36]:
def _chx_analysis_data( uid,  
        #template_pipeline = '/home/yuzhang/analysis/2019_1/commisionning/XPCS_Single_2019_V1_SQRange.ipynb', 
        template_pipeline = '/home/yuzhang/analysis/2019_1/petrash/XPCS_Single_2019_V2.ipynb',                
        outDir = '/home/yuzhang/analysis/2019_1/petrash/ResPipelines/',
                      ):
    ''' YG. Oct 6, 2018. Run the analysis pipeline for one uid using papermill.
    Input:
        uid: string, the unique data id
        template_pipeline: str, the filename of the template pipeline
        outDir: str, the path for the output pipeline
    Output:
        saves the executed pipeline to outDir
    '''
    output_pipeline = outDir + template_pipeline.split('/')[-1] + '_%s.ipynb'%uid 
    pm.execute_notebook(
        template_pipeline, output_pipeline,         
        parameters = dict( uid = uid ),
        kernel_name='python3', report_mode=True )
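
A usage sketch for the helper above: it executes the template notebook with papermill, injecting the uid as a parameter, and writes the executed copy to outDir (named template filename + '_<uid>.ipynb'). The uid below is taken from this notebook's own output and is shown purely for illustration.

In [ ]:
# _chx_analysis_data( uid = 'de9bd5ba-897d-4719-87a3-c5ad6ebf50c1' )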

In [37]:
if True:    
    gl = data_acquisition_collection.find_one({'_id':'general_list'})  # fetch the bookkeeping document once
    temp1 = gl['compression_completed']
    temp2 = gl['analysis_completed']
    #temp3 = gl['analysis_failed']
    temp4 = gl['analysis_failed_userX']
    s2 = set(temp2)
    #s3 = set(temp3)
    s4 = set(temp4)       
    #######################################
    ####Get uids awaiting analysis#########    
    uids = [x for x in temp1 if x not in s2 and x not in s4] 
    print(uids)


['de9bd5ba-897d-4719-87a3-c5ad6ebf50c1', '396b8e68-acd2-4b21-8af7-bb3a8685549b', 'f3f39aec-db19-4c77-9cb4-4458261714df']

In [39]:
#temp1[-10:]

In [35]:
#temp4[-10:]

In [40]:
#temp2[-10:]

In [5]:
#not_done=[x for x in temp1 if x not in s2 and x not in s4]
#not_done

In [42]:
temp1[-1], temp2[-1], temp4[-1]


Out[42]:
('f3f39aec-db19-4c77-9cb4-4458261714df',
 'd9fc1e0e-a5b9-4fe9-995f-a8f6fff370e3',
 '2494fe5e-6d1a-4c18-805a-57c6e7d34acc')

In [43]:
def get_masked_analysis_database( start_uid ):
    '''Given the uid of the first run to analyze, mask all earlier uids:
       every uid in compression_completed that precedes start_uid is written to
       analysis_failed_userX, so the analysis loop below will skip them.'''
    temp1 = data_acquisition_collection.find_one({'_id':'general_list'})['compression_completed']
    start_id = None
    for i, t in enumerate(temp1):
        if t == start_uid:
            print(i, t)
            start_id = i    
    if start_id is None:
        raise ValueError('start_uid %s not found in compression_completed'%start_uid)
    data_acquisition_collection.update_one( 
           {'_id':'general_list'},{'$set':{'analysis_failed_userX':  temp1[:start_id] }   })

In [44]:
start_fuid = '2494fe5e-6d1a-4c18-805a-57c6e7d34acc' #'25171c35-ce50-450b-85a0-ba9e116651e3'
get_masked_analysis_database( start_uid = start_fuid )


2816 2494fe5e-6d1a-4c18-805a-57c6e7d34acc
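
A quick verification sketch (expectation based on the index printed above): after masking, analysis_failed_userX should hold exactly the 2816 uids that precede the start uid.

In [ ]:
masked_list = data_acquisition_collection.find_one({'_id':'general_list'})['analysis_failed_userX']
print(len(masked_list))  # expect 2816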

In [9]:
#data_acquisition_collection.update_one( {'_id':'general_list'},{'$set':{'analysis_failed_petrash':  temp1[:797] }   })
data_acquisition_collection.update_one( {'_id':'general_list'},{'$set':{'analysis_in_progress':  [] }   })


Out[9]:
<pymongo.results.UpdateResult at 0x7f213a4dbf88>

In [10]:
data_acquisition_collection.find_one({'_id':'general_list'})['analysis_in_progress']


Out[10]:
[]

In [11]:
#temp1

In [12]:
#temp4.append(['2891d947-58b1-4db8-9b7e-c93f94259e78',   'f34aceea-dacb-439f-9e91-94bc37156354',          '0639c0c3-7257-436f-9bfe-b964830e3823']   )
#data_acquisition_collection.update_one({'_id':'general_list'},{'$set':{'analysis_failed_Surita':tem}})
#data_acquisition_collection.update_one({'_id':'general_list'},{'$set':{'analysis_failed':[]}})

Running the data analysis loop over the compressed_uid list in the data-acquisition database


In [ ]:
uids

In [ ]:
# get list of uids
import time  # used for the polling sleep below (may already be provided by chx_packages)
end_of_compression_key = 'none' # stop when the next uid up for analysis equals this key; 'none': no stop key, rely on the empty-list timeout
empty_list_timeout = 3600 * 24 * 7  # [s] stop if the compressed_uid list stays empty for this long

time_count=0
run_condition = True
while run_condition:
    clear_output()
    temp1 = data_acquisition_collection.find_one({'_id':'general_list'})['compression_completed']
    temp2= data_acquisition_collection.find_one({'_id':'general_list'})['analysis_completed']
    temp3= data_acquisition_collection.find_one({'_id':'general_list'})['analysis_failed_userX']
    temp4 = data_acquisition_collection.find_one({'_id':'general_list'})['analysis_in_progress']
    #data_acquisition_collection.find_one({'_id':'general_list'})['analysis_failed']
    s2 = set(temp2)
    s3 = set(temp3)
    s4 = set( temp4 )
       
    #######################################
    ####Get uids awaiting analysis#########    
    uids = [x for x in temp1 if x not in s2 and x not in s3] 
    ######################################
    
    
    if uids: # list of uids is not empty
        print('uid list for analysis is NOT empty, found '+str(len(uids))+' uids awaiting analysis.')
        time_count=0
        if end_of_compression_key != 'none' and uids[0] == end_of_compression_key: #looking for a stop key, next uid up IS the stop key
            run_condition = False
            print('Stop Key for analysis detected!')
            
        else:
            print('Doing data analysis for uid '+uids[0])
            
            #######################################
            #for ics in tqdm(range(100)):
            #    time.sleep(.35)
            ########################################
            uid = uids[0]
            if uid not in s4:
                try:                                        
                    temp4.append( uid )
                    data_acquisition_collection.update_one({'_id':'general_list'},
                                                         {'$set':{ 'analysis_in_progress': temp4}}) 

                    _chx_analysis_data( uid )                 
                    # update the list of analyzed uids:
                    temp2= data_acquisition_collection.find_one({'_id':'general_list'})['analysis_completed']    
                    temp2.append(uids[0])
                    data_acquisition_collection.update_one({'_id':'general_list'},
                                                    {'$set':{ 'analysis_completed': temp2}})            
                
                except Exception:
                    temp3= data_acquisition_collection.find_one({'_id':'general_list'})['analysis_failed_userX']
                    temp3.append(uids[0])
                    data_acquisition_collection.update_one({'_id':'general_list'},{'$set':{ 'analysis_failed_userX': temp3}})
                    
                ## remove this uid from analysis_in_progress
                temp4 = data_acquisition_collection.find_one({'_id':'general_list'})['analysis_in_progress']
                tx = [u for u in temp4  if u!=uid ]
                data_acquisition_collection.update_one( {'_id':'general_list'},
                                                           {'$set':{'analysis_in_progress':  tx }   })                     


                
    else:
        if time_count > empty_list_timeout:
            print('uid list for analysis was empty for > '+str(empty_list_timeout)+'s -> stop looking for new uids')
            run_condition = False
        else:
            time_count=time_count+5
            print('list of uids for analysis is empty...going to look again in 5s.')
            time.sleep(5)
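
A note on the loop above: it reads each bookkeeping list, mutates it in Python, and writes the whole list back with $set, so a concurrent writer (e.g. the acquisition side appending to compression_completed) can be overwritten between the read and the write. Below is a minimal sketch of an atomic alternative using MongoDB's $addToSet and $pull update operators on the same collection and key names; the helper names are hypothetical.

In [ ]:
# Hypothetical helpers: atomic bookkeeping updates that avoid the
# read-modify-write race in the loop above.
def mark_in_progress(uid):
    # $addToSet appends uid only if it is not already in the list
    data_acquisition_collection.update_one(
        {'_id': 'general_list'},
        {'$addToSet': {'analysis_in_progress': uid}})

def mark_finished(uid, key='analysis_completed'):
    # key = 'analysis_failed_userX' on failure;
    # $pull removes uid from analysis_in_progress in the same atomic update
    data_acquisition_collection.update_one(
        {'_id': 'general_list'},
        {'$addToSet': {key: uid}, '$pull': {'analysis_in_progress': uid}})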
