In [ ]:
from pycqed.utilities import general as gen
from pycqed.analysis import measurement_analysis as ma
from pycqed.analysis import analysis_toolbox as a_tools
from pycqed.analysis import fitting_models as fit_mods
from pycqed.analysis_v2 import timedomain_analysis as tda
from pycqed.analysis_v2 import base_analysis as ba
from pycqed.analysis_v2 import readout_analysis as ra
from pycqed.analysis.tools import plotting as plting
from importlib import reload
from collections import OrderedDict
import numpy as np
from pprint import pprint
import matplotlib.pyplot as plt
import h5py
import os
In [ ]:
from pycqedscripts.init.xld.virtual_ATC66_M124_S7P11_PQSC import *
from pycqed.measurement import multi_qubit_module as mqm
# Point the analysis toolbox at the measurement data directory.
data_folder = r''  # TODO: set to the local path containing the measurement data
a_tools.datadir = data_folder
In [ ]:
from pycqedscripts.init.xld.cz_ATC66_M124_S7P11 import *
from pycqed.utilities import general as gen
# Collect the seven (virtual) qubit objects and restore a saved settings snapshot.
qubits = [qb1, qb2, qb3, qb4, qb5, qb6, qb7]
# Register the CZ pulses for the qubit pairs (helper from the cz init script).
upcz_pulse_names = add_all_CZ_pulses(qubits)
timestamp = '20200427_233508'  # settings snapshot to restore
for qb in qubits:
    gen.load_settings(qb, timestamp=timestamp)
clear_output()  # NOTE(review): presumably IPython.display.clear_output via the star-import -- confirm
In [ ]:
from pycqed.measurement import hdf5_data as hdf_dat
from pycqed.measurement.sweep_points import SweepPoints
from pycqed.measurement.calibration_points import CalibrationPoints
In [ ]:
In [ ]:
# import analysis_v3 modules
from pycqed.analysis_v3.init_anav3 import *
In [ ]:
# Apply the analysis_v3 default matplotlib parameters globally.
plot_pars_dict = plot_module.get_default_plot_params(set_params=True)
# Keep the figures' own bounding boxes for inline display (no tight-cropping).
%config InlineBackend.print_figure_kwargs = {'bbox_inches':None}
# fig.patch.set_color('0.9')
# fig.align_ylabels()
# fig.subplots_adjust(0.145, 0.075, 0.987, 0.995)
The analysis_v3 framework requires the following inputs:
1. a ProcessingPipeline object
2. the measured object(s)
3. a measured-objects-value-names map
4. a SweepPoints object
5. a measured-objects-sweep-points map
6. a CalibrationPoints object (written by Nathan Lacroix)
In [ ]:
from pycqed.analysis_v3.processing_pipeline import ProcessingPipeline
# [
# {'node_name1': function_name1, keys_in: keys_in_list1, **node_params1},
# {'node_name2': function_name2, keys_in: keys_in_list2, **node_params2},
# .
# .
# .
# {'node_nameN': function_nameN, keys_in: keys_in_listN, **node_paramsN}
# ]
Create a processing pipeline.
Requires:
- the measured object(s): e.g. ['qb1', 'qb2'], 'qb3', 'TWPA', 'dummy', etc. — the names are completely up to the user
- the measured-objects-value-names map (i.e. the channel map, {meas_obj_name: [ro_channels]})
In [ ]:
# ProcessingPipeline(node_name,
# **node_params)
# ProcessingPipeline(node_name, **node_params)
# Start a pipeline with a single averaging node acting on the raw data of qb1.
pp = ProcessingPipeline('average_data',
                        keys_in='raw', shape=(10, 3), averaging_axis=1, meas_obj_names='qb1')
pp
In [ ]:
# Append more nodes: an IQ rotation for qb2, then a Ramsey analysis that
# consumes the output of the previous node ('previous') and writes no keys out.
pp.add_node('rotate_iq', keys_in='raw', meas_obj_names='qb2', num_keys_out=1)
pp.add_node('ramsey_analysis', keys_in='previous', keys_out=None, meas_obj_names='qb2')
pp
In [ ]:
# finalize pipeline -> requires measured-objects-value-names map
# helper function for multi-qubit experiments -> requires (virtual) qubit objects + detector functions
# Assign each of three qubits a distinct (I, Q) acquisition-channel pair,
# then build the multiplexed readout detector functions.
qubits = [qb1, qb2, qb3]
for i, qb in enumerate(qubits):
    qb.acq_I_channel(2*i)
    qb.acq_Q_channel(2*i + 1)
    qb.update_detector_functions()
# Channel map ({meas_obj_name: [value_names]}) for the averaged integration detector.
det_func = mqm.get_multiplexed_readout_detector_functions(qubits)['int_avg_det']
mqm.get_meas_obj_value_names_map(qubits, det_func)
In [ ]:
# Same map for the classifying detector.
det_func = mqm.get_multiplexed_readout_detector_functions(qubits)['int_avg_classif_det']
mqm.get_meas_obj_value_names_map(qubits, det_func)
In [ ]:
# Same map with correlations enabled via det_get_values_kws.
det_func = mqm.get_multiplexed_readout_detector_functions(
    qubits, det_get_values_kws={'correlated': True})['int_avg_classif_det']
mqm.get_meas_obj_value_names_map(qubits, det_func)
In [ ]:
In [ ]:
# let's use:
det_func = mqm.get_multiplexed_readout_detector_functions(qubits)['int_avg_det']
movnm = mqm.get_meas_obj_value_names_map(qubits, det_func)
movnm
In [ ]:
pp
In [ ]:
# finalize pipeline: calling it with the measured-objects-value-names map
# resolves the 'raw'/'previous' key references into concrete data keys
pp(movnm)
pp
In [ ]:
from pycqed.measurement.sweep_points import SweepPoints
# The SweepPoints object is a list of dictionaries of the form:
# [
# # 1st sweep dimension
# {param_name0: (values, unit, plot_label),
# param_name1: (values, unit, plot_label),
# ...
# param_nameN: (values, unit, plot_label)},
# # 2nd sweep dimension
# {param_name0: (values, unit, plot_label),
# param_name1: (values, unit, plot_label),
# ...
# param_nameN: (values, unit, plot_label)},
# .
# .
# .
# # D-th sweep dimension
# {param_name0: (values, unit, plot_label),
# param_name1: (values, unit, plot_label),
# ...
# param_nameN: (values, unit, plot_label)},
# ]
In [ ]:
# hard sweep (first sweep dimension): pulse delays
sp = SweepPoints('delay_qb1', np.linspace(0, 1e-6, 3), 's', 'Pulse delay, $\\tau$')
sp
In [ ]:
# Append an (initially empty) second dimension for the soft sweep.
sp.add_sweep_dimension()
sp
In [ ]:
# soft sweep (2nd sweep dimension): pulse amplitudes
# (plain string literal: the former f-string had no placeholders)
sp.add_sweep_parameter('amps_qb1', np.linspace(0, 1, 3), 'V', 'Pulse amplitude, $A$')
sp
In [ ]:
In [ ]:
# 2D sweep for 3 qubits
# first (hard) sweep dimension: pulse delay
# Build the per-qubit parameters in a loop (same pattern as the soft-dimension
# cell) instead of three copy-pasted calls.
sp = SweepPoints()
for qb in ['qb1', 'qb2', 'qb3']:
    sp.add_sweep_parameter(f'lengths_{qb}', np.linspace(10e-9, 1e-6, 3), 's', 'Pulse delay, $\\tau$')
sp
In [ ]:
# second (soft) sweep dimension: pulse amplitude
sp.add_sweep_dimension()
for qb in ['qb1', 'qb2', 'qb3']:
    sp.add_sweep_parameter(f'amps_{qb}', np.linspace(0, 1, 3), 'V', 'Pulse amplitude, $A$')
sp
In [ ]:
# Map each measured object to the names of the sweep parameters that apply to it.
mospm = sp.get_sweep_points_map(['qb1', 'qb2', 'qb3'])
mospm
In [ ]:
timestamp = '20200317_231624'
In [ ]:
reload(hlp_mod)
data_file = hlp_mod.get_data_file_from_timestamp(timestamp)
sweep_points = np.array(data_file['Experimental Data']['Experimental Metadata']['sweep_points_dict']['qb2'])
data_file.close()
In [ ]:
# OR: convenience helper that handles the file access internally
sweep_points = hlp_mod.get_param_from_metadata_group('sweep_points_dict', timestamp)['qb2']
In [ ]:
# Wrap the extracted delays in a SweepPoints object and build the two maps
# the analysis framework needs.
meas_object = 'qb2'
SP = SweepPoints('delays_' + meas_object, sweep_points, 's', 'Delay, $\\tau$')
meas_obj_value_names_map = {meas_object: hlp_mod.get_value_names_from_timestamp(timestamp)}
meas_obj_sweep_points_map = SP.get_sweep_points_map([meas_object])
Create the processing pipeline
In [ ]:
# "raw" pipeline
reload(ppmod)
pp = ppmod.ProcessingPipeline()
pp.add_node('rotate_iq', keys_in='raw', meas_obj_names=[meas_object], num_keys_out=1)
pp.add_node('ramsey_analysis', keys_in='previous rotate_iq', keys_out=None, meas_obj_names=[meas_object])
pp
In [ ]:
pp(meas_obj_value_names_map)
pp
In [ ]:
data_dict = pla.extract_data_hdf(timestamp)
In [ ]:
data_dict.keys()
In [ ]:
data_dict.update(OrderedDict({
'sweep_points': SP,
'meas_obj_value_names_map': meas_obj_value_names_map,
'meas_obj_sweep_points_map': meas_obj_sweep_points_map,
'artificial_detuning_dict': {meas_object: 0.5e6},
}))
pla.process_pipeline(data_dict, processing_pipeline=pp)
In [ ]:
data_dict.keys()
In [ ]:
data_dict['qb2']
In [ ]:
timestamp = '20191118_183801'
In [ ]:
# Here the channel map was saved with the measurement, so read it back
# from the file instead of rebuilding it from qubit objects.
movnm = hlp_mod.get_param_from_metadata_group('meas_obj_value_names_map', timestamp)
reload(ppmod)
pp = ppmod.ProcessingPipeline()
pp.add_node('rotate_iq', keys_in='raw', meas_obj_names=list(movnm), num_keys_out=1)
pp.add_node('ramsey_analysis', keys_in='previous rotate_iq', keys_out=None,
            meas_obj_names=list(movnm))
pp
In [ ]:
pp(movnm)
pp
In [ ]:
data_dict = pla.extract_data_hdf(timestamp)
# One artificial detuning entry per measured object.
data_dict.update(OrderedDict({
    'artificial_detuning_dict': {meas_object: 2e6 for meas_object in movnm},
}))
pla.process_pipeline(data_dict, processing_pipeline=pp)
In [ ]:
data_dict.keys()
In [ ]:
data_dict['qb1']
In [ ]:
# Randomized-benchmarking dataset split over several measurement files.
t_start = '20191103_174901'
t_stop = '20191103_183000'
data_dict = pla.get_timestamps(t_start=t_start, t_stop=t_stop)
In [ ]:
data_dict
In [ ]:
# Reconstruct the sweep structure from the last file's metadata:
# dimension 1 holds the Clifford numbers, dimension 0 the seeds per file.
sweep_points = hlp_mod.get_param_from_metadata_group('sweep_points', data_dict['timestamps'][-1])
ncl = sweep_points[1]['cliffords'][0]
nr_seeds_per_file = len(sweep_points[0]['nr_seeds'][0])
nr_files = len(data_dict['timestamps'])
print(ncl)
print(nr_seeds_per_file)
print(nr_files)
In [ ]:
movnm = hlp_mod.get_param_from_metadata_group('meas_obj_value_names_map', data_dict['timestamps'][-1])
movnm
In [ ]:
reload(ppmod)
pp = ppmod.ProcessingPipeline()
# Average the raw shots (and take their std) over the seeds axis...
pp.add_node('average_data', keys_in='raw',
            shape=(nr_files*len(ncl), nr_seeds_per_file),
            meas_obj_names=list(movnm))
pp.add_node('get_std_deviation', keys_in='raw',
            shape=(nr_files*len(ncl), nr_seeds_per_file),
            meas_obj_names=list(movnm))
# ...then average over the files axis (axis 0).
# (plain strings below: the former f-strings had no placeholders)
pp.add_node('average_data', keys_in='previous average_data',
            shape=(nr_files, len(ncl)), averaging_axis=0, meas_obj_names=list(movnm))
pp.add_node('get_std_deviation', keys_in='previous get_std_deviation',
            shape=(nr_files, len(ncl)), averaging_axis=0, meas_obj_names=list(movnm))
# Finally fit the RB decay; feed in the twice-averaged data and its std.
pp.add_node('rb_analysis', meas_obj_names=list(movnm),
            keys_out=None, d=4,  # NOTE(review): d=4 presumably the two-qubit Hilbert-space dimension -- confirm
            keys_in='previous average_data1',
            keys_in_std='previous get_std_deviation1')
pp(movnm)
pp
In [ ]:
# Re-point the analysis toolbox at the data directory and reload the
# analysis_v3 modules so any code changes are picked up.
reload(a_tools)
a_tools.datadir = data_folder
reload_anav3()
In [ ]:
pla.search_modules
In [ ]:
# append_data/replace_data control how the data from the multiple RB files
# is combined into the existing data_dict
data_dict = pla.extract_data_hdf(data_dict=data_dict, append_data=True, replace_data=False)
In [ ]:
pla.process_pipeline(data_dict, processing_pipeline=pp, save_processed_data=True, save_figures=False)
In [ ]:
data_dict.keys()
In [ ]:
data_dict['qb1']
In [ ]:
# plot raw data
In [ ]:
# One plotting node: x-values are the Clifford numbers repeated per seed, tiled per file.
pp = ppmod.ProcessingPipeline('prepare_1d_raw_data_plot_dicts', keys_in='raw', keys_out=None,
                              meas_obj_names=list(movnm), sp_name='cliffords',
                              xvals=np.tile(np.repeat(ncl, nr_seeds_per_file), nr_files),
                              do_plotting=True)#, plot_params={'linestyle': ''})
pp(movnm)
pp
In [ ]:
pla.process_pipeline(data_dict, processing_pipeline=pp, save_processed_data=True, save_figures=True)
In [ ]:
data_dict.keys()
In [ ]:
# Save the figures (but not the processed data) alongside the measurement.
save_module.Save(data_dict=data_dict, save_processed_data=False, save_figures=True)
In [ ]:
In [ ]:
# Rabi analysis example: same structure as the Ramsey pipeline above.
timestamp = '20191118_181845'
In [ ]:
movnm = hlp_mod.get_param_from_metadata_group('meas_obj_value_names_map', timestamp)
print(movnm)
reload(ppmod)
pp = ppmod.ProcessingPipeline()
pp.add_node('rotate_iq', keys_in='raw', meas_obj_names=list(movnm), num_keys_out=1)
pp.add_node('rabi_analysis', keys_in='previous rotate_iq', keys_out=None,
            meas_obj_names=list(movnm))
pp
In [ ]:
pp(movnm)
pp
In [ ]:
reload_anav3()
pla.search_modules
In [ ]:
data_dict = pla.extract_data_hdf(timestamp)
pla.process_pipeline(data_dict, processing_pipeline=pp)
In [ ]:
In [ ]:
In [ ]: