In [1]:
# Provenance metadata — presumably consumed by the workflow manager (e.g.
# nbflow/SCons) to build the dependency graph; confirm against the build setup.
# __depends__: files this notebook reads; __dest__: files it produces.
__depends__=["../results/ebtel_varying_tau_results.pickle"]
# One output per (heating duration, EBTEL variant) pair: tau = 20 s and 500 s,
# each for electron-heated, ion-heated, and single-fluid runs.
__dest__=["../results/tau20.electron.sol.txt","../results/tau20.ion.sol.txt","../results/tau20.single.sol.txt",
"../results/tau500.electron.sol.txt","../results/tau500.ion.sol.txt","../results/tau500.single.sol.txt"]
In [2]:
import os
import pickle
import multiprocessing
import subprocess
import xml.etree.ElementTree as ET
import numpy as np
First, import the EBTEL results.
In [3]:
# Load the pre-computed EBTEL results (a list of result dictionaries,
# one per value of the heating duration tau).
with open(__depends__[0], mode='rb') as results_file:
    varying_tau_results = pickle.load(results_file)
Next, reshape the EBTEL results into something readable by the IonPopSolver
code and save them to a file. Set some parameters for reading the data structure.
In [4]:
# Which entries of the results list to use (first and last correspond to the
# shortest and longest heating durations) and the filename prefixes for each.
tau_indices = [0, -1]
prefixes = ['tau20', 'tau500']
# For each EBTEL variant, the (time, temperature, density) keys in the
# corresponding results dictionary.
parameter_sets = dict(
    single=('t', 'T', 'n'),
    electron=('te', 'Tee', 'ne'),
    ion=('ti', 'Tie', 'ni'),
)
Now, write the reshaped results to temporary input files.
In [5]:
# Write one IonPopSolver input file per (tau, variant) combination and record
# the filenames. Each file is three tab-separated columns (time, temperature,
# density) preceded by a header line giving the number of rows, which is the
# format IonPopSolver expects.
inputs = []
for i, pre in zip(tau_indices, prefixes):
    for key, (t_key, T_key, n_key) in parameter_sets.items():
        # Build the output path once instead of repeating the join/format
        # expression for both the bookkeeping list and the savetxt call.
        tmp_file = os.path.join('../results/', '_tmp_%s.%s.ips.txt' % (pre, key))
        inputs.append(tmp_file)
        results = varying_tau_results[i]
        np.savetxt(tmp_file,
                   np.transpose([results[t_key], results[T_key], results[n_key]]),
                   header=str(len(results[t_key])),
                   comments='',  # no '#' prefix: the header must be a bare row count
                   fmt='%f\t%e\t%e')
We need to modify the XML input file for the IonPopSolver
code to make sure it points to the right atomic database (see install instructions in IonPopSolver). We'll also set the cutoff ion fraction to $1\times10^{-6}$ to speed up the calculation.
In [6]:
# Point the example IonPopSolver radiation config at the local atomic database
# and lower the cutoff ion fraction, then save it under a new (local) name.
ips_test_dir = os.path.join(os.environ['EXP_DIR'], 'IonPopSolver/test')
xml_tree = ET.parse(os.path.join(ips_test_dir, 'radiation.example.cfg.xml'))
config_root = xml_tree.getroot()
# Trailing slash: the database path is presumably treated as a directory
# prefix by IonPopSolver.
config_root.find('atomicDB').text = os.path.join(os.environ['EXP_DIR'], 'apolloDB') + '/'
# Drop ion populations below 1e-6 to speed up the calculation.
config_root.find('cutoff_ion_fraction').text = '1e-6'
xml_tree.write(os.path.join(ips_test_dir, 'radiation.local.cfg.xml'))
Now, we'll launch the IonPopSolver executable with the subprocess module and parallelize the runs with multiprocessing. First, define the worker function that will run in parallel.
In [7]:
def worker(input_output):
    """Run the IonPopSolver executable on one (input_file, output_file) pair.

    The pair is passed as a single tuple so the function can be used with
    multiprocessing.Pool.map over a list of tuples. The original signature
    used tuple-parameter unpacking, which was removed in Python 3 (PEP 3113);
    unpacking explicitly keeps the function working on both versions.
    """
    input_file, output_file = input_output
    print("Running IonPopSolver for input %s"%(input_file))
    executable = os.path.join(os.environ['EXP_DIR'], 'IonPopSolver/bin/IonPopSolver.run')
    # Fixed solver options; -Z 26 presumably selects iron (Z=26) and -f/-t an
    # ion stage range — confirm against the IonPopSolver CLI documentation.
    # -r points at the locally patched radiation config written above.
    static_args = ["-Z", "26", "-f", "9", "-t", "27", "-r",
                   os.path.join(os.environ['EXP_DIR'], 'IonPopSolver/test/radiation.local.cfg.xml')]
    var_args = ["-I", os.path.abspath(input_file), "-O", os.path.abspath(output_file)]
    # NOTE(review): subprocess.call ignores the exit status, so a failed solver
    # run passes silently; consider subprocess.check_call if failures matter.
    subprocess.call([executable] + static_args + var_args)
    print("Finished IonPopSolver for input %s"%(input_file))
Finally, pair the (sorted) input files with their corresponding destination filenames, map the worker function over the pairs on the available processors, and run the code.
In [8]:
# Distribute the six IonPopSolver runs across the available cores. The inputs
# are sorted so they pair up with the alphabetically ordered entries of
# __dest__ (the dict iteration order used to build `inputs` is not guaranteed).
pool = multiprocessing.Pool()
try:
    pool.map(worker, zip(sorted(inputs), __dest__))
finally:
    # Previously the pool was never closed, leaking worker processes for the
    # lifetime of the kernel; release them once the map completes.
    pool.close()
    pool.join()
Out[8]:
In [9]: