In [1]:
# standard library
import copy
import pickle
import sys, os

# third-party
import matplotlib.pyplot as plt
import numpy as np

# project
import pynoddy.history
import pynoddy.events
import pynoddy.output
In [2]:
# os.chdir(r"/Users/Florian/Documents/10_Geomodels/Noddy/GBasin/GBasin")
os.chdir(r"/Users/flow/Documents/02_work/10_Geomodels/06_Noddy/")
In [3]:
reload(pynoddy.history)
reload(pynoddy.events)
PH = pynoddy.history.NoddyHistory("GBasin_Ve1_working.his")
In [4]:
PH.get_origin()
Out[4]:
In [44]:
PH.get_extent()
Out[44]:
In [45]:
PH.events
Out[45]:
In [47]:
PH.events[12].properties
Out[47]:
In [14]:
reload(pynoddy)
his = 'simple_folding.his'
PH.write_history(his)
out = 'simple_folding_out'
pynoddy.compute_model(his, out)
print os.getcwd()
In [15]:
reload(pynoddy.output)
PO = pynoddy.output.NoddyOutput(out)
In [23]:
PO.plot_section('y', position=-1, ve = 10.)
In [13]:
PO.export_to_vtk(vtk_filename = "GBasin")
In [9]:
PH.events[8].properties['Amplitude'] = 200.
PH.change_cube_size(50)
PH.update_all_event_properties()
In [10]:
PH.events[2].properties
Out[10]:
In [11]:
PH.events[2].update_properties()
In [12]:
his = 'simple_folding_2.his'
PH.write_history(his)
out2 = 'simple_folding_out_2'
pynoddy.compute_model(his, out2)
In [13]:
reload(pynoddy.output)
PO2 = pynoddy.output.NoddyOutput(out2)
In [14]:
PO2.plot_section('y', position=0)
In [15]:
PO2.export_to_vtk(vtk_filename = "Gipps_changed_hres")
Ok, it's now time to become uncertain! Let's start with some simple uncertainties:
Update: suggestions by Mark (in email):
Faults (all events) Perturb dip and strike +- 10 degrees (a range of 20 degrees)
Folds (all events) Perturb dip and strike +- 10 degrees Wavelength +- 1000 (range of 2000) Amplitude +-300
Unconformities Just adjust the Z value by +- 500... but nothing else. That should cause enough trouble on its own.
Topology Maybe just change the folding events +- 1 position from the original. This will mean folding will either post-date faulting, or pre-date an unconformity. Hmm... this is fun!
In [16]:
# Let's start with a clean model:
PH = pynoddy.history.NoddyHistory("GBasin.his")
# set cube size to create higher resolution outputs - once it works...
PH.change_cube_size(100)
noddy_his = 'tmp'
noddy_out = 'tmp_out'
PH.write_history(noddy_his)
pynoddy.compute_model(noddy_his, noddy_out)
PO = pynoddy.NoddyOutput(noddy_out)
As Mark wants to have relative changes, let's just create a simple function to do the trick and avoid a lot of writing later:
In [17]:
def disturb_percent(event, prop, percent=5):
"""Disturb the property of an evetn by a given percentage (default=10), assuming a normal distribution"""
ori_val = event.properties[prop]
new_val = np.random.randn() * percent/100. * ori_val + ori_val
event.properties[prop] = new_val
def disturb_value(event, prop, stdev):
"""Disturb the property of an evetn by a given stdev, assuming a normal distribution"""
ori_val = event.properties[prop]
new_val = np.random.randn() * stdev * ori_val + ori_val
event.properties[prop] = new_val
Now we create a function to disturb the model once. Note: the object has to be copied before the disturbance step, otherwise properties are continuously changed, leading to a random walk instead of a standard sampling (and therefore to a lot more disturbance):
In [18]:
def disturb(PH_local):
for event in PH_local.events.values():
# check instances and roll the dice
if isinstance(event,pynoddy.events.Fault):
disturb_percent(event, 'Dip')
disturb_percent(event, 'Dip Direction')
if isinstance(event,pynoddy.events.Fold):
disturb_percent(event, 'Dip')
disturb_percent(event, 'Dip Direction')
disturb_value(event, 'Wavelength', 500)
disturb_value(event, 'Amplitude', 100)
if isinstance(event,pynoddy.events.Unconformity):
disturb_value(event, 'Z', 0)
In [19]:
PH_tmp = copy.deepcopy(PH)
disturb(PH_tmp)
PH_tmp.events[2].properties['Dip']
Out[19]:
Setting up the entire uncertainty sampling step is now straight-forward:
In [22]:
# define number of sampling steps:
n = 500
all_blocks = np.ndarray((n, PO.nx, PO.ny, PO.nz), dtype="int")
for i in range(n):
PH_tmp = copy.deepcopy(PH)
disturb(PH_tmp)
PH_tmp.write_history(noddy_his)
pynoddy.compute_model(noddy_his, noddy_out)
PO_tmp = pynoddy.NoddyOutput(noddy_out)
all_blocks[i,:,:,:] = PO_tmp.block
For later use (and probably combination of results of multiple runs), save results to file:
In [23]:
pickle.dump(all_blocks, open("all_blocks_500.pkl", 'w'))
The question is now, of course, how to analyse and visualise the results. As a first step, we simply generate indicator functions and determine the id probabilities.
For a first simple visualisation, we create a "dummy" NoddyOutput file with the same properties as the original output file and assign the resulting grid to the block property. We can then use the standard plot_section and export_to_vtk commands:
In [32]:
# define id to be analysed:
unit_id = 11
ind_func = (all_blocks == unit_id)
np.shape(ind_func)
id_prob = np.sum(ind_func, axis=0) / float(n)
# create dummy NoddyOutput and assign values as block
NO_dummy = copy.deepcopy(PO_tmp)
NO_dummy.block = id_prob
NO_dummy.export_to_vtk(vtk_filename = 'prob_unit_id_%d' % unit_id)
NO_dummy.plot_section('x')
NO_dummy.plot_section('y')
NO_dummy.plot_section('z')
In [24]:
In [391]:
len(np.unique(PO.block))
Out[391]:
In [370]:
In [370]:
In [ ]: