Tutorial 5: Running an external application#

In this tutorial, we provide an example of how to use the SDK as an external application (outside of a user task). The goal is to run a previously-defined Topaze model using the RTA micro-service and perform sensitivity runs varying 3 parameters. The SciPy library is employed to determine the parameter combinations, and P10/P50/P90 simulation profiles are produced. These probabilistic production profiles are sent back to KAPPA-Automate.

The code for Tutorial 5 can be found in this zip file.

Listing 35 tutorial_5.py#
  1from kappa_sdk import Connection
  2from kappa_sdk import UnitConverter, UnitEnum, Document, ModelParser, KWModuleEnum, Vector
  3from scipy.stats import qmc
  4from scipy.interpolate import interp1d
  5import numpy as np
  6import numpy.typing as npt
  7import matplotlib.pyplot as plt
  8import time
  9from datetime import datetime
 10from typing import Tuple, List, Optional
 11
 12
 13#  Function that takes
 14#  (1) parameters (x)
 15#  (2) the base model xml to be modified
 16#  (3) and the pointer to the Topaze Document instance in KAPPA-Automate
 17#  Returns a tuple of dates, elapsed times and rates
 18def run_model(x: Tuple[Optional[float], Optional[float], Optional[float]], unit_converter: UnitConverter, model_xml: str, topaze_doc: Document) -> Tuple[List[datetime], List[float], List[float]]:
 19    delta, k, xmf = x
 20    parser = ModelParser(model_xml)
 21    parser.set_parameter_value(topaze_doc.analyses[0].id, "KWKA_RES_PAR",
 22                               {"Type": "FRACTIONAL_DIMENSION", "ZoneIndexX": "1"}, str(delta))
 23
 24    internal_perm = unit_converter.convert_to_internal(UnitEnum.perm_milli_darcy, k)
 25    parser.set_parameter_value(topaze_doc.analyses[0].id, "KWKA_RES_PAR",
 26                               {"Type": "PERMEABILITY"}, str(internal_perm))
 27
 28    internal_xmf = unit_converter.convert_to_internal(UnitEnum.length_feet, xmf)
 29    parser.set_parameter_value(topaze_doc.analyses[0].id, "KWKA_WELL_PAR",
 30                               {"Type": "FRACTURE_XF"}, str(internal_xmf))
 31
 32    # get the updated xml
 33    new_model_xml = parser.export()
 34    ret = topaze_doc.set_model_xml(new_model_xml)
 35    if not ret.is_success:
 36        print(ret.message)
 37
 38    vectors = None
 39
 40    retry_timeout = 10
 41    # retrieve generated data from the document
 42    sim_values = None
 43    while vectors is None:
 44        try:
 45            vectors = topaze_doc.analyses[0].get_plot_data('History')  # <-- need to document plot types
 46            #  NOTE:  dates coming from vectors is not correct because the reference time is often not initialized correctly
 47            for v in vectors:
 48                if v.data_measure == 'LiquidRateSurface' and v.data_name == 'Simulated oil rate':
 49                    sim_values = v.values
 50                    sim_times = v.elapsed_times
 51                    sim_dates = v.dates
 52            break
 53        except ConnectionError:
 54            print("Retrying the History plot retrieval...")
 55            time.sleep(retry_timeout)
 56
 57    if sim_values is None:
 58        raise Exception('No simulation values found')
 59
 60    return sim_dates, sim_times, sim_values
 61
 62
 63#  This function launches the runs and returns the P10/P50/P90
 64def get_simulation_percentiles(samples: npt.NDArray[np.float64], ref_sim_times: List[float], unit_converter: UnitConverter, model_xml: str, topaze_doc: Document) -> npt.NDArray[np.float64]:
 65    storage = list()
 66    for sample in samples:
 67        sample[1] = np.power(10, sample[1])
 68        sample[2] = np.power(10, sample[2])
 69        print('Running model with x={}'.format(sample))
 70        sim_dates, sim_times, sim_values = run_model(sample, unit_converter, model_xml, topaze_doc)
 71        sim_interpolator = interp1d(sim_times, sim_values, bounds_error=False, fill_value='extrapolate')
 72        storage.append(sim_interpolator(ref_sim_times))
 73    percentiles: npt.NDArray[np.float64] = np.percentile(storage, [10, 50, 90], axis=0)
 74    return percentiles
 75
 76
 77# set the server address, credentials (if needed), field, well and a document name.
 78ka_server_address = 'https://your-ka-instance'
 79field_name = "Tutorial"
 80well_name = "Well #1"
 81document_name = "UR_example.kt5"
 82show_plot = False
 83samples_number = 1
 84
 85# set up the connection
 86print('Connecting to {}'.format(ka_server_address))
 87connection = Connection(ka_server_address, verify_ssl=False)
 88print('Made connection')
 89
 90#  Now get the field, welland Topaze document
 91field = next(x for x in connection.get_fields() if x.name == field_name)
 92well = next(x for x in field.wells if x.name == well_name)
 93topaze_doc = next(x for x in well.documents if x.type == KWModuleEnum.topaze and x.name == document_name)
 94print('Found field, welland Topaze document ({})'.format(document_name))
 95
 96# get the model XML once and re-use it on each iteration
 97model_xml = topaze_doc.get_model_xml()
 98parser = ModelParser(model_xml)
 99
100#  Get the base values of delta, kand xmf
101delta_str = parser.get_parameter_value(topaze_doc.analyses[0].id, "KWKA_RES_PAR", {"Type": "FRACTIONAL_DIMENSION", "ZoneIndexX": "1"})
102delta = float(delta_str) if delta_str is not None else None
103
104k_str = parser.get_parameter_value(topaze_doc.analyses[0].id, "KWKA_RES_PAR", {"Type": "PERMEABILITY"})
105k = connection.unit_converter.convert_from_internal(UnitEnum.perm_milli_darcy, float(k_str) if k_str is not None else None)
106
107xmf_str = parser.get_parameter_value(topaze_doc.analyses[0].id, "KWKA_WELL_PAR", {"Type": "FRACTURE_XF"})
108xmf = connection.unit_converter.convert_from_internal(UnitEnum.length_feet, float(xmf_str) if xmf_str is not None else None)
109
110if delta is None:
111    raise ValueError("Delta should be defined")
112if k is None:
113    raise ValueError("Permeability should be defined")
114if xmf is None:
115    raise ValueError("XMF should be defined")
116
117print('Running model')
118x = (delta, k, xmf)
119ref_sim_dates, ref_sim_times, ref_sim_values = run_model(x, connection.unit_converter, model_xml, topaze_doc)
120
121# Get samples using Latin hypercube
122lhs_sampler = qmc.LatinHypercube(d=3, strength=1, optimization='random-cd')
123samples = lhs_sampler.random(n=samples_number)
124l_bounds = [delta - 0.05, np.log10(0.5 * k), np.log10(0.5 * xmf)]
125u_bounds = [delta + 0.05, np.log10(2 * k), np.log10(2 * xmf)]
126
127samples = qmc.scale(samples, l_bounds, u_bounds)
128print('Evaluating Latin hypercube samples')
129percentiles = get_simulation_percentiles(samples, ref_sim_times, connection.unit_converter, model_xml, topaze_doc)
130
131plt.figure()
132plt.plot_date(ref_sim_dates, percentiles[0], label='P10', ls='--', marker=None)
133plt.plot_date(ref_sim_dates, percentiles[1], label='P50', ls='-', marker=None)
134plt.plot_date(ref_sim_dates, percentiles[2], label='P90', ls='--', marker=None)
135if show_plot:
136    plt.show()
137
138print('Output data back to KAPPA-Automate')
139p10_vec = Vector(ref_sim_dates, percentiles[0].tolist())
140data_p10 = well.create_data('P10', 'qo')
141data_p10.append(p10_vec)
142p50_vec = Vector(ref_sim_dates, percentiles[1].tolist())
143data_p50 = well.create_data('P50', 'qo')
144data_p50.append(p50_vec)
145p90_vec = Vector(ref_sim_dates, percentiles[2].tolist())
146data_p90 = well.create_data('P90', 'qo')
147data_p90.append(p90_vec)
148print('Finished')