diff --git a/omas/machine_mappings/_common.py b/omas/machine_mappings/_common.py index 572f671e9..2f8ad1bfd 100644 --- a/omas/machine_mappings/_common.py +++ b/omas/machine_mappings/_common.py @@ -3,6 +3,8 @@ from omas.omas_utils import printd import os import glob +from omas.omas_setup import omas_dir +from omas.utilities.omas_mds import mdsvalue __support_files_cache__ = {} diff --git a/omas/machine_mappings/d3d.json b/omas/machine_mappings/d3d.json index a79b7cf32..a271343ee 100644 --- a/omas/machine_mappings/d3d.json +++ b/omas/machine_mappings/d3d.json @@ -113,32 +113,131 @@ "coils_non_axisymmetric.coil.:.name": { "PYTHON": "coils_non_axisymmetric_hardware(ods, {pulse})" }, + "core_profiles": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, "core_profiles.global_quantities.v_loop": { "COCOSIO": 11, "PYTHON": "core_profiles_global_quantities_data(ods, {pulse})" }, + "core_profiles.ids_properties.homogeneous_time": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, "core_profiles.profiles_1d.:": { - "TDI": "size(\\{PROFILES_tree}::TOP.PROFILES.EDENSFIT,1)", - "treename": "{PROFILES_tree}" + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.e_field.radial": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.e_field.radial_error_upper": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" }, "core_profiles.profiles_1d.:.electrons.density_thermal": { - "TDI": "\\{PROFILES_tree}::TOP.PROFILES.EDENSFIT", - "treename": "{PROFILES_tree}" + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.electrons.density_error_upper": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.electrons.density_fit.measured": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.electrons.density_fit.measured_error_upper": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.electrons.density_fit.psi_norm": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" }, "core_profiles.profiles_1d.:.electrons.temperature": { - "TDI": "\\{PROFILES_tree}::TOP.PROFILES.ETEMPFIT", - "treename": "{PROFILES_tree}" + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.electrons.temperature_error_upper": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.electrons.temperature_fit.measured": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.electrons.temperature_fit.measured_error_upper": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.electrons.temperature_fit.psi_norm": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.grid.rho_pol_norm": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" }, "core_profiles.profiles_1d.:.grid.rho_tor_norm": { - "eval2TDI": "py2tdi(tile,'dim_of(\\{PROFILES_tree}::TOP.PROFILES.EDENSFIT,0)','size(\\{PROFILES_tree}::TOP.PROFILES.EDENSFIT,1)')", - "treename": "{PROFILES_tree}" + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + 
"core_profiles.profiles_1d.:.ion.:": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.density_thermal": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.density_thermal_error_upper": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.density_thermal_fit.measured": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.density_thermal_fit.measured_error_upper": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.density_fit.psi_norm": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" }, - "core_profiles.profiles_1d.:.time": { - "TDI": "dim_of(\\{PROFILES_tree}::TOP.PROFILES.EDENSFIT,1)/1000.", - "treename": "{PROFILES_tree}" + "core_profiles.profiles_1d.:.ion.:.element.:": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.element.:.a": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.element.:.z_n": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.label": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.temperature": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.temperature_error_upper": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.temperature_fit.measured": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.temperature_fit.measured_error_upper": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.temperature_fit.psi_norm": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.velocity.toroidal": { + "COCOSIO": 11, + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.velocity.toroidal_error_upper": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.velocity.toroidal_fit.measured": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.velocity.toroidal_fit.measured_error_upper": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.ion.:.velocity.toroidal_fit.psi_norm": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.j_total": { + "COCOSIO": 11, + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" + }, + "core_profiles.profiles_1d.:.pressure_perpendicular": { + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" }, "core_profiles.time": { - "PYTHON": "core_profiles_global_quantities_data(ods, {pulse})" + "PYTHON": "core_profiles_profile_1d(ods, {pulse}, {PROFILES_tree!r})" }, "ec_launchers.beam.:": { "PYTHON": "ec_launcher_active_hardware(ods, {pulse})" @@ -199,6 +298,9 @@ "ec_launchers.beam.:.time": { "PYTHON": 
"ec_launcher_active_hardware(ods, {pulse})" }, + "ece": { + "PYTHON": "electron_cyclotron_emission_hardware(ods, {pulse}, {fast_ece!r})" + }, "ece.channel.:": { "PYTHON": "electron_cyclotron_emission_hardware(ods, {pulse}, {fast_ece!r})" }, diff --git a/omas/machine_mappings/d3d.py b/omas/machine_mappings/d3d.py index 5d3f25b24..6c07a0173 100644 --- a/omas/machine_mappings/d3d.py +++ b/omas/machine_mappings/d3d.py @@ -6,6 +6,11 @@ from omas import * from omas.omas_utils import printd, printe, unumpy from omas.machine_mappings._common import * +from uncertainties import unumpy +from omas.utilities.machine_mapping_decorator import machine_mapping_function +from omas.utilities.omas_mds import mdsvalue +from omas.omas_core import ODS +from omas.omas_structure import add_extra_structures __all__ = [] __regression_arguments__ = {'__all__': __all__} @@ -1353,6 +1358,136 @@ def ip_bt_dflux_data(ods, pulse): ods['tf.b_field_tor_vacuum_r.data'] *= 1.6955 +def add_extra_profile_structures(): + extra_structures = {} + extra_structures["core_profiles"] = {} + sh = "core_profiles.profiles_1d" + for quant in ["ion.:.density_fit.psi_norm", "electrons.density_fit.psi_norm", + "ion.:.temperature_fit.psi_norm", "electrons.temperature_fit.psi_norm", + "ion.:.velocity.toroidal_fit.psi_norm"]: + if "velocity" in quant: + psi_struct = {"coordinates": "1- 1...N"} + else: + psi_struct = {"coordinates": sh + ".:." + quant.replace("psi_norm", "rho_tor_norm")} + psi_struct["documentation"] = "Normalized Psi for fit data." + psi_struct["data_type"] = "FLT_1D" + psi_struct["units"] = "" + extra_structures["core_profiles"][f"core_profiles.profiles_1d.:.{quant}"] = psi_struct + velo_struct = {"coordinates": sh + ".:." + "ion.:.velocity.toroidal_fit.psi_norm"} + velo_struct["documentation"] = "Information on the fit used to obtain the toroidal velocity profile [m/s]" + velo_struct["data_type"] = "FLT_1D" + velo_struct["units"] = "m.s^-1" + extra_structures["core_profiles"][f"core_profiles.profiles_1d.:.ion.:.velocity.toroidal_fit.measured"] = velo_struct + extra_structures["core_profiles"][f"core_profiles.profiles_1d.:.ion.:.velocity.toroidal_fit.measured_error_upper"] = velo_struct + add_extra_structures(extra_structures) + + +@machine_mapping_function(__regression_arguments__, pulse=194842001, PROFILES_tree="OMFIT_PROFS") +def core_profiles_profile_1d(ods, pulse, PROFILES_tree="OMFIT_PROFS"): + add_extra_profile_structures() + ods["core_profiles.ids_properties.homogeneous_time"] = 1 + if "OMFIT_PROFS" in PROFILES_tree: + omfit_profiles_node = '\\TOP.' 
+ query = { + "electrons.density_thermal": "N_E", + "electrons.density_fit.measured": "RW_N_E", + "electrons.temperature": "T_E", + "electrons.temperature_fit.measured": "RW_T_E", + "ion[0].density_thermal": "N_D", + "ion[0].temperature": "T_D", + "ion[1].velocity.toroidal": "V_TOR_C", + "ion[1].velocity.toroidal_fit.measured": "RW_V_TOR_C", + "ion[1].density_thermal": "N_C", + "ion[1].density_fit.measured": "RW_N_C", + "ion[1].temperature": "T_C", + "ion[1].temperature_fit.measured": "RW_T_C", + } + uncertain_entries = list(query.keys()) + query["electrons.density_fit.psi_norm"] = "PS_N_E" + query["electrons.temperature_fit.psi_norm"] = "PS_T_E" + query["ion[1].density_fit.psi_norm"] = "PS_N_C" + query["ion[1].temperature_fit.psi_norm"] = "PS_T_C" + query["ion[1].velocity.toroidal_fit.psi_norm"]= "PS_V_TOR_C" + #query["j_total"] = "J_TOT" + #query["pressure_perpendicular"] = "P_TOT" + query["e_field.radial"] = "ER_C" + query["grid.rho_tor_norm"] = "rho" + normal_entries = set(query.keys()) - set(uncertain_entries) + for entry in query: + query[entry] = omfit_profiles_node + query[entry] + for entry in uncertain_entries: + query[entry + "_error_upper"] = "error_of(" + query[entry] + ")" + data = mdsvalue('d3d', treename=PROFILES_tree, pulse=pulse, TDI=query).raw() + if data is None: + print("No mds+ data") + raise ValueError(f"Could not find any data in MDS+ for {pulse} and {PROFILES_tree}") + dim_info = mdsvalue('d3d', treename=PROFILES_tree, pulse=pulse, TDI="\\TOP.n_e") + data['time'] = dim_info.dim_of(1) * 1.e-3 + psi_n = dim_info.dim_of(0) + data['grid.rho_pol_norm'] = np.zeros((data['time'].shape + psi_n.shape)) + data['grid.rho_pol_norm'][:] = np.sqrt(psi_n) + # for density_thermal in densities: + # data[density_thermal] *= 1.e6 + for unc in ["", "_error_upper"]: + data[f"ion[0].velocity.toroidal{unc}"] = data[f"ion[1].velocity.toroidal{unc}"] + ods["core_profiles.time"] = data['time'] + sh = "core_profiles.profiles_1d" + for i_time, time in enumerate(data["time"]): + ods[f"{sh}[{i_time}].grid.rho_pol_norm"] = data['grid.rho_pol_norm'][i_time] + for entry in uncertain_entries + ["ion[0].velocity.toroidal"]: + if isinstance(data[entry], Exception): + continue + for i_time, time in enumerate(data["time"]): + try: + ods[f"{sh}[{i_time}]." + entry] = data[entry][i_time] + ods[f"{sh}[{i_time}]." + entry + "_error_upper"] = data[entry + "_error_upper"][i_time] + except Exception as e: + print("Uncertain entry", entry) + print("================ DATA =================") + print(data[entry][i_time]) + print("================ ERROR =================") + print(data[entry + "_error_upper"][i_time]) + print(data[entry][i_time].shape, + data[entry + "_error_upper"][i_time].shape) + print(e) + for entry in normal_entries: + if isinstance(data[entry], Exception): + continue + for i_time, time in enumerate(data["time"]): + try: + ods[f"{sh}[{i_time}]."+entry] = data[entry][i_time] + except: + print("Normal entry", entry) + print("================ DATA =================") + print(data[entry][i_time]) + for i_time, time in enumerate(data["time"]): + ods[f"{sh}[{i_time}].ion[0].element[0].z_n"] = 1 + ods[f"{sh}[{i_time}].ion[0].element[0].a"] = 2.0141 + ods[f"{sh}[{i_time}].ion[1].element[0].z_n"] = 6 + ods[f"{sh}[{i_time}].ion[1].element[0].a"] = 12.011 + ods[f"{sh}[{i_time}].ion[0].label"] = "D" + ods[f"{sh}[{i_time}].ion[1].label"] = "C" + else: + profiles_node = '\\TOP.PROFILES.' 
+ query = { + "electrons.density_thermal": "EDENSFIT", + "electrons.temperature": "ETEMPFIT" + } + for entry in query: + query[entry] = profiles_node + query[entry] + data = mdsvalue('d3d', treename=PROFILES_tree, pulse=pulse, TDI=query).raw() + dim_info = mdsvalue('d3d', treename=PROFILES_tree, pulse=pulse, TDI="\\TOP.PROFILES.EDENSFIT") + data['time'] = dim_info.dim_of(1) * 1.e-3 + rho_tor_norm = dim_info.dim_of(0) + data['grid.rho_tor_norm'] = np.zeros((data['time'].shape + rho_tor_norm.shape)) + data['grid.rho_tor_norm'][:] = rho_tor_norm + ods[f"core_profiles.time"] = data['time'] + for entry in data: + if isinstance(data[entry], Exception): + continue + for i_time, time in enumerate(data["time"]): + ods[f"core_profiles.profiles_1d[{i_time}]."+entry] = data[entry][i_time] + # ================================ @machine_mapping_function(__regression_arguments__, pulse=133221) def core_profiles_global_quantities_data(ods, pulse): @@ -1375,5 +1510,3 @@ def core_profiles_global_quantities_data(ods, pulse): # ================================ -if __name__ == '__main__': - test_machine_mapping_functions(__all__, globals(), locals()) diff --git a/omas/omas_core.py b/omas/omas_core.py index cb2f03a2a..b178b014e 100644 --- a/omas/omas_core.py +++ b/omas/omas_core.py @@ -2914,4 +2914,6 @@ def through_omas_pkl(ods): from .omas_mongo import * from .omas_symbols import * from .omas_machine import * +from .utilities.machine_mapping_decorator import * +from .utilities.omas_mds import * from . import omas_structure diff --git a/omas/omas_machine.py b/omas/omas_machine.py index 37ccedf1d..3c662df2f 100644 --- a/omas/omas_machine.py +++ b/omas/omas_machine.py @@ -6,6 +6,10 @@ from .omas_utils import * from .omas_core import ODS, dynamic_ODS, omas_environment, omas_info_node, imas_json_dir, omas_rcparams from .omas_physics import cocos_signals +from omas.machine_mappings import d3d +from omas.machine_mappings.d3d import __regression_arguments__ +from omas.utilities.machine_mapping_decorator import machine_mapping_function +from omas.utilities.omas_mds import mdsvalue try: from MDSplus.connection import MdsIpException from MDSplus.mdsExceptions import TreeNODATA, TreeNNF @@ -17,11 +21,8 @@ 'machines', 'machine_mappings', 'load_omas_machine', - 'machine_mapping_function', 'test_machine_mapping_functions', - 'mdstree', - 'mdsvalue', - 'reload_machine_mappings', + 'reload_machine_mappings' ] machine_expression_types = ['VALUE', 'EVAL', 'ENVIRON', 'PYTHON', 'TDI', 'eval2TDI'] @@ -225,9 +226,10 @@ def resolve_mapped(ods, machine, pulse, mappings, location, idm, options_with_d namespace['ods'] = ODS() namespace['__file__'] = machines(machine, branch)[:-5] + '.py' printd(f"Calling `{call}` in {os.path.basename(namespace['__file__'])}", topic='machine') - tmp = compile(call, namespace['__file__'], 'exec') - exec(tmp, namespace) - ods = namespace[mapped.get('RETURN', 'ods')] + # Add the callback for mapping updates + # By supplyinh the function to the decorator we avoid a ringinclusion + call_w_update_mapping = call[:-1] + ", update_callback=update_mapping)" + exec( machine + "." 
+ call_w_update_mapping) if isinstance(cache, dict): cache[call] = ods if location.endswith(':'): @@ -610,65 +612,8 @@ def update_mapping(machine, location, value, cocosio=None, default_options=None, return new_raw_mappings -# =================== -# machine mapping functions -# =================== -def machine_mapping_function(__regression_arguments__, **regression_args): - """ - Decorator used to identify machine mapping functions - - :param \**regression_args: arguments used to run regression test - - NOTE: use `inspect.unwrap(function)` to call a function decorated with `@machine_mapping_function` - from another function decorated with `@machine_mapping_function` - """ - - __all__ = __regression_arguments__['__all__'] - - def machine_mapping_decorator(f, __all__): - __all__.append(f.__name__) - if __regression_arguments__ is not None: - __regression_arguments__[f.__name__] = regression_args - - @functools.wraps(f) - def machine_mapping_caller(*args, **kwargs): - clean_ods = True - if len(args[0]): - clean_ods = False - if clean_ods and omas_git_repo: - import inspect - # figure out the machine name from where the function `f` is defined - machine = os.path.splitext(os.path.split(inspect.getfile(f))[1])[0] - if machine == '': # if `f` is called via exec then we need to look at the call stack to figure out the machine name - machine = os.path.splitext(os.path.split(inspect.getframeinfo(inspect.currentframe().f_back)[0])[1])[0] - - # call signature - argspec = inspect.getfullargspec(f) - f_args_str = ", ".join('{%s!r}' % item for item in argspec.args if not item.startswith('_')) - # f_args_str = ", ".join(item + '={%s!r}' % item for item in argspec.args if not item.startswith('_')) # to use keywords arguments - call = f"{f.__qualname__}({f_args_str})".replace('{ods!r}', 'ods').replace('{pulse!r}', '{pulse}') - default_options = None - if argspec.defaults: - default_options = dict(zip(argspec.args[::-1], argspec.defaults[::-1])) - default_options = {item: value for item, value in default_options.items() if not item.startswith('_')} - - # call - out = f(*args, **kwargs) - - # update mappings definitions - if clean_ods and omas_git_repo: - for ulocation in numpy.unique(list(map(o2u, args[0].flat().keys()))): - update_mapping(machine, ulocation, {'PYTHON': call}, 11, default_options, update_path=True) - - return out - - return machine_mapping_caller - - return lambda f: machine_mapping_decorator(f, __all__) - - -def test_machine_mapping_functions(__all__, global_namespace, local_namespace): +def test_machine_mapping_functions(machine, __all__, global_namespace, local_namespace): """ Function used to test python mapping functions @@ -682,7 +627,7 @@ def test_machine_mapping_functions(__all__, global_namespace, local_namespace): os.environ['OMAS_DEBUG_TOPIC'] = 'machine' # call machine mapping to make sure the json file is properly formatted - machine = os.path.splitext(os.path.split(local_namespace['__file__'])[1])[0] + # machine = os.path.splitext(os.path.split(local_namespace['__file__'])[1])[0] print(f'Sanity check of `{machine}` mapping files: ... ', end='') machine_mappings(machine, '', raise_errors=True) print('OK') @@ -695,10 +640,11 @@ def test_machine_mapping_functions(__all__, global_namespace, local_namespace): print(func_name) pprint(regression_kw) print('=' * len(func_name)) - ods = ODS() - func = eval(func_name, global_namespace, local_namespace) + ods = ODS() #consistency_check= not break_schema + func = eval(machine + "." 
+ func_name, global_namespace, local_namespace) try: try: + regression_kw["update_callback"] = update_mapping func(ods, **regression_kw) except Exception: raise @@ -730,217 +676,6 @@ def test_machine_mapping_functions(__all__, global_namespace, local_namespace): os.environ['OMAS_DEBUG_TOPIC'] = old_OMAS_DEBUG_TOPIC -# =================== -# MDS+ functions -# =================== -def tunnel_mds(server, treename): - """ - Resolve MDS+ server - NOTE: This function makes use of the optional `omfit_classes` dependency to establish a SSH tunnel to the MDS+ server. - - :param server: MDS+ server address:port - - :param treename: treename (in case treename affects server to be used) - - :return: string with MDS+ server and port to be used - """ - try: - import omfit_classes.omfit_mds - except (ImportError, ModuleNotFoundError): - return server.format(**os.environ) - else: - server0 = omfit_classes.omfit_mds.translate_MDSserver(server, treename) - tunneled_server = omfit_classes.omfit_mds.tunneled_MDSserver(server0, quiet=False) - return tunneled_server - - return server.format(**os.environ) - - -_mds_connection_cache = {} - - -class mdstree(dict): - """ - Class to handle the structure of an MDS+ tree. - Nodes in this tree are mdsvalue objects - """ - - def __init__(self, server, treename, pulse): - pulse = int(pulse) - for TDI in sorted(mdsvalue(server, treename, pulse, rf'getnci("***","FULLPATH")').raw())[::-1]: - try: - TDI = TDI.decode('utf8') - except AttributeError: - pass - TDI = TDI.strip() - path = TDI.replace('::TOP', '').lstrip('\\').replace(':', '.').split('.') - h = self - for p in path[1:-1]: - h = h.setdefault(p, mdsvalue(server, treename, pulse, '')) - if path[-1] not in h: - h[path[-1]] = mdsvalue(server, treename, pulse, TDI) - else: - h[path[-1]].TDI = TDI - - -class mdsvalue(dict): - """ - Execute MDS+ TDI functions - """ - - def __init__(self, server, treename, pulse, TDI, old_MDS_server=False): - self.treename = treename - self.pulse = int(pulse) - self.TDI = TDI - if 'nstx' in server: - old_MDS_server = True - try: - # handle the case that server is just the machine name - tmp = machine_mappings(server, '') - except NotImplementedError: - # hanlde case where server is actually a URL - if '.' 
not in server: - raise - else: - if '__mdsserver__' not in tmp or not len(tmp['__mdsserver__']): - raise Exception(f'Must specify `__mdsserver__` for {server}') - else: - server = tmp['__mdsserver__'] - self.server = tunnel_mds(server, self.treename) - old_servers = ['skylark.pppl.gov:8500', 'skylark.pppl.gov:8501', 'skylark.pppl.gov:8000'] - if server in old_servers or self.server in old_servers: - old_MDS_server = True - self.old_MDS_server = old_MDS_server - - def data(self): - return self.raw(f'data({self.TDI})') - - def dim_of(self, dim): - return self.raw(f'dim_of({self.TDI},{dim})') - - def units(self): - return self.raw(f'units({self.TDI})') - - def error(self): - return self.raw(f'error({self.TDI})') - - def error_dim_of(self, dim): - return self.raw(f'error_dim_of({self.TDI},{dim})') - - def units_dim_of(self, dim): - return self.raw(f'units_dim_of({self.TDI},{dim})') - - def size(self, dim): - return self.raw(f'size({self.TDI})') - - def raw(self, TDI=None): - """ - Fetch data from MDS+ with connection caching - - :param TDI: string, list or dict of strings - MDS+ TDI expression(s) (overrides the one passed when the object was instantiated) - - :return: result of TDI expression, or dictionary with results of TDI expressions - """ - try: - import time - - t0 = time.time() - import MDSplus - - def mdsk(value): - """ - Translate strings to MDS+ bytes - """ - return str(str(value).encode('utf8')) - - if TDI is None: - TDI = self.TDI - - try: - out_results = None - - # try connecting and re-try on fail - for fallback in [0, 1]: - if (self.server, self.treename, self.pulse) not in _mds_connection_cache: - conn = MDSplus.Connection(self.server) - if self.treename is not None: - conn.openTree(self.treename, self.pulse) - _mds_connection_cache[(self.server, self.treename, self.pulse)] = conn - try: - conn = _mds_connection_cache[(self.server, self.treename, self.pulse)] - break - except Exception as _excp: - if (self.server, self.treename, self.pulse) in _mds_connection_cache: - del _mds_connection_cache[(self.server, self.treename, self.pulse)] - if fallback: - raise - - # list of TDI expressions - if isinstance(TDI, (list, tuple)): - TDI = {expr: expr for expr in TDI} - - # dictionary of TDI expressions - if isinstance(TDI, dict): - # old versions of MDS+ server do not support getMany - if self.old_MDS_server: - results = {} - for tdi in TDI: - try: - results[tdi] = mdsvalue(self.server, self.treename, self.pulse, TDI[tdi]).raw() - except Exception as _excp: - results[tdi] = Exception(str(_excp)) - out_results = results - - # more recent MDS+ server - else: - conns = conn.getMany() - for name, expr in TDI.items(): - conns.append(name, expr) - res = conns.execute() - results = {} - for name, expr in TDI.items(): - try: - results[name] = MDSplus.Data.data(res[mdsk(name)][mdsk('value')]) - except KeyError: - try: - results[name] = MDSplus.Data.data(res[str(name)][str('value')]) - except KeyError: - try: - results[name] = Exception(MDSplus.Data.data(res[mdsk(name)][mdsk('error')])) - except KeyError: - results[name] = Exception(MDSplus.Data.data(res[str(name)][str('error')])) - out_results = results - - # single TDI expression - else: - out_results = MDSplus.Data.data(conn.get(TDI)) - - # return values - return out_results - - except Exception as _excp: - txt = [] - for item in ['server', 'treename', 'pulse']: - txt += [f' - {item}: {getattr(self, item)}'] - txt += [f' - TDI: {TDI}'] - raise _excp.__class__(str(_excp) + '\n' + '\n'.join(txt)) - - finally: - if out_results is not None: - if 
isinstance(out_results, dict): - if all(isinstance(out_results[k], Exception) for k in out_results): - printd(f'{TDI} \tall NO\t {time.time() - t0:3.3f} secs', topic='machine') - elif any(isinstance(out_results[k], Exception) for k in out_results): - printd(f'{TDI} \tsome OK/NO\t {time.time() - t0:3.3f} secs', topic='machine') - else: - printd(f'{TDI} \tall OK\t {time.time() - t0:3.3f} secs', topic='machine') - else: - printd(f'{TDI} \tOK\t {time.time() - t0:3.3f} secs', topic='machine') - else: - printd(f'{TDI} \tNO\t {time.time() - t0:3.3f} secs', topic='machine') - - # =================== # Loading machine data in ODSs # =================== @@ -1034,3 +769,6 @@ def load_omas_machine( print(location) machine_to_omas(ods, machine, pulse, location, options, branch) return ods + +if __name__ == '__main__': + test_machine_mapping_functions('d3d', ["core_profiles_profile_1d"], globals(), locals()) diff --git a/omas/omas_physics.py b/omas/omas_physics.py index 4c07fd33e..5f6123e70 100644 --- a/omas/omas_physics.py +++ b/omas/omas_physics.py @@ -943,7 +943,7 @@ def summary_lineaverage_density(ods, line_grid=2000, time_index=None, update=Tru Zgrid = ods['equilibrium']['time_slice'][time_index]['profiles_2d'][0]['grid']['dim2'] psi2d = ods['equilibrium']['time_slice'][time_index]['profiles_2d'][0]['psi'] - psi_interp = scipy.interpolate.interp2d(Zgrid, Rgrid, psi2d) + psi_spl = RectBivariateSpline(Rgrid, Zgrid, psi2d) psi_eq = ods['equilibrium']['time_slice'][time_index]['profiles_1d']['psi'] rhon_eq = ods['equilibrium']['time_slice'][time_index]['profiles_1d']['rho_tor_norm'] rhon_cp = ods['core_profiles']['profiles_1d'][time_index]['grid']['rho_tor_norm'] @@ -997,7 +997,7 @@ def summary_lineaverage_density(ods, line_grid=2000, time_index=None, update=Tru i1 = zero_crossings[0] i2 = zero_crossings[-1] - psival = [psi_interp(Zline[i], Rline[i])[0] for i in range(i1, i2, numpy.sign(i2 - i1))] + psival = [psi_spl(Rline[i], Zline[i], grid=False).item() for i in range(i1, i2, numpy.sign(i2 - i1))] ne_interp = scipy.interpolate.splev(psival, tck) ne_line = numpy.trapz(ne_interp) ne_line /= abs(i2 - i1) @@ -2507,6 +2507,26 @@ def search_in_array_structure(ods, conditions, no_matches_return=0, no_matches_r return match +@add_to__ALL__ +def get_plot_scale_and_unit(phys_quant, species=None): + """ + Returns the normalizing scale and unit for a physical quantity. + E.g. "temperature" returns 1.e-3 and keV + :param phys_quant: str with a physical quantity. 
Uses IMAS scheme names where possible + :return: scale, unit + """ + if "temperature" in phys_quant: + return 1.e-3, r"\mathrm{keV}" + elif "density" in phys_quant : + if species is not None and species not in ["H", "D", "He"]: + return 1.e-18, r"\times 10^{18}\,\mathrm{m}^{-3}" + else: + return 1.e-19, r"\times 10^{19}\,\mathrm{m}^{-3}" + elif "velocity" in phys_quant: + return 1.e-6, r"\mathrm{Mm}\,\mathrm{s}^{-1}" + elif "e_field" in phys_quant: + return 1.e-3, r"\mathrm{kV}\,\mathrm{m}^{-1}" + @add_to__ALL__ def define_cocos(cocos_ind): diff --git a/omas/omas_plot.py b/omas/omas_plot.py index 9b35ff38a..2c2070ea4 100644 --- a/omas/omas_plot.py +++ b/omas/omas_plot.py @@ -762,7 +762,10 @@ def get2d(contour_quantity): if levels is None and value_1d is not None: if contour_quantity == 'q': max_q = int(numpy.round(omas_interp1d(0.95, x_value_1d, value_1d))) - levels = numpy.arange(max_q) + levels = numpy.arange(numpy.abs(max_q)) + if max_q < 0: + levels *= -1 + levels = levels[::-1] else: levels = numpy.linspace(numpy.min(value_1d), numpy.max(value_1d), 11)[1:-1] levels = numpy.hstack((levels, levels[-1] + (levels[1] - levels[0]) * numpy.arange(100)[1:])) @@ -799,8 +802,10 @@ def get2d(contour_quantity): r = scipy.ndimage.zoom(r, sf) z = scipy.ndimage.zoom(z, sf) value_2d = scipy.ndimage.zoom(value_2d, sf) - - cs = ax.contour(r, z, value_2d, levels=levels, **kw) + if levels is not None: + cs = ax.contour(r, z, value_2d, levels, **kw) + else: + cs = ax.contour(r, z, value_2d, **kw) if label_contours or ((label_contours is None) and (contour_quantity == 'q')): ax.clabel(cs) @@ -948,10 +953,41 @@ def equilibrium_CX_topview(ods, time_index=None, time=None, ax=None, **kw): 'phi_norm': '$\\phi$', 'q': '$q$', } +def plot_1d_equilbrium_quantity(ax, x, y, xlabel, ylabel, title, visible_x = True, **kw): + from matplotlib import pyplot + ax.plot(x, y, **kw) + if visible_x: + ax.set_xlabel(xlabel) + else: + pyplot.setp(ax.get_xticklabels(), visible=False) + ax.set_ylabel(ylabel) + ax.set_title(title) + +@add_to__ODS__ +def equilibrium_quality(ods, fig=None, **kw): + """ + Plot equilibrium convergence error and total Chi-squared as a function of time + + :param ods: input ods + + :param fig: figure to plot in (a new figure is generated if `fig is None`) + """ + from matplotlib import pyplot + axs = kw.pop('ax', {}) + if axs is None: + axs = {} + if not len(axs) and fig is None: + fig = pyplot.figure() + + ax1 = cached_add_subplot(fig, axs, 1, 2, 1) + ax2 = cached_add_subplot(fig, axs, 1, 2, 2, sharex=ax1) + + ax1.plot(ods['equilibrium.time'], ods['equilibrium.time_slice[:].constraints.chi_squared_total']) + ax2.plot(ods['equilibrium.time'], ods['equilibrium.time_slice[:].convergence.grad_shafranov_deviation_value']) @add_to__ODS__ -def equilibrium_summary(ods, time_index=None, time=None, fig=None, ggd_points_triangles=None, **kw): +def equilibrium_summary(ods, time_index=None, time=None, fig=None, ggd_points_triangles=None, omas_viewer=False, **kw): """ Plot equilibrium cross-section and P, q, P', FF' profiles as per `ods['equilibrium']['time_slice'][time_index]` @@ -998,7 +1034,6 @@ def equilibrium_summary(ods, time_index=None, time=None, fig=None, ggd_points_tr return ods_time_plot( equilibrium_summary, ods, time_index, time, fig=fig, ggd_points_triangles=ggd_points_triangles, ax=axs, **kw ) - ax = cached_add_subplot(fig, axs, 1, 3, 1) contour_quantity = kw.pop('contour_quantity', 'rho_tor_norm') tmp = equilibrium_CX( @@ -1010,25 +1045,32 @@ def equilibrium_summary(ods, time_index=None, time=None, 
fig=None, ggd_points_tr if tmp['contour_quantity'] in eq['profiles_1d']: raw_xName = tmp['contour_quantity'] x = eq['profiles_1d'][raw_xName] + xName = nice_names.get(raw_xName, raw_xName) else: raw_xName = 'psi' - x = eq['profiles_1d']['psi_norm'] - x = (x - min(x)) / (max(x) - min(x)) - xName = nice_names.get(raw_xName, raw_xName) + x = ((eq['profiles_1d']['psi'] - eq['global_quantities']['psi_axis']) + / ( eq['global_quantities']['psi_boundary'] - eq['global_quantities']['psi_axis'])) + xName = r"$\Psi_\mathrm{n}$" # pressure ax = cached_add_subplot(fig, axs, 2, 3, 2) - ax.plot(x, eq['profiles_1d']['pressure'], **kw) + if omas_viewer: + ax.plot(-ods[f"equilibrium.code.parameters.time_slice.{time_index}.in1.rpress"], + ods[f"equilibrium.code.parameters.time_slice.{time_index}.in1.pressr"]/1.e3, ".r") + plot_1d_equilbrium_quantity(ax, x, eq['profiles_1d']['pressure'] * 1.e-3, + xName, r"$p$ [kPa]", r'$\,$ Pressure', + visible_x=omas_viewer, **kw) kw.setdefault('color', ax.lines[-1].get_color()) - ax.set_title(r'$\,$ Pressure') - ax.ticklabel_format(style='sci', scilimits=(-1, 2), axis='y') - pyplot.setp(ax.get_xticklabels(), visible=False) # q - ax = cached_add_subplot(fig, axs, 2, 3, 3, sharex=ax) - ax.plot(x, eq['profiles_1d']['q'], **kw) - ax.set_title('$q$ Safety factor') - ax.ticklabel_format(style='sci', scilimits=(-1, 2), axis='y') + if omas_viewer: + ax = cached_add_subplot(fig, axs, 2, 3, 5, sharex=ax) + else: + ax = cached_add_subplot(fig, axs, 2, 3, 3, sharex=ax) + plot_1d_equilbrium_quantity(ax, x, numpy.abs(eq['profiles_1d']['q']), + xName, r'$q$ Safety factor', r'$q$ Safety factor', + visible_x=omas_viewer, **kw) + #ax.ticklabel_format(style='sci', scilimits=(-1, 2), axis='y') if 'label' in kw: leg = ax.legend(loc=0) import matplotlib @@ -1037,28 +1079,65 @@ def equilibrium_summary(ods, time_index=None, time=None, fig=None, ggd_points_tr leg.set_draggable(True) else: leg.draggable(True) - pyplot.setp(ax.get_xticklabels(), visible=False) - - # dP_dpsi - ax = cached_add_subplot(fig, axs, 2, 3, 5, sharex=ax) - ax.plot(x, eq['profiles_1d']['dpressure_dpsi'], **kw) - ax.set_title(r"$P\,^\prime$ source function") - ax.ticklabel_format(style='sci', scilimits=(-1, 2), axis='y') - pyplot.xlabel(xName) - - # FdF_dpsi - ax = cached_add_subplot(fig, axs, 2, 3, 6, sharex=ax) - ax.plot(x, eq['profiles_1d']['f_df_dpsi'], **kw) - ax.set_title(r"$FF\,^\prime$ source function") - ax.ticklabel_format(style='sci', scilimits=(-1, 2), axis='y') - pyplot.xlabel(xName) - + if not omas_viewer: + pyplot.setp(ax.get_xticklabels(), visible=False) + if omas_viewer: + ax = cached_add_subplot(fig, axs, 2, 3, 3, sharex=ax) + ax.plot(ods[f"equilibrium.code.parameters.time_slice.{time_index}.inwant.sizeroj"], + ods[f"equilibrium.code.parameters.time_slice.{time_index}.inwant.vzeroj"] / 1.e6, ".r") + plot_1d_equilbrium_quantity(ax, x, eq['profiles_1d']['j_tor']/1.e6, + xName, r"$\langle j_\mathrm{tor} / R \rangle$ [MA m$^{-2}$]", + r"$j_\mathrm{tor}$", + visible_x=omas_viewer, **kw) + else: + ax = cached_add_subplot(fig, axs, 2, 3, 5, sharex=ax) + plot_1d_equilbrium_quantity(ax, x, eq['profiles_1d']['dpressure_dpsi'] * 1.e-3, + xName, r'$P\,^\prime$ [kPa Wb$^{-1}$]', + r"$P\,^\prime$ source function", + visible_x=True, **kw) if raw_xName.endswith('norm'): ax.set_xlim([0, 1]) - + if omas_viewer: + ax = cached_add_subplot(fig, axs, 2, 3, 6) + ax = cached_add_subplot(fig, axs, 2, 3, 6) + # ax.plot(eq['profiles_1d']['convergence']['iteration'], + # 
eq['profiles_1d']['convergence']['grad_shafranov_deviation_value'], **kw) + diag_chi_2 = [] + labels = [] + try: + diag_chi_2 += list(eq[f'constraints.pf_current[:].chi_squared'].flatten()) + for i in range(len(diag_chi_2)): + labels.append("PF coil " + ods[f'pf_active.coil[{i}].identifier']) + except: + printd("Failed to find pf_active chi^2. Skipping pf_active in chi^2 plot.") + for constraint, imas_magnetics_id, nice_label in zip(["flux_loop", "bpol_probe"], + ["flux_loop", "b_field_pol_probe"], + ["Flux loop ", r"$B_\mathrm{pol}$ Probe "]): + chi_2 = list(eq[f'constraints.{constraint}[:].chi_squared']) + for i in range(len(chi_2)): + labels.append(nice_label + ods[f'magnetics.{imas_magnetics_id}[{i}].identifier']) + diag_chi_2 += chi_2 + indices = numpy.array(range(len(diag_chi_2))) + 1 + plot_1d_equilbrium_quantity(ax, indices, diag_chi_2, + r"Diagnostic #", r"$\chi^2$ convergence", + r"Magnetics $\chi^2$", + visible_x=True, marker="+", linestyle='', **kw) + for i_label, label in enumerate(labels): + annot = ax.annotate(label, xy=(indices[i_label],diag_chi_2[i_label]), + xytext=(20,20),textcoords="offset points", + bbox=dict(boxstyle="round", fc="w"), + arrowprops=dict(arrowstyle="->")) + annot.set_visible(False) + else: + ax = cached_add_subplot(fig, axs, 2, 3, 6, sharex=ax) + ax = cached_add_subplot(fig, axs, 2, 3, 6, sharex=ax) + # FdF_dpsi + plot_1d_equilbrium_quantity(ax, x, eq['profiles_1d']['f_df_dpsi'], + xName, r'$FF\,^\prime$ [T$^2$ m$^2$ Wb$^{-1}$]', + r"$FF\,^\prime$ source function", + visible_x=True, **kw) return {'ax': axs} - @add_to__ODS__ def core_profiles_currents_summary(ods, time_index=None, time=None, ax=None, **kw): """ @@ -1110,9 +1189,10 @@ def core_profiles_currents_summary(ods, time_index=None, time=None, ax=None, **k ax.set_xlabel(r'$\rho_{tor}$') return {'ax': ax} - @add_to__ODS__ -def core_profiles_summary(ods, time_index=None, time=None, fig=None, ods_species=None, quantities=['density_thermal', 'temperature'], **kw): +def core_profiles_summary(ods, time_index=None, time=None, fig=None, + ods_species=None, quantities=['density_thermal', 'temperature'], + x_axis = "rho_tor_norm", **kw): """ Plot densities and temperature profiles for electrons and all ion species as per `ods['core_profiles']['profiles_1d'][time_index]` @@ -1139,7 +1219,7 @@ def core_profiles_summary(ods, time_index=None, time=None, fig=None, ods_species """ from matplotlib import pyplot - + from omas.omas_physics import get_plot_scale_and_unit axs = kw.pop('ax', {}) if axs is None: axs = {} @@ -1157,16 +1237,29 @@ def core_profiles_summary(ods, time_index=None, time=None, fig=None, ods_species ) prof1d = ods['core_profiles']['profiles_1d'][time_index] - rho = prof1d['grid.rho_tor_norm'] - + if x_axis == "psi_norm": + x = prof1d['grid.rho_pol_norm']**2 + x_label = r"$\Psi_\mathrm{n}$" + else: + x = prof1d[f'grid.{x_axis}'] + if "tor" in x_axis: + x_label = r'$\rho$' + elif "pol" in x_axis: + x_label = r'$\rho_\mathrm{pol}$' # Determine subplot rows x cols if ods_species is None: ncols = len(prof1d['ion']) + 1 ods_species = [-1] + list(prof1d['ion']) else: ncols = len(ods_species) - - nplots = sum([ncols if 'density' in i or 'temperature' in i else 1 for i in quantities]) + nplots = 0 + for quant in quantities: + if 'density' in quant or 'temperature' in quant: + nplots += ncols + elif 'velocity' in quant: + nplots += ncols - 1 + else: + nplots += 1 nrows = int(numpy.ceil(nplots / ncols)) # Generate species with corresponding name @@ -1177,54 +1270,114 @@ def core_profiles_summary(ods, 
time_index=None, time=None, fig=None, ods_species label_name = [] label_name_z = [] unit_list = [] + data_list = [] for q in quantities: - if 'density' in q or 'temperature' in q: + if 'density' in q or 'temperature' in q or "velocity.toroidal" in q : for index, specie in enumerate(species_in_tree): - unit_list.append(omas_info_node(o2u(f"core_profiles.profiles_1d.0.{specie}.{q}"))['units']) + #unit_list.append(omas_info_node(o2u(f"core_profiles.profiles_1d.0.{specie}.{q}"))['units']) if q in prof1d[specie]: - if 'density' in q and 'ion' in specie and prof1d[specie]['element[0].z_n'] != 1.0: - plotting_list.append(prof1d[specie][q] * prof1d[specie]['element[0].z_n']) - label_name_z.append(r'$\times$' + f" {int(prof1d[specie]['element[0].z_n'])}") + if "label" in prof1d[specie]: + scale, unit = get_plot_scale_and_unit(q, prof1d[specie]["label"]) + else: + scale, unit = get_plot_scale_and_unit(q) + unit_list.append(unit) + # if 'density' in q and 'ion' in specie and prof1d[specie]['element[0].z_n'] != 1.0: + # plotting_list.append(prof1d[specie][q]*scale * prof1d[specie]['element[0].z_n']) + # label_name_z.append(r'$\times$' + f" {int(prof1d[specie]['element[0].z_n'])}") + # else: + + if q + "_error_upper" in prof1d[specie] and len(prof1d[specie][q]) == len(prof1d[specie][q + "_error_upper"]): + plotting_list.append(unumpy.uarray(prof1d[specie][q]*scale, + prof1d[specie][q + "_error_upper"]*scale)) else: - plotting_list.append(prof1d[specie][q]) - label_name_z.append("") + plotting_list.append(prof1d[specie][q]*scale) + if x_axis == "psi_norm": + try: + data_list.append([prof1d[specie][q + "_fit.psi_norm"], + prof1d[specie][q + "_fit.measured"]*scale, + prof1d[specie][q + "_fit.measured_error_upper"]*scale]) + except Exception as e: + data_list.append(None) + else: + data_list.append(None) + label_name_z.append("") label_name.append(f'{names[index]} {q.capitalize()}') - - else: - plotting_list.append(numpy.zeros(len(rho))) - - else: + elif "e_field.radial" not in q: unit_list.append(omas_info_node(o2u(f"core_profiles.profiles_1d.0.{q}"))['units']) plotting_list.append(prof1d[q]) label_name.append(q.capitalize()) - + data_list.append(None) + if "e_field.radial" in quantities: + try: + scale, unit = get_plot_scale_and_unit("e_field.radial") + unit_list.append(unit) + plotting_list.append(prof1d["e_field.radial"]*scale) + label_name_z.append("") + label_name.append('e_field.radial') + data_list.append(None) + except: + pass + last_quant = None for index, y in enumerate(plotting_list): + if index >= len(label_name): + break plot = index + 1 - if index % ncols == 0: + # if index % ncols == 0: + # sharey = None + # sharex = None + # el + if index == 0: sharey = None sharex = None - elif 'Density' in label_name[index] or 'Temperature' in label_name[index]: - sharey = ax - sharex = ax + try: + if last_quant.split(" ")[-1] == label_name[index].split(" ")[-1]: + sharex = ax + sharey = ax + else: + sharex = ax + sharey = None + except: + sharex = None + sharey = None + last_quant = label_name[index] ax = cached_add_subplot(fig, axs, nrows, ncols, plot, sharex=sharex, sharey=sharey) - - uband(rho, y, ax=ax, **kw) + if data_list[index] is not None: + mask = numpy.ones(data_list[index][0].shape, dtype=bool) + # Remove NaNs + for j in range(3): + mask[numpy.isnan(data_list[index][j])] = False + # Remove measuremetns with 100% or more uncertainty + x_data = data_list[index][0][mask] + y_data = data_list[index][1][mask] + y_data_err = data_list[index][2][mask] + mask = mask[mask] + 
mask[numpy.abs(y_data_err[mask]) > numpy.abs(y_data[mask])] = False + if numpy.any(mask): + ax.errorbar(x_data[mask], y_data[mask], y_data_err[mask], + linestyle='', marker=".", color=(1.0, 0.0, 0.0, 0.3), zorder=-10, **kw) + uband(x, y, ax=ax, **kw) + + species_label = label_name[index].split()[0] + species_label = species_label.replace("electron", "e") if "Temp" in label_name[index]: - ax.set_ylabel(r'$T_{{{}}}$'.format(label_name[index].split()[0]) + imas_units_to_latex(unit_list[index])) + ax.set_ylabel(r'$T_{{{}}}$'.format(species_label) + imas_units_to_latex(unit_list[index])) elif "Density" in label_name[index]: - ax.set_ylabel(r'$n_{{{}}}$'.format(label_name[index].split()[0]) + imas_units_to_latex(unit_list[index]) + label_name_z[index]) + ax.set_ylabel(r'$n_{{{}}}$'.format(species_label) + imas_units_to_latex(unit_list[index]) + label_name_z[index]) + elif "e_field" in label_name[index].lower(): + ax.set_ylabel(r'$E_\mathrm{r}$' + imas_units_to_latex(unit_list[index])) + elif "Velocity" in label_name[index]: + ax.set_ylabel(r"$v_\mathrm{" + species_label[0] + r"}$" + imas_units_to_latex(unit_list[index])) else: ax.set_ylabel(label_name[index][:10] + imas_units_to_latex(unit_list[index])) if (nplots - plot) < ncols: - ax.set_xlabel('$\\rho$') + ax.set_xlabel(x_label) + if 'label' in kw: ax.legend(loc='lower center') - ax.set_xlim([0, 1]) - + ax.set_xlim(0, 1) return {'ax': axs, 'fig': fig} - @add_to__ODS__ def core_profiles_pressures(ods, time_index=None, time=None, ax=None, **kw): """ diff --git a/omas/utilities/machine_mapping_decorator.py b/omas/utilities/machine_mapping_decorator.py new file mode 100644 index 000000000..9ee4ae395 --- /dev/null +++ b/omas/utilities/machine_mapping_decorator.py @@ -0,0 +1,66 @@ +from omas.omas_setup import omas_git_repo +import os +import functools +import numpy +from omas.omas_core import o2u +__all__ = [ + 'machine_mapping_function' +] + + +# =================== +# machine mapping functions +# =================== +def machine_mapping_function(__regression_arguments__, **regression_args): + """ + Decorator used to identify machine mapping functions + + :param \**regression_args: arguments used to run regression test + + NOTE: use `inspect.unwrap(function)` to call a function decorated with `@machine_mapping_function` + from another function decorated with `@machine_mapping_function` + """ + + __all__ = __regression_arguments__['__all__'] + + def machine_mapping_decorator(f, __all__): + __all__.append(f.__name__) + if __regression_arguments__ is not None: + __regression_arguments__[f.__name__] = regression_args + + @functools.wraps(f) + def machine_mapping_caller(*args, **kwargs): + clean_ods = True + if len(args[0]): + clean_ods = False + if clean_ods and omas_git_repo: + import inspect + + # figure out the machine name from where the function `f` is defined + machine = os.path.splitext(os.path.split(inspect.getfile(f))[1])[0] + if machine == '': # if `f` is called via exec then we need to look at the call stack to figure out the machine name + machine = os.path.splitext(os.path.split(inspect.getframeinfo(inspect.currentframe().f_back)[0])[1])[0] + + # call signature + argspec = inspect.getfullargspec(f) + f_args_str = ", ".join('{%s!r}' % item for item in argspec.args if not item.startswith('_')) + # f_args_str = ", ".join(item + '={%s!r}' % item for item in argspec.args if not item.startswith('_')) # to use keywords arguments + call = f"{f.__qualname__}({f_args_str})".replace('{ods!r}', 'ods').replace('{pulse!r}', '{pulse}') + default_options 
= None + if argspec.defaults: + default_options = dict(zip(argspec.args[::-1], argspec.defaults[::-1])) + default_options = {item: value for item, value in default_options.items() if not item.startswith('_')} + + # call + update_mapping = kwargs.pop("update_callback") + out = f(*args, **kwargs) + # update mappings definitions + if clean_ods and omas_git_repo: + for ulocation in numpy.unique(list(map(o2u, args[0].flat().keys()))): + update_mapping(machine, ulocation, {'PYTHON': call}, 11, default_options, update_path=True) + + return out + + return machine_mapping_caller + + return lambda f: machine_mapping_decorator(f, __all__) diff --git a/omas/utilities/omas_mds.py b/omas/utilities/omas_mds.py new file mode 100644 index 000000000..07a4cdfd6 --- /dev/null +++ b/omas/utilities/omas_mds.py @@ -0,0 +1,216 @@ +import json +import os +from omas.omas_utils import printd + +__all__ = [ + 'mdstree', + 'mdsvalue' ] + +_mds_connection_cache = {} + +# =================== +# MDS+ functions +# =================== +def tunnel_mds(server, treename): + """ + Resolve MDS+ server + NOTE: This function makes use of the optional `omfit_classes` dependency to establish a SSH tunnel to the MDS+ server. + + :param server: MDS+ server address:port + + :param treename: treename (in case treename affects server to be used) + + :return: string with MDS+ server and port to be used + """ + try: + import omfit_classes.omfit_mds + except (ImportError, ModuleNotFoundError): + return server.format(**os.environ) + else: + server0 = omfit_classes.omfit_mds.translate_MDSserver(server, treename) + tunneled_server = omfit_classes.omfit_mds.tunneled_MDSserver(server0, quiet=False) + return tunneled_server + + return server.format(**os.environ) + + + + + +class mdsvalue(dict): + """ + Execute MDS+ TDI functions + """ + + def __init__(self, server, treename, pulse, TDI, old_MDS_server=False): + self.treename = treename + self.pulse = pulse + self.TDI = TDI + if 'nstx' in server: + old_MDS_server = True + try: + # handle the case that server is just the machine name + machine_mappings_path = os.path.join(os.path.dirname(__file__), "../", "machine_mappings") + machine_mappings_path = os.path.join(machine_mappings_path, server + ".json") + with open(machine_mappings_path, "r") as machine_file: + server = json.load(machine_file)["__mdsserver__"] + except Exception: + # handle case where server is actually a URL + if '.' 
not in server: + raise + self.server = tunnel_mds(server, self.treename) + old_servers = ['skylark.pppl.gov:8500', 'skylark.pppl.gov:8501', 'skylark.pppl.gov:8000'] + if server in old_servers or self.server in old_servers: + old_MDS_server = True + self.old_MDS_server = old_MDS_server + + def data(self): + return self.raw(f'data({self.TDI})') + + def dim_of(self, dim): + return self.raw(f'dim_of({self.TDI},{dim})') + + def units(self): + return self.raw(f'units({self.TDI})') + + def error(self): + return self.raw(f'error({self.TDI})') + + def error_dim_of(self, dim): + return self.raw(f'error_dim_of({self.TDI},{dim})') + + def units_dim_of(self, dim): + return self.raw(f'units_dim_of({self.TDI},{dim})') + + def size(self, dim): + return self.raw(f'size({self.TDI})') + + def raw(self, TDI=None): + """ + Fetch data from MDS+ with connection caching + + :param TDI: string, list or dict of strings + MDS+ TDI expression(s) (overrides the one passed when the object was instantiated) + + :return: result of TDI expression, or dictionary with results of TDI expressions + """ + try: + import time + + t0 = time.time() + import MDSplus + + def mdsk(value): + """ + Translate strings to MDS+ bytes + """ + return str(str(value).encode('utf8')) + + if TDI is None: + TDI = self.TDI + + try: + out_results = None + + # try connecting and re-try on fail + for fallback in [0, 1]: + if (self.server, self.treename, self.pulse) not in _mds_connection_cache: + conn = MDSplus.Connection(self.server) + if self.treename is not None: + conn.openTree(self.treename, self.pulse) + _mds_connection_cache[(self.server, self.treename, self.pulse)] = conn + try: + conn = _mds_connection_cache[(self.server, self.treename, self.pulse)] + break + except Exception as _excp: + if (self.server, self.treename, self.pulse) in _mds_connection_cache: + del _mds_connection_cache[(self.server, self.treename, self.pulse)] + if fallback: + raise + + # list of TDI expressions + if isinstance(TDI, (list, tuple)): + TDI = {expr: expr for expr in TDI} + + # dictionary of TDI expressions + if isinstance(TDI, dict): + # old versions of MDS+ server do not support getMany + if self.old_MDS_server: + results = {} + for tdi in TDI: + try: + results[tdi] = mdsvalue(self.server, self.treename, self.pulse, TDI[tdi]).raw() + except Exception as _excp: + results[tdi] = Exception(str(_excp)) + out_results = results + + # more recent MDS+ server + else: + conns = conn.getMany() + for name, expr in TDI.items(): + conns.append(name, expr) + res = conns.execute() + results = {} + for name, expr in TDI.items(): + try: + results[name] = MDSplus.Data.data(res[mdsk(name)][mdsk('value')]) + except KeyError: + try: + results[name] = MDSplus.Data.data(res[str(name)][str('value')]) + except KeyError: + try: + results[name] = Exception(MDSplus.Data.data(res[mdsk(name)][mdsk('error')])) + except KeyError: + results[name] = Exception(MDSplus.Data.data(res[str(name)][str('error')])) + out_results = results + + # single TDI expression + else: + out_results = MDSplus.Data.data(conn.get(TDI)) + + # return values + return out_results + + except Exception as _excp: + txt = [] + for item in ['server', 'treename', 'pulse']: + txt += [f' - {item}: {getattr(self, item)}'] + txt += [f' - TDI: {TDI}'] + raise _excp.__class__(str(_excp) + '\n' + '\n'.join(txt)) + + finally: + if out_results is not None: + if isinstance(out_results, dict): + if all(isinstance(out_results[k], Exception) for k in out_results): + printd(f'{TDI} \tall NO\t {time.time() - t0:3.3f} secs', topic='machine') 
+ elif any(isinstance(out_results[k], Exception) for k in out_results): + printd(f'{TDI} \tsome OK/NO\t {time.time() - t0:3.3f} secs', topic='machine') + else: + printd(f'{TDI} \tall OK\t {time.time() - t0:3.3f} secs', topic='machine') + else: + printd(f'{TDI} \tOK\t {time.time() - t0:3.3f} secs', topic='machine') + else: + printd(f'{TDI} \tNO\t {time.time() - t0:3.3f} secs', topic='machine') + +class mdstree(dict): + """ + Class to handle the structure of an MDS+ tree. + Nodes in this tree are mdsvalue objects + """ + + def __init__(self, server, treename, pulse): + for TDI in sorted(mdsvalue(server, treename, pulse, rf'getnci("***","FULLPATH")').raw())[::-1]: + try: + TDI = TDI.decode('utf8') + except AttributeError: + pass + TDI = TDI.strip() + path = TDI.replace('::TOP', '').lstrip('\\').replace(':', '.').split('.') + h = self + for p in path[1:-1]: + h = h.setdefault(p, mdsvalue(server, treename, pulse, '')) + if path[-1] not in h: + h[path[-1]] = mdsvalue(server, treename, pulse, TDI) + else: + h[path[-1]].TDI = TDI \ No newline at end of file diff --git a/sphinx/source/how.rst b/sphinx/source/how.rst index 90e20e73a..cf9293df7 100644 --- a/sphinx/source/how.rst +++ b/sphinx/source/how.rst @@ -102,7 +102,7 @@ The `ODS` class extends native Python dictionary and list classes with: .. code-block:: python - ods['core_profiles.profiles_1d.0.electrons.density_thermal'].xarray() + ods['core_profiles.profiles_1d.0.electrons.density_thermal'].xarray() 14. Conveniently **plot individual quantities**: