helper.py
from __future__ import annotations
import errno
import json
import os
import re
from os import path
from typing import List, Set, Protocol, Dict, Iterator, Optional
from model.ResourceQueryingMetaData import ResourceQueryingMetaData
from model.UiDataModel import TermCode
def traverse_tree(result: List[TermCode], node: dict):
"""
    Traverses the tree and collects the term codes of all selectable nodes
    :param result: list collecting the term codes of the selectable nodes
:param node: the current tree node
"""
if children := node.get("children"):
for child in children:
if child.get("selectable"):
result += [TermCode(**termCode) for termCode in child.get("termCodes")]
traverse_tree(result, child)
def get_term_selectable_codes_from_ui_profile(profile: dict) -> Set[TermCode]:
"""
    Gets the term codes of all selectable nodes from the ui profile
    :param profile: ui profile
    :return: set of term codes of the selectable nodes
"""
result = []
if profile.get("selectable"):
result += [TermCode(**termCode) for termCode in profile.get("termCodes")]
traverse_tree(result, profile)
return set(result)
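# Illustrative sketch (not part of the original module): how the two helpers above collect
# selectable term codes from a UI profile. The profile shape and the TermCode fields
# (system, code, display) shown here are assumptions for demonstration purposes only.
#
# >>> profile = {
# ...     "selectable": True,
# ...     "termCodes": [{"system": "sys", "code": "root", "display": "Root"}],
# ...     "children": [
# ...         {"selectable": True,
# ...          "termCodes": [{"system": "sys", "code": "child", "display": "Child"}],
# ...          "children": []}
# ...     ]
# ... }
# >>> {tc.code for tc in get_term_selectable_codes_from_ui_profile(profile)}  # doctest: +SKIP
# {'root', 'child'}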
def to_upper_camel_case(string: str) -> str:
"""
Convert a string to upper camel case
:param string: input string
:return: the string in upper camel case
"""
result = ""
    if re.fullmatch(r"([A-Z][a-z0-9]+)+", string):
return string
for substring in string.split(" "):
result += substring.capitalize()
return result
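# Example (illustrative): strings that are already in upper camel case pass through
# unchanged, everything else is capitalized per space-separated word.
#
# >>> to_upper_camel_case("body weight")
# 'BodyWeight'
# >>> to_upper_camel_case("BodyWeight")
# 'BodyWeight'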
def download_simplifier_packages(package_names: List[str]):
"""
    Downloads the given simplifier packages (the MII core data sets) and saves their profiles in the
    resources/core_data_sets folder
    :param package_names: names of the simplifier packages to download
"""
mkdir_if_not_exists("resources/core_data_sets")
for dataset in package_names:
saved_path = os.getcwd()
os.chdir("resources/core_data_sets")
os.system(f"fhir install {dataset} --here")
os.chdir(saved_path)
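# Usage sketch: the package name below is a placeholder, not one shipped with this project.
# Assumes the `fhir` command line tool (Firely Terminal) is installed and on the PATH.
#
# >>> download_simplifier_packages(["de.example.core.dataset"])  # doctest: +SKIP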
def mkdir_if_not_exists(directory: str):
"""
Creates a directory if it does not exist
:param directory: name of the directory
:raises OSError: if the directory could not be created
"""
if not path.isdir(f"./{directory}"):
try:
os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise
def generate_snapshots(package_dir: str, prerequisite_packages: Optional[List[str]] = None, reinstall: bool = False):
"""
    Generates the snapshots for all profiles in the package_dir folder and its subfolders
    :param package_dir: directory of the package
    :param prerequisite_packages: list of prerequisite packages to install first
    :param reinstall: if true, the required packages will be reinstalled
:raises FileNotFoundError: if the package directory could not be found
:raises NotADirectoryError: if the package directory is not a directory
"""
def install_prerequisites():
if os.path.exists("package.json"):
os.remove("package.json")
os.system("fhir install hl7.fhir.r4.core")
for package in prerequisite_packages:
if os.path.exists("package.json"):
os.remove("package.json")
os.system(f"fhir install {package} --here")
def generate_snapshot():
os.system(f"fhir push {file}")
os.system(f"fhir snapshot")
os.system(f"fhir save {file[:-5]}-snapshot.json")
prerequisite_packages = prerequisite_packages if prerequisite_packages else []
if not os.path.exists(package_dir):
raise FileNotFoundError(f"Package directory does not exist: {package_dir}")
if not os.path.isdir(package_dir):
raise NotADirectoryError("package_dir must be a directory")
saved_path = os.getcwd()
if reinstall or not (os.path.exists("fhirpkg.lock.json") and os.path.exists("package.json")):
install_prerequisites()
# module folders
for folder in [f.path for f in os.scandir(package_dir) if f.is_dir()]:
if folder.endswith("dependencies"):
continue
os.chdir(f"{folder}")
# generates snapshots for all differential in the package if they do not exist
for file in [f for f in os.listdir('.') if
os.path.isfile(f) and is_structure_definition(f) and "-snapshot" not in f
and f[:-5] + "-snapshot.json" not in os.listdir('.')]:
generate_snapshot()
if not os.path.exists("extension"):
os.chdir(saved_path)
continue
os.chdir(f"extension")
for file in [f for f in os.listdir('.') if
os.path.isfile(f) and is_structure_definition(f) and "-snapshot" not in f
and f[:-5] + "-snapshot.json" not in os.listdir('.')]:
generate_snapshot()
os.chdir(saved_path)
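# Usage sketch (paths and package names are placeholders): generates snapshots for every
# StructureDefinition differential found in the module folders below the package directory.
# Assumes the `fhir` command line tool is installed and on the PATH.
#
# >>> generate_snapshots("resources/core_data_sets", ["de.example.prerequisite"])  # doctest: +SKIP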
def load_querying_meta_data(resource_querying_meta_data_dir: str) -> List[ResourceQueryingMetaData]:
"""
    Loads the querying meta data from the JSON files in the given directory
    :param resource_querying_meta_data_dir: directory containing the querying meta data files
    :return: the querying meta data
:raises FileNotFoundError: if the querying meta data directory could not be found
:raises NotADirectoryError: if the querying meta data directory is not a directory
"""
if not os.path.exists(resource_querying_meta_data_dir):
raise FileNotFoundError(f"Resource querying meta data file does not exist: {resource_querying_meta_data_dir}")
if not os.path.isdir(resource_querying_meta_data_dir):
raise NotADirectoryError("resource_querying_meta_data_dir must be a directory")
query_meta_data: List[ResourceQueryingMetaData] = []
for file in [f for f in os.scandir(resource_querying_meta_data_dir)
if os.path.isfile(f.path) and f.name.endswith(".json")]:
with open(file.path, encoding="utf-8") as f:
query_meta_data.append(ResourceQueryingMetaData.from_json(f))
return query_meta_data
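# Usage sketch (the directory name is a placeholder):
#
# >>> querying_meta_data = load_querying_meta_data("resources/QueryingMetaData")  # doctest: +SKIP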
def is_structure_definition(file: str) -> bool:
"""
    Checks if a file contains a FHIR StructureDefinition
    :param file: path to the potential StructureDefinition
    :return: true if the file is a StructureDefinition else false
"""
with open(file, encoding="UTF-8") as json_file:
try:
json_data = json.load(json_file)
except json.decoder.JSONDecodeError:
print(f"Could not decode {file}")
return False
        return json_data.get("resourceType") == "StructureDefinition"
class JsonSerializable(Protocol):
def to_json(self) -> str:
...
def write_object_as_json(serializable: JsonSerializable, file_name: str):
"""
    Writes a serializable object as JSON to a file
:param serializable: object that can be serialized to json
:param file_name: name of the file
"""
    with open(file_name, "w", encoding="utf-8") as f:
f.write(serializable.to_json())
VALUE_TYPE_TO_FHIR_SEARCH_TYPE = {
"concept": "token",
"quantity": "quantity",
"reference": "reference",
"date": "date"
}
def get_fhir_search_parameters() -> List[dict]:
    """
    Placeholder, not implemented yet
    """
    pass
def flatten(lst) -> Iterator:
    """
    Flattens a list of lists with arbitrary depth
    :param lst: the list to flatten
    :return: a generator yielding the flattened elements
    """
if not isinstance(lst, list):
yield lst
else:
for element in lst:
if isinstance(element, list) and not isinstance(element, (str, bytes)):
yield from flatten(element)
else:
yield element
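# Example: flatten yields its elements lazily, so wrap it in list() to materialize the result.
#
# >>> list(flatten([1, [2, [3, 4]], 5]))
# [1, 2, 3, 4, 5]
# >>> list(flatten("abc"))
# ['abc']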
def load_english_to_german_attribute_names() -> Dict[str, str]:
    """
    Loads the English-to-German attribute name translations from the resources folder
    :return: mapping from English attribute names to German display names
    """
with open("../../resources/english_to_german_attribute_names.json", "r", encoding="utf-8") as f:
attribute_names = json.load(f)
return attribute_names
def generate_attribute_key(element_id: str) -> TermCode:
"""
Generates the attribute key for the given element id
:param element_id: element id
:return: attribute key
"""
    if '(' in element_id and ')' in element_id:
element_id = element_id[element_id.rfind('(') + 1:element_id.find(')')]
if ':' in element_id:
element_id = element_id.split(':')[-1]
key = element_id.split('.')[0]
else:
key = element_id.split('.')[-1]
    if not key:
        raise ValueError(f"Could not find key for {element_id}")
    display = get_german_display(key)
return TermCode("http://hl7.org/fhir/StructureDefinition", key, display)
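# Example sketch (the element ids and the .code attribute of TermCode are illustrative assumptions):
# the slice name after ':' wins if present, otherwise the last path segment is used. Note that
# get_german_display reads and writes ../../resources/english_to_german_attribute_names.json.
#
# >>> generate_attribute_key("Observation.component:systolic.value[x]").code  # doctest: +SKIP
# 'systolic'
# >>> generate_attribute_key("Specimen.collection.bodySite").code  # doctest: +SKIP
# 'bodySite'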
def get_german_display(key: str) -> str:
"""
    Returns the German display for the given key if it exists, otherwise the key itself; missing keys are added to
    english_to_german_attribute_names.json
:param key: attribute key
:return: german display or original key
"""
english_to_german_attribute_names = load_english_to_german_attribute_names()
if key not in english_to_german_attribute_names:
english_to_german_attribute_names[key] = key
with open("../../resources/english_to_german_attribute_names.json", "w", encoding="utf-8") as f:
json.dump(english_to_german_attribute_names, f, ensure_ascii=False, indent=4)
return english_to_german_attribute_names.get(key)
def generate_result_folder():
"""
    Creates the result folder structure (mapping, mapping-old, ui-trees, csv, ui-profiles, ui-profiles-old,
    value-sets and criteria-sets) if it does not exist yet
"""
mkdir_if_not_exists("mapping")
mkdir_if_not_exists("mapping/fhir")
mkdir_if_not_exists("mapping/cql")
mkdir_if_not_exists("ui-trees")
mkdir_if_not_exists("csv")
mkdir_if_not_exists("ui-profiles")
mkdir_if_not_exists("ui-profiles-old")
mkdir_if_not_exists("mapping-old")
mkdir_if_not_exists("mapping-old/fhir")
mkdir_if_not_exists("mapping-old/cql")
mkdir_if_not_exists("value-sets")
mkdir_if_not_exists("criteria-sets")