Fix missing borehole GLTF output
vjf committed Nov 6, 2024
1 parent 726ef99 commit 88c14cc
Showing 4 changed files with 87 additions and 1,086 deletions.
2 changes: 1 addition & 1 deletion scripts/lib/exports/bh_make.py
@@ -57,7 +57,7 @@ def get_blob_boreholes(borehole_dict, model_param_dict):
gltf_kit = GltfKit(LOG_LVL)
blob_obj = gltf_kit.write_borehole(base_xyz, borehole_dict['name'],
bh_data_dict, height_res, '')
LOGGER.debug(f"Returning: blob_obj = {blob_obj}")
LOGGER.debug(f"Returning: {blob_obj=}")
return blob_obj

LOGGER.debug("No borehole data")
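The bh_make.py change only swaps the debug call over to the f-string "=" specifier (Python 3.8+), which prints the expression name together with its repr. A standalone illustration, not part of the commit:

blob_obj = {'name': 'bh-1'}
print(f"Returning: blob_obj = {blob_obj}")   # old form -> Returning: blob_obj = {'name': 'bh-1'}
print(f"Returning: {blob_obj=}")             # new form -> Returning: blob_obj={'name': 'bh-1'}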
10 changes: 6 additions & 4 deletions scripts/lib/exports/gltf_kit.py
@@ -10,6 +10,7 @@
np.set_printoptions(threshold=sys.maxsize)

import pygltflib
from pygltflib import BufferFormat

from lib.exports.geometry_gen import colour_borehole_gen, tri_gen
from lib.exports.export_kit import ExportKit
@@ -207,10 +208,11 @@ def end_scene(self, out_filename):
materials=self.materials,
)
gltf.set_binary_blob(self.binary_blob)

#glb = b"".join(gltf.save_to_bytes()) # save_to_bytes returns an array of the components of a glb
#gltf.convert_buffers(BufferFormat.BINFILE) # convert buffers to files
gltf.save(out_filename + ".gltf") # for glb or gltf, all the buffers are saved in 0.bin, 1.bin, 2.bin
gltf.convert_buffers(BufferFormat.DATAURI) # Save buffers inside GLTF
if out_filename != '':
gltf.save(out_filename + ".gltf")
return True
return gltf.gltf_to_json()


def write_borehole(self, base_vrtx, borehole_name, colour_info_dict, height_reso, out_filename=''):
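The end_scene() rewrite leans on pygltflib's BufferFormat.DATAURI conversion: rather than saving sidecar 0.bin/1.bin buffer files next to the .gltf, the binary blob is inlined into the JSON as a base64 data URI, so a single .gltf file (or the string from gltf_to_json()) carries the whole model. A minimal standalone sketch of that mechanism, not taken from the repo:

import struct

import pygltflib
from pygltflib import BufferFormat

points = struct.pack("<9f", 0, 0, 0, 1, 0, 0, 0, 1, 0)   # three XYZ vertices
gltf = pygltflib.GLTF2(
    buffers=[pygltflib.Buffer(byteLength=len(points))],
    bufferViews=[pygltflib.BufferView(buffer=0, byteOffset=0, byteLength=len(points))],
)
gltf.set_binary_blob(points)                  # attach the raw bytes, as end_scene() does
gltf.convert_buffers(BufferFormat.DATAURI)    # inline them as a base64 data URI
print(gltf.buffers[0].uri[:40])               # data:application/octet-stream;base64,...
print(gltf.gltf_to_json()[:80])               # the JSON now embeds its own buffer data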
199 changes: 80 additions & 119 deletions scripts/webapi/webapi.py
@@ -116,10 +116,6 @@
''' Name of our 3DPS layer
'''

GLTF_REQ_NAME = '$blobfile.bin'
''' Name of the binary file holding GLTF data
'''

G_PARAM_DICT = {}
''' Stores the models' conversion parameters, key: model name
'''
@@ -202,50 +198,6 @@ def get_cached_dict_list(model, param_dict, wfs_dict):



def cache_blob(model, blob_id, blob, blob_sz, exp_timeout=None):
'''
Cache a GLTF blob and its size
:param model: name of model, string
:param blob_id: blob id string, must be unique within each model
:param blob: binary string
:param blob_sz: size of blob
:param exp_timeout: cache expiry timeout, float, in seconds
:returns: True if blob was added to cache, False if it wasn't added
'''
try:
with Cache(CACHE_DIR) as cache_obj:
blob_key = 'blob|' + model + '|' + blob_id
return cache_obj.set(blob_key, (blob, blob_sz), expire=exp_timeout)

except OSError as os_exc:
LOGGER.error(f"Cannot cache blob {os_exc}")
return False
except Timeout as t_exc:
LOGGER.error(f"DB Timeout, cannot cache blob: {t_exc}")
return False



def get_cached_blob(model, blob_id):
'''
Get blob from cache
:param model: name of model, string
:param blob_id: blob id string, must be unique within each model
:returns: a GLTF blob (binary string) and its size
'''
try:
with Cache(CACHE_DIR) as cache_obj:
blob_key = 'blob|' + model + '|' + blob_id
blob, blob_sz = cache_obj.get(blob_key, (None, 0))
return blob, blob_sz

except OSError as os_exc:
LOGGER.error(f"Cannot get cached blob {os_exc}")
return (None, 0)


def get_cached_parameters():
'''
Creates dictionaries to store model parameters and WFS services
@@ -550,85 +502,26 @@ def make_getresourcebyid_response(model, version, output_format, res_id, param_d
borehole_dict = model_bh_dict.get(res_id, None)
if borehole_dict is not None:
# Get blob from cache
blob = get_blob_boreholes(borehole_dict, param_dict[model])
# Some boreholes do not have the requested metric
if blob is not None:
return send_blob(model, res_id, blob)
LOGGER.debug('Empty GLTF blob')
gltf_str = get_blob_boreholes(borehole_dict, param_dict[model])
return send_blob(gltf_str)
else:
LOGGER.debug('Resource not found in borehole dict')

return make_str_response('{}')



def send_blob(model, blob_id, blob, exp_timeout=None):
def send_blob(gltf_str):
''' Returns a blob in response
:param model: name of model (string)
:param blob_id: unique id string for blob, used for caching
:param blob: blob object
:param exp_timeout: cache expiry timeout, float, in seconds
:param gltf_str: GLTF data as a JSON string
:returns: a binary file response
'''
LOGGER.debug('got blob %s', str(blob))
gltf_bytes = b''
# There are 2 files in the blob, a GLTF file and a .bin file
# pylint: disable=W0612
for idx in range(2):
LOGGER.debug(f"{blob.contents.name.data=}")
LOGGER.debug(f"{blob.contents.size=}")
LOGGER.debug(f"{blob.contents.data=}")
# Look for the GLTF file
if not blob.contents.name.data:
# Convert to byte array
bcd = ctypes.cast(blob.contents.data, ctypes.POINTER(blob.contents.size \
* ctypes.c_char))
bcd_bytes = b''
for bitt in bcd.contents:
bcd_bytes += bitt
bcd_str = bcd_bytes.decode('utf-8', 'ignore')
LOGGER.debug(f"{bcd_str[:80]}")
try:
# Convert to json
gltf_json = json.loads(bcd_str)
LOGGER.debug(f"{gltf_json=}")
except JSONDecodeError as jde_exc:
LOGGER.debug(f"JSONDecodeError loads(): {jde_exc}")
else:
try:
# This modifies the URL of the .bin file associated with the GLTF file
# Inserting model name and resource id as a parameter so we can tell
# the .bin files apart
gltf_json["buffers"][0]["uri"] = model + '/' + \
gltf_json["buffers"][0]["uri"] + "?id=" + blob_id

# Convert back to bytes and send
gltf_str = json.dumps(gltf_json)
gltf_bytes = bytes(gltf_str, 'utf-8')
except JSONDecodeError as jde_exc:
LOGGER.debug(f"JSONDecodeError dumps(): {jde_exc}")

# Binary file (.bin)
elif blob.contents.name.data == b'bin':
# Convert to byte array
bcd = ctypes.cast(blob.contents.data,
ctypes.POINTER(blob.contents.size * ctypes.c_char))
bcd_bytes = b''
for bitt in bcd.contents:
bcd_bytes += bitt
cache_blob(model, blob_id, bcd_bytes, blob.contents.size, exp_timeout)


blob = blob.contents.next
if gltf_bytes == b'':
LOGGER.debug('GLTF not found in blob')
else:
with tempfile.NamedTemporaryFile(mode="w+b", suffix=".gltf", delete=False) as fp:
fp.write(gltf_bytes)
return FileResponse(fp.name, media_type="model/gltf+json;charset=UTF-8")

return make_str_response('{}')
LOGGER.debug(f"Got GLTF string {gltf_str}")
with tempfile.NamedTemporaryFile(mode="w", suffix=".gltf", delete=False) as fp:
fp.write(gltf_str)
LOGGER.debug("Created temp file, returning it")
return FileResponse(fp.name, media_type="model/gltf+json;charset=UTF-8")
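Because the GLTF produced by GltfKit now embeds its buffers, the whole model arrives as one JSON string, which is why the simplified send_blob() can just drop it into a temporary .gltf file and return it via FileResponse. Assuming the service is built on FastAPI/Starlette (as the FileResponse call suggests), an equivalent route could also skip the temporary file; a hedged sketch, with a hypothetical endpoint and helper that are not in webapi.py:

from fastapi import FastAPI, Response

app = FastAPI()

def build_borehole_gltf(res_id: str) -> str:
    # Hypothetical stand-in for get_blob_boreholes() + GltfKit; returns a GLTF JSON string
    return '{"asset": {"version": "2.0"}}'

@app.get("/borehole/{res_id}")
def get_borehole_gltf(res_id: str) -> Response:
    gltf_str = build_borehole_gltf(res_id)
    return Response(content=gltf_str, media_type="model/gltf+json;charset=UTF-8")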


def make_getpropvalue_response(model, version, output_format, type_name, value_ref, param_dict, wfs_dict):
@@ -695,10 +588,78 @@ def convert_gocad2gltf(model, id_str, gocad_list):
gltf_kit = GltfKit(DEBUG_LVL)
gltf_kit.start_scene()
gltf_kit.add_geom(geom_obj, style_obj, metadata_obj)
blob_obj = gltf_kit.end_scene("")
return send_blob(model, 'drag_and_drop_'+id_str, blob_obj, 60.0)
gltf_str = gltf_kit.end_scene("")
return send_blob(gltf_str)
return make_str_response(' ')

def send_assimp_blob(model, blob_id, blob, exp_timeout=None):
''' Returns a blob in response
:param model: name of model (string)
:param blob_id: unique id string for blob, used for caching
:param blob: blob object
:param exp_timeout: cache expiry timeout, float, in seconds
:returns: a binary file response
'''
LOGGER.debug('got blob %s', str(blob))
gltf_bytes = b''
# There are 2 files in the blob, a GLTF file and a .bin file
# pylint: disable=W0612
for idx in range(2):
LOGGER.debug(f"{blob.contents.name.data=}")
LOGGER.debug(f"{blob.contents.size=}")
LOGGER.debug(f"{blob.contents.data=}")
# Look for the GLTF file
if not blob.contents.name.data:
# Convert to byte array
bcd = ctypes.cast(blob.contents.data, ctypes.POINTER(blob.contents.size \
* ctypes.c_char))
bcd_bytes = b''
for bitt in bcd.contents:
bcd_bytes += bitt
bcd_str = bcd_bytes.decode('utf-8', 'ignore')
LOGGER.debug(f"{bcd_str[:80]}")
try:
# Convert to json
gltf_json = json.loads(bcd_str)
LOGGER.debug(f"{gltf_json=}")
except JSONDecodeError as jde_exc:
LOGGER.debug(f"JSONDecodeError loads(): {jde_exc}")
else:
try:
# This modifies the URL of the .bin file associated with the GLTF file
# Inserting model name and resource id as a parameter so we can tell
# the .bin files apart
gltf_json["buffers"][0]["uri"] = model + '/' + \
gltf_json["buffers"][0]["uri"] + "?id=" + blob_id

# Convert back to bytes and send
gltf_str = json.dumps(gltf_json)
gltf_bytes = bytes(gltf_str, 'utf-8')
except JSONDecodeError as jde_exc:
LOGGER.debug(f"JSONDecodeError dumps(): {jde_exc}")

# Binary file (.bin)
elif blob.contents.name.data == b'bin':
# Convert to byte array
bcd = ctypes.cast(blob.contents.data,
ctypes.POINTER(blob.contents.size * ctypes.c_char))
bcd_bytes = b''
for bitt in bcd.contents:
bcd_bytes += bitt
cache_blob(model, blob_id, bcd_bytes, blob.contents.size, exp_timeout)


blob = blob.contents.next
if gltf_bytes == b'':
LOGGER.debug('GLTF not found in blob')
else:
with tempfile.NamedTemporaryFile(mode="w+b", suffix=".gltf", delete=False) as fp:
fp.write(gltf_bytes)
return FileResponse(fp.name, media_type="model/gltf+json;charset=UTF-8")

return make_str_response('{}')
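send_assimp_blob() copies each C buffer out of the assimp export blob by casting the data pointer to a fixed-size char array. A self-contained illustration of that ctypes pattern, using a synthetic buffer rather than a real assimp blob:

import ctypes

payload = b"glTF binary chunk"
data_ptr = ctypes.create_string_buffer(payload, len(payload))  # stands in for blob.contents.data
size = len(payload)                                            # stands in for blob.contents.size

bcd = ctypes.cast(data_ptr, ctypes.POINTER(size * ctypes.c_char))
bcd_bytes = b"".join(bcd.contents)   # same result as the byte-by-byte loop above
assert bcd_bytes == payload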


def convert_gltf2xxx(model, filename, fmt):
'''
@@ -737,7 +698,7 @@ def convert_gltf2xxx(model, filename, fmt):
LOGGER.error(f"Cannot export {gltf_path}: {ae}")
return make_str_response(' ')

return send_blob(model, 'export_{0}_{1}'.format(model, filename), blob_obj, 60.0)
return send_assimp_blob(model, 'export_{0}_{1}'.format(model, filename), blob_obj, 60.0)



(Diff for the fourth changed file was not loaded and is not shown.)
