From 3ffe96abd3f0128725f0f5a04dcd0074f00999fc Mon Sep 17 00:00:00 2001 From: Didier Vezinet Date: Thu, 12 Sep 2024 10:03:36 -0400 Subject: [PATCH 01/10] [#973] Fixed #972 --- tofu/data/_class8_plot_coverage.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tofu/data/_class8_plot_coverage.py b/tofu/data/_class8_plot_coverage.py index 283eb7489..7b6aba001 100644 --- a/tofu/data/_class8_plot_coverage.py +++ b/tofu/data/_class8_plot_coverage.py @@ -129,7 +129,10 @@ def _check( lok_vos = [ k0 for k0, v0 in coll.dobj.get('diagnostic', {}).items() - if all([v1.get('dvos') is not None for v1 in v0['doptics'].values()]) + if all([ + v1.get('dvos', {}).get('keym') is not None + for v1 in v0['doptics'].values() + ]) ] lok = [ k0 for k0 in coll.dobj.get('diagnostic', {}).keys() From 26098d8692f4a4e4596bb00b7a512e2b95c2da69 Mon Sep 17 00:00:00 2001 From: Didier Vezinet Date: Thu, 12 Sep 2024 14:21:27 -0400 Subject: [PATCH 02/10] [#973] get_optics_outline() now return outline for 3d polygons, with a warning --- tofu/data/_class8_compute.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/tofu/data/_class8_compute.py b/tofu/data/_class8_compute.py index db9928214..b21118067 100644 --- a/tofu/data/_class8_compute.py +++ b/tofu/data/_class8_compute.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- +import warnings import itertools as itt @@ -51,7 +52,23 @@ def get_optics_outline( # compute if dgeom['type'] == '3d': - return None, None + + msg = ( + "Approximate outline for {cls} '{key}' due to 3d polygon!" + ) + warnings.warn(msg) + + px, py, pz = coll.dobj[cls][key]['dgeom']['poly'] + px = coll.ddata[px]['data'] + py = coll.ddata[py]['data'] + pz = coll.ddata[pz]['data'] + + cx, cy, cz = np.mean([px, py, pz], axis=1) + e0 = coll.dobj[cls][key]['dgeom']['e0'] + e1 = coll.dobj[cls][key]['dgeom']['e1'] + + p0 = (px - cx) * e0[0] + (py - cy) * e0[1] + (pz - cz) * e0[2] + p1 = (px - cx) * e1[0] + (py - cy) * e1[1] + (pz - cz) * e1[2] if cls == 'camera' and total: # get centers From 04940a86cf20ae50ed910e645aafae3806ef0857 Mon Sep 17 00:00:00 2001 From: Didier Vezinet Date: Thu, 12 Sep 2024 14:22:45 -0400 Subject: [PATCH 03/10] [#973] Started implementing random mode of add_rays_from_diagnostic() --- tofu/data/_class08_Diagnostic.py | 22 ++ tofu/data/_class08_generate_rays.py | 393 ++++++++++++++++++++++++++++ 2 files changed, 415 insertions(+) create mode 100644 tofu/data/_class08_generate_rays.py diff --git a/tofu/data/_class08_Diagnostic.py b/tofu/data/_class08_Diagnostic.py index 3fd845725..438b50b1c 100644 --- a/tofu/data/_class08_Diagnostic.py +++ b/tofu/data/_class08_Diagnostic.py @@ -23,6 +23,7 @@ from . import _class8_vos as _vos from . import _class8_vos_spectro_nobin_at_lamb as _vos_nobin_at_lamb from . import _class8_los_angles as _los_angles +from . import _class08_generate_rays as _generate_rays from . import _class8_plane_perp_to_los as _planeperp from . import _class8_compute_signal as _compute_signal from . 
import _class8_compute_signal_moments as _signal_moments @@ -367,6 +368,27 @@ def compute_diagnostic_solidangle_from_plane( vmax_plane=vmax_plane, ) + # ----------------- + # add rays from diag + # ----------------- + + def add_rays_from_diagnostic( + self, + key=None, + strategy=None, + nrays=None, + ): + return _generate_rays.main( + coll=self, + key=key, + strategy=strategy, + nrays=nrays, + ) + + # ----------------- + # solid angle from plane + # ----------------- + def plot_diagnostic_solidangle_from_plane( self, dout=None, diff --git a/tofu/data/_class08_generate_rays.py b/tofu/data/_class08_generate_rays.py new file mode 100644 index 000000000..85acc86c0 --- /dev/null +++ b/tofu/data/_class08_generate_rays.py @@ -0,0 +1,393 @@ +# -*- coding: utf-8 -*- +""" +Created on Thu Sep 12 10:17:01 2024 + +@author: dvezinet +""" + + +import numpy as np +from matplotlib import path as mpath +import datastock as ds + + +# ############################################################### +# ############################################################### +# Main +# ############################################################### + + +def main( + coll=None, + key=None, + strategy=None, + nrays=None, + # storing + store=None, +): + + # ------------- + # check + # ------------- + + key, strategy, nrays, store = _check( + coll=coll, + key=key, + strategy=strategy, + nrays=nrays, + # storing + store=store, + ) + + # --------------- + # prepare + # --------------- + + wdiag = 'diagnostic' + key_cam = coll.dobj[wdiag][key]['camera'] + doptics = coll.dobj[wdiag][key]['doptics'] + + # ------------------------- + # trivial: nrays=1 => LOS + # ------------------------- + + if nrays == 1: + + dout = { + kcam: {} + for kcam in key_cam + } + + store = False + + # --------------- + # compute + # --------------- + + # initialize + dout = {} + + # loop on cameras + for kcam in key_cam: + + # -------------- + # prepare pixel outline + + out0, out1 = coll.dobj['camera'][kcam]['dgeom']['outline'] + out0 = coll.ddata[out0]['data'] + out1 = coll.ddata[out1]['data'] + out_arr = np.array([out0, out1]).T + + # --------------- + # call routine + + if strategy == 'random': + + dout[kcam] = _random( + coll=coll, + kcam=kcam, + doptics=doptics[kcam], + out_arr=out_arr, + nrays=nrays, + ) + + elif strategy == 'outline': + + dout[kcam] = _outline() + + elif strategy == 'mesh': + + dout[kcam] = _mesh() + + # --------------- + # store + # --------------- + + if store is True: + _store( + coll=coll, + dout=dout, + ) + + else: + return dout + + +# ############################################################### +# ############################################################### +# check +# ############################################################### + + +def _check( + coll=None, + key=None, + strategy=None, + nrays=None, + # storing + store=None, + store_key=None, +): + + # ------------ + # key + # ------------ + + wdiag = 'diagnostic' + lok = list(coll.dobj.get(wdiag, {}).keys()) + key = ds._generic_check._check_var( + key, 'key', + types=str, + allowed=lok, + ) + + # spectro ? 
+ spectro = coll.dobj[wdiag][key]['spectro'] + if spectro is True: + raise NotImplementedError() + + # ------------ + # strategy + # ------------ + + strategy = ds._generic_check._check_var( + strategy, 'strategy', + types=str, + default='random', + allowed=['random', 'mesh', 'outline'], + ) + + # ------------ + # nrays + # ------------ + + if strategy == 'custom': + nrdef = 10 + else: + nrdef = 10 + + + nrays = int(ds._generic_check._check_var( + nrays, 'nrays', + types=(int, float), + default=nrdef, + sign='>0', + )) + + # ------------ + # store + # ------------ + + store = ds._generic_check._check_var( + store, 'store', + types=bool, + default=False, + ) + + return key, strategy, nrays, store + + +# ############################################################### +# ############################################################### +# random +# ############################################################### + + +def _random( + coll=None, + kcam=None, + doptics=None, + out_arr=None, + nrays=None, +): + + # --------------- + # prepare pixel + # --------------- + + # outline path + path = mpath.Path(out_arr) + + min0, max0 = np.min(out_arr[:, 0]), np.max(out_arr[:, 0]) + min1, max1 = np.min(out_arr[:, 1]), np.max(out_arr[:, 1]) + + # areas to get safety factor + area = doptics['pix_area'] + area_max = (max0 - min0) * (max1 - min1) + + # start point coords in pixel's plane + start0, start1 = _seed_pix( + path=path, + nrays=nrays, + nrays_factor=np.ceil(area_max/area) + 2, + min0=min0, + max0=max0, + min1=min1, + max1=max1, + ) + + # shared apertures ? + pinhole = doptics['pinhole'] + + # shape camera + shape_cam = coll.dobj['camera'][kcam]['dgeom']['shape'] + + # ----------------------------------- + # pinhole camera (shared apertures) + # ----------------------------------- + + if pinhole is True: + + cx, cy, cz = coll.get_camera_cents_xyz(kcam) + cent_cam = np.r_[np.mean(cx), np.mean(cy), np.mean(cz)] + + end0, end1 = _seed_optics( + coll=coll, + cent_cam=cent_cam, + nrays=nrays, + optics=doptics['optics'], + cent_cam=coll.get_camera_cents_xyz(kcam), + ) + + for ind in np.ndindex(shape_cam): + pass + + + + # --------------- + # mesh pixels + # --------------- + + else: + + for ind in np.ndindex(shape_cam): + + print(ind) + + + return + + +# ######################### +# seeding pixels +# ######################### + + +def _seed_pix( + path=None, + nrays=None, + nrays_factor=None, + min0=None, + max0=None, + min1=None, + max1=None, +): + + seed0 = np.random.random((nrays*nrays_factor,)) + seed1 = np.random.random((nrays*nrays_factor,)) + + pts0 = (max0 - min0) * seed0 + min0 + pts1 = (max1 - min1) * seed1 + min1 + + # only keep those in the pixel + iok_pix = path.contains_points(np.array([pts0, pts1]).T).nonzero()[0] + + # imax + imax = min(nrays, iok_pix.size) + + return pts0[iok_pix[:imax]], pts1[iok_pix[:imax]] + + +# ######################### +# seeding optics +# ######################### + + +def _seed_optics( + coll=None, + cent_cam=None, + nrays=None, + optics=None, +): + + # ----------------------------- + # select smallest etendue optic + # ----------------------------- + + optics, optics_cls = coll.get_optics_cls(optics) + + etend = [ + ( + coll.dobj[cc][kop]['dgeom']['area'] + / np.linalg.norm(coll.dobj[cc][kop]['dgeom']['cent'] - cent_cam)**2 + ) + for kop, cc in zip(optics, optics_cls) + ] + + ind = np.nanargmin(etend) + kop = optics[ind] + cc = optics_cls[ind] + + # ----------------------------- + # seed + # ----------------------------- + + out0, out1 = 
coll.get_optics_outline(kop) + + path = mpath.Path(np.array([out0, out1]).T) + + min0, max0 = np.min(out0), np.max(out0) + min1, max1 = np.min(out1), np.max(out1) + + area = coll.dobj[cc][kop]['dgeom']['area'] + factor = np.ceil((max0 - min0) * (max1 - min1) / area) + 2 + + seed0 = np.random.random((nrays * factor,)) + seed1 = np.random.random((nrays * factor,)) + + pts0 = (max0 - min0) * seed0 + min0 + pts1 = (max1 - min1) * seed1 + min1 + + # only keep those in the pixel + iok_pix = path.contains_points(np.array([pts0, pts1]).T).nonzero()[0] + + # imax + imax = min(nrays*2, iok_pix.size) + + return pts0[iok_pix[:imax]], pts1[iok_pix[:imax]] + + +# ############################################################### +# ############################################################### +# outline +# ############################################################### + + +def _outline(): + + + return + + +# ############################################################### +# ############################################################### +# mesh +# ############################################################### + + +def _mesh(): + + + return + + +# ############################################################### +# ############################################################### +# store +# ############################################################### + + +def _store(): + + + return \ No newline at end of file From d876cee03dfc86627378b6e27772834ddae8422d Mon Sep 17 00:00:00 2001 From: Didier Vezinet Date: Thu, 12 Sep 2024 19:34:35 -0400 Subject: [PATCH 04/10] [#973] Corrected _vignetting_tools.are_points_reflex_2d(): indexing issue --- tofu/geom/_vignetting_tools.pyx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tofu/geom/_vignetting_tools.pyx b/tofu/geom/_vignetting_tools.pyx index cf5f8a6f6..f954ec53c 100644 --- a/tofu/geom/_vignetting_tools.pyx +++ b/tofu/geom/_vignetting_tools.pyx @@ -139,8 +139,8 @@ cdef inline void are_points_reflex_2d( # do first point: are_reflex[0] = is_reflex_2d( - &diff[0*nvert + 9], # u0 - &diff[1*nvert + 9], # u1 + &diff[0*nvert + nvert - 1], # u0 + &diff[1*nvert + nvert - 1], # u1 &diff[0*nvert + 0], # v0 &diff[1*nvert + 0], # v1 ) From de3edb0092ea048ebeb376a20e1c8931d09a4c38 Mon Sep 17 00:00:00 2001 From: Didier Vezinet Date: Fri, 13 Sep 2024 04:57:55 -0400 Subject: [PATCH 05/10] [#973] Upgraded requirements for setuptools>=70.0.0 --- requirements.txt | 5 +++-- setup.py | 3 ++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/requirements.txt b/requirements.txt index 4d3225938..018160b88 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,9 @@ ####### Requirements without Version Specifiers ####### scipy numpy -# scikit-sparse # does not work on windows, and requires "apt/brew install libsuitesparse-dev/suite-sparse" on linux / MacOs +# scikit-sparse # does not work on windows, and requires "apt/brew install libsuitesparse-dev/suite-sparse" on linux / MacOs # scikit-umfpack # similar issue +setuptools>=70.0.0 matplotlib contourpy requests @@ -11,4 +12,4 @@ Polygon3 ######## Requirements with Version Specifier ######## spectrally>=0.0.4 -Cython>=0.26 +Cython>=0.26 \ No newline at end of file diff --git a/setup.py b/setup.py index 0f85ee9a4..1e0075aa9 100644 --- a/setup.py +++ b/setup.py @@ -314,6 +314,7 @@ def get_version_tofu(path=_HERE): # requirements files see: # https://packaging.python.org/en/latest/requirements.html install_requires=[ + "setuptools>=70.0.0", "numpy", "scipy", # "scikit-sparse", @@ -397,4 +398,4 @@ 
def get_version_tofu(path=_HERE): cmdclass={"build_ext": build_ext, "clean": CleanCommand}, include_dirs=[np.get_include()], -) +) \ No newline at end of file From 7260613bd038579de95676d0a017ce559ea7c1ef Mon Sep 17 00:00:00 2001 From: Didier Vezinet Date: Fri, 13 Sep 2024 04:59:22 -0400 Subject: [PATCH 06/10] [#973] fixed except nogil in _vignetting_tools (compilation warning) --- tofu/geom/_vignetting_tools.pxd | 2 +- tofu/geom/_vignetting_tools.pyx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tofu/geom/_vignetting_tools.pxd b/tofu/geom/_vignetting_tools.pxd index 53e922654..f8cb758a2 100644 --- a/tofu/geom/_vignetting_tools.pxd +++ b/tofu/geom/_vignetting_tools.pxd @@ -94,7 +94,7 @@ cdef int triangulate_polys(double** vignett_poly, int64_t* lnvert, int nvign, int64_t** ltri, - int num_threads) nogil except -1 + int num_threads) except -1 nogil # =============================================================== diff --git a/tofu/geom/_vignetting_tools.pyx b/tofu/geom/_vignetting_tools.pyx index f954ec53c..d8f9c9f72 100644 --- a/tofu/geom/_vignetting_tools.pyx +++ b/tofu/geom/_vignetting_tools.pyx @@ -478,7 +478,7 @@ cdef inline int triangulate_polys( int nvign, int64_t** ltri, int num_threads, -) nogil except -1: +) except -1 nogil: """ Triangulates a list 3d polygon using the earclipping techinque https://www.geometrictools.com/Documentation/TriangulationByEarClipping.pdf From 663b46af27d6fbcb3fff296533b60b4b5a8d1635 Mon Sep 17 00:00:00 2001 From: Didier Vezinet Date: Fri, 13 Sep 2024 05:00:22 -0400 Subject: [PATCH 07/10] [#973] Advancing on _generate_rays() --- tofu/data/_class08_generate_rays.py | 78 ++++++++++++++++++++++++++--- 1 file changed, 70 insertions(+), 8 deletions(-) diff --git a/tofu/data/_class08_generate_rays.py b/tofu/data/_class08_generate_rays.py index 85acc86c0..88d10483a 100644 --- a/tofu/data/_class08_generate_rays.py +++ b/tofu/data/_class08_generate_rays.py @@ -11,6 +11,9 @@ import datastock as ds +from ..geom import _GG + + # ############################################################### # ############################################################### # Main @@ -212,14 +215,14 @@ def _random( min1, max1 = np.min(out_arr[:, 1]), np.max(out_arr[:, 1]) # areas to get safety factor - area = doptics['pix_area'] + area = coll.dobj['camera'][kcam]['dgeom']['pix_area'] area_max = (max0 - min0) * (max1 - min1) # start point coords in pixel's plane start0, start1 = _seed_pix( path=path, nrays=nrays, - nrays_factor=np.ceil(area_max/area) + 2, + nrays_factor=int(np.ceil(area_max/area) + 2), min0=min0, max0=max0, min1=min1, @@ -228,30 +231,84 @@ def _random( # shared apertures ? 
pinhole = doptics['pinhole'] + parallel = coll.dobj['camera'][kcam]['dgeom']['parallel'] # shape camera shape_cam = coll.dobj['camera'][kcam]['dgeom']['shape'] + # camera vector + # get camera vectors + dvect = coll.get_camera_unit_vectors(kcam) + lc = ['x', 'y', 'z'] + if parallel is True: + e0i_x, e0i_y, e0i_z = [dvect[f"e0_{kk}"] for kk in lc] + e1i_x, e1i_y, e1i_z = [dvect[f"e1_{kk}"] for kk in lc] + # ----------------------------------- # pinhole camera (shared apertures) # ----------------------------------- if pinhole is True: + # get pixel centers cx, cy, cz = coll.get_camera_cents_xyz(kcam) cent_cam = np.r_[np.mean(cx), np.mean(cy), np.mean(cz)] - end0, end1 = _seed_optics( + # get end points + end0, end1, op_cent, op_e0, op_e1 = _seed_optics( coll=coll, cent_cam=cent_cam, nrays=nrays, optics=doptics['optics'], - cent_cam=coll.get_camera_cents_xyz(kcam), ) + # full versions + end0f = np.tile(end0, nrays) + end1f = np.tile(end1, nrays) + + # full versions + start0f = np.repeat(start0, end0.size) + start1f = np.repeat(start1, end0.size) + + # vignetting polygons + vignett_poly = [ + np.array(coll.get_optics_poly(k0, closed=True, add_points=False)) + for k0 in doptics['optics'] + ] + lnvert = np.array([vv.shape[1] for vv in vignett_poly], dtype=np.int64) + + # loop on pixels + ray_orig = np.full((3, nrays*end0.size), np.nan) + ray_vdir = np.full((3, nrays*end0.size), np.nan) for ind in np.ndindex(shape_cam): - pass + # unit vectors + if parallel is not True: + e0i_x, e0i_y, e0i_z = [dvect[f"e0_{kk}"][ind] for kk in lc] + e1i_x, e1i_y, e1i_z = [dvect[f"e1_{kk}"][ind] for kk in lc] + + # ray_orig + ray_orig[0, :] = cx[ind] + start0f * e0i_x + start1f * e1i_x + ray_orig[1, :] = cy[ind] + start0f * e0i_y + start1f * e1i_y + ray_orig[2, :] = cz[ind] + start0f * e0i_z + start1f * e1i_z + + # ray_vdir, normalized + ray_vdir[0, :] = op_cent[0] + end0f * op_e0[0] + end1f * op_e1[0] + ray_vdir[1, :] = op_cent[1] + end0f * op_e0[1] + end1f * op_e1[1] + ray_vdir[2, :] = op_cent[2] + end0f * op_e0[2] + end1f * op_e1[2] + ray_norm = np.sqrt(np.sum(ray_vdir**2, axis=0)) + ray_vdir[:] = ray_vdir / ray_norm + + # vignetting (npoly, nrays) + iok = _GG.vignetting( + ray_orig, + ray_vdir, + vignett_poly, + lnvert, + num_threads=16, + ) + + print(ind, iok.sum(), iok.size) # --------------- @@ -332,7 +389,7 @@ def _seed_optics( # seed # ----------------------------- - out0, out1 = coll.get_optics_outline(kop) + out0, out1 = coll.get_optics_outline(kop, add_points=False, closed=False) path = mpath.Path(np.array([out0, out1]).T) @@ -340,7 +397,7 @@ def _seed_optics( min1, max1 = np.min(out1), np.max(out1) area = coll.dobj[cc][kop]['dgeom']['area'] - factor = np.ceil((max0 - min0) * (max1 - min1) / area) + 2 + factor = int(np.ceil((max0 - min0) * (max1 - min1) / area) + 2) seed0 = np.random.random((nrays * factor,)) seed1 = np.random.random((nrays * factor,)) @@ -354,7 +411,12 @@ def _seed_optics( # imax imax = min(nrays*2, iok_pix.size) - return pts0[iok_pix[:imax]], pts1[iok_pix[:imax]] + # optic cent and vectors + cent = coll.dobj[cc][kop]['dgeom']['cent'] + e0 = coll.dobj[cc][kop]['dgeom']['e0'] + e1 = coll.dobj[cc][kop]['dgeom']['e1'] + + return pts0[iok_pix[:imax]], pts1[iok_pix[:imax]], cent, e0, e1 # ############################################################### From df7d75214609b00718acf8d345a6e143ca0e00d8 Mon Sep 17 00:00:00 2001 From: Didier Vezinet Date: Fri, 13 Sep 2024 06:44:59 -0400 Subject: [PATCH 08/10] [#973] Fixed without vignetting --- tofu/data/_class08_Diagnostic.py | 8 ++ 
tofu/data/_class08_generate_rays.py | 109 +++++++++++++++++++++++----- 2 files changed, 100 insertions(+), 17 deletions(-) diff --git a/tofu/data/_class08_Diagnostic.py b/tofu/data/_class08_Diagnostic.py index 438b50b1c..89788db8c 100644 --- a/tofu/data/_class08_Diagnostic.py +++ b/tofu/data/_class08_Diagnostic.py @@ -377,12 +377,20 @@ def add_rays_from_diagnostic( key=None, strategy=None, nrays=None, + # storing + store=None, + config=None, + overwrite=None, ): return _generate_rays.main( coll=self, key=key, strategy=strategy, nrays=nrays, + # storing + store=store, + config=config, + overwrite=overwrite, ) # ----------------- diff --git a/tofu/data/_class08_generate_rays.py b/tofu/data/_class08_generate_rays.py index 88d10483a..daaf9740b 100644 --- a/tofu/data/_class08_generate_rays.py +++ b/tofu/data/_class08_generate_rays.py @@ -27,19 +27,22 @@ def main( nrays=None, # storing store=None, + config=None, + overwrite=None, ): # ------------- # check # ------------- - key, strategy, nrays, store = _check( + key, strategy, nrays, store, overwrite = _check( coll=coll, key=key, strategy=strategy, nrays=nrays, # storing store=store, + overwrite=overwrite, ) # --------------- @@ -92,6 +95,7 @@ def main( doptics=doptics[kcam], out_arr=out_arr, nrays=nrays, + strategy=strategy, ) elif strategy == 'outline': @@ -110,6 +114,8 @@ def main( _store( coll=coll, dout=dout, + config=config, + overwrite=overwrite, ) else: @@ -129,7 +135,7 @@ def _check( nrays=None, # storing store=None, - store_key=None, + overwrite=None, ): # ------------ @@ -184,10 +190,20 @@ def _check( store = ds._generic_check._check_var( store, 'store', types=bool, + default=True, + ) + + # ------------ + # overwrite + # ------------ + + overwrite = ds._generic_check._check_var( + overwrite, 'overwrite', + types=bool, default=False, ) - return key, strategy, nrays, store + return key, strategy, nrays, store, overwrite # ############################################################### @@ -202,6 +218,7 @@ def _random( doptics=None, out_arr=None, nrays=None, + strategy=None, ): # --------------- @@ -244,6 +261,22 @@ def _random( e0i_x, e0i_y, e0i_z = [dvect[f"e0_{kk}"] for kk in lc] e1i_x, e1i_y, e1i_z = [dvect[f"e1_{kk}"] for kk in lc] + # ----------------------------------- + # prepare output + # ----------------------------------- + + shape_out = shape_cam + (nrays*nrays,) + + dout = { + 'key': f'{kcam}_rays_{strategy}', + 'start_x': np.full(shape_out, np.nan), + 'start_y': np.full(shape_out, np.nan), + 'start_z': np.full(shape_out, np.nan), + 'vect_x': np.full(shape_out, np.nan), + 'vect_y': np.full(shape_out, np.nan), + 'vect_z': np.full(shape_out, np.nan), + } + # ----------------------------------- # pinhole camera (shared apertures) # ----------------------------------- @@ -282,6 +315,8 @@ def _random( ray_vdir = np.full((3, nrays*end0.size), np.nan) for ind in np.ndindex(shape_cam): + sli_out = ind + (slice(None),) + # unit vectors if parallel is not True: e0i_x, e0i_y, e0i_z = [dvect[f"e0_{kk}"][ind] for kk in lc] @@ -292,24 +327,37 @@ def _random( ray_orig[1, :] = cy[ind] + start0f * e0i_y + start1f * e1i_y ray_orig[2, :] = cz[ind] + start0f * e0i_z + start1f * e1i_z - # ray_vdir, normalized + # end points ray_vdir[0, :] = op_cent[0] + end0f * op_e0[0] + end1f * op_e1[0] ray_vdir[1, :] = op_cent[1] + end0f * op_e0[1] + end1f * op_e1[1] ray_vdir[2, :] = op_cent[2] + end0f * op_e0[2] + end1f * op_e1[2] - ray_norm = np.sqrt(np.sum(ray_vdir**2, axis=0)) - ray_vdir[:] = ray_vdir / ray_norm - # vignetting (npoly, nrays) - iok = 
_GG.vignetting( - ray_orig, - ray_vdir, - vignett_poly, - lnvert, - num_threads=16, - ) + # ray_vdir, normalized + ray_vdir[0, :] = ray_vdir[0, :] - ray_orig[0, :] + ray_vdir[1, :] = ray_vdir[1, :] - ray_orig[1, :] + ray_vdir[2, :] = ray_vdir[2, :] - ray_orig[2, :] - print(ind, iok.sum(), iok.size) + ray_norm = np.sqrt(np.sum(ray_vdir**2, axis=0)) + ray_vdir[:] = ray_vdir / ray_norm + # vignetting (npoly, nrays) is wrong !!! + # iok = _GG.vignetting( + # ray_orig, + # ray_vdir, + # vignett_poly, + # lnvert, + # num_threads=16, + # ) + # print(ind, iok.sum(), iok.size) + + indrand = (np.random.randint(0, ray_orig.shape[1], nrays*nrays),) + + dout['start_x'][sli_out] = ray_orig[(0,) + indrand] + dout['start_y'][sli_out] = ray_orig[(1,) + indrand] + dout['start_z'][sli_out] = ray_orig[(2,) + indrand] + dout['vect_x'][sli_out] = ray_vdir[(0,) + indrand] + dout['vect_y'][sli_out] = ray_vdir[(1,) + indrand] + dout['vect_z'][sli_out] = ray_vdir[(2,) + indrand] # --------------- # mesh pixels @@ -322,7 +370,7 @@ def _random( print(ind) - return + return dout # ######################### @@ -449,7 +497,34 @@ def _mesh(): # ############################################################### -def _store(): +def _store( + coll=None, + kdiag=None, + dout=None, + config=None, + overwrite=None, +): + + # ----------------- + # add ref + # ----------------- + + nrays = list(dout.values())[0]['start_x'].shape[-1] + + kref = f"{kdiag}_nrays" + coll.add_ref(key=kref, size=nrays) + # -------------- + # add rays + # -------------- + + for kcam, v0 in dout.items(): + + ref = coll.dobj['camera'][kcam]['dgeom']['ref'] + (kref,) + coll.add_rays( + ref=ref, + config=config, + **v0 + ) return \ No newline at end of file From d69bcd640d1281b021bc0423d81f6ec0c7248715 Mon Sep 17 00:00:00 2001 From: Didier Vezinet Date: Fri, 13 Sep 2024 06:57:31 -0400 Subject: [PATCH 09/10] [#973] pinned down setuptools version to 70.0.0 for msvccompiler --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 018160b88..40f1dd1f8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ scipy numpy # scikit-sparse # does not work on windows, and requires "apt/brew install libsuitesparse-dev/suite-sparse" on linux / MacOs # scikit-umfpack # similar issue -setuptools>=70.0.0 +setuptools==70.0.0 matplotlib contourpy requests diff --git a/setup.py b/setup.py index 1e0075aa9..9bd8a9b4a 100644 --- a/setup.py +++ b/setup.py @@ -314,7 +314,7 @@ def get_version_tofu(path=_HERE): # requirements files see: # https://packaging.python.org/en/latest/requirements.html install_requires=[ - "setuptools>=70.0.0", + "setuptools==70.0.0", "numpy", "scipy", # "scikit-sparse", From f158a03a67cf135b32ed98027c215368dece7e92 Mon Sep 17 00:00:00 2001 From: Didier Vezinet Date: Fri, 13 Sep 2024 07:02:51 -0400 Subject: [PATCH 10/10] [#973] pinned down setuptools version to !=65.*,!=74.* for msvccompiler --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 40f1dd1f8..079198bf6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ scipy numpy # scikit-sparse # does not work on windows, and requires "apt/brew install libsuitesparse-dev/suite-sparse" on linux / MacOs # scikit-umfpack # similar issue -setuptools==70.0.0 +setuptools!=65.*,!=74.* matplotlib contourpy requests diff --git a/setup.py b/setup.py index 9bd8a9b4a..e468e4aba 100644 --- a/setup.py +++ 
b/setup.py @@ -314,7 +314,7 @@ def get_version_tofu(path=_HERE): # requirements files see: # https://packaging.python.org/en/latest/requirements.html install_requires=[ - "setuptools==70.0.0", + "setuptools!=65.*,!=74.*", "numpy", "scipy", # "scikit-sparse",
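---

Usage sketch for the API added by this series (hedged: the diagnostic key 'diag0', the collection 'coll' and the geometry object 'conf' below are placeholder assumptions, not defined in these patches):

    # Minimal sketch, assuming 'coll' is a Collection that already holds a
    # non-spectro diagnostic 'diag0' (spectro diagnostics raise NotImplementedError)
    # and 'conf' is an optional geometry configuration forwarded to coll.add_rays().

    # store=True (default): adds one rays object per camera of the diagnostic,
    # keyed '<kcam>_rays_<strategy>', with nrays*nrays rays per pixel
    coll.add_rays_from_diagnostic(
        key='diag0',
        strategy='random',   # allowed: 'random', 'mesh', 'outline' (only 'random' implemented so far)
        nrays=10,            # default 10
        store=True,
        config=conf,
        overwrite=False,
    )

    # store=False: returns a dict with one entry per camera, holding
    # 'start_x/y/z' and 'vect_x/y/z' arrays of shape cam_shape + (nrays*nrays,)
    dout = coll.add_rays_from_diagnostic(
        key='diag0',
        strategy='random',
        nrays=10,
        store=False,
    )

For pinhole cameras the 'random' strategy seeds start points inside each pixel outline and end points on the optic with the smallest etendue, then normalizes the resulting ray vectors; the vignetting filter is left commented out in patch 08 pending a fix.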