diff --git a/.github/workflows/lint-and-test.yml b/.github/workflows/lint-and-test.yml deleted file mode 100644 index af128fb6..00000000 --- a/.github/workflows/lint-and-test.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: Lint and Test Galaxy Tools with Planemo - -on: [push] - -jobs: - lint-and-test: - runs-on: ubuntu-latest - strategy: - max-parallel: 5 - - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.10 - uses: actions/setup-python@v3 - with: - python-version: '3.10' - - name: Add conda to system path - run: | - # $CONDA is an environment variable pointing to the root of the miniconda directory - echo $CONDA/bin >> $GITHUB_PATH - - name: Install dependencies - run: | - conda env update --file environment.yml --name base - pip install planemo - - name: Lint with Planemo - run: | - for tool in tools/astropytools/*xml; do - echo "linting $tool" - planemo l $tool - done - - name: Test with Planemo - run: | - for tool in tools/astropytools/*xml; do - echo "testing $tool" - planemo t $tool - done - diff --git a/.github/workflows/live-preview.yaml b/.github/workflows/live-preview.yaml index a1fb307e..03c7ea8c 100644 --- a/.github/workflows/live-preview.yaml +++ b/.github/workflows/live-preview.yaml @@ -26,7 +26,7 @@ jobs: steps: - uses: actions/setup-python@v4 with: - python-version: "3.7" + python-version: "3.8" - uses: actions/checkout@v3 with: fetch-depth: 0 @@ -63,7 +63,7 @@ jobs: run: | cd deploy-preview - dir_list=`for tl in $TOOL_LIST ; do echo $tl | awk -F '/' '{print $2}'; done | uniq` + dir_list=`for tl in $TOOL_LIST ; do echo $tl | awk -F '/' '{for(i=2;i<=(NF-1);i++) printf("%s%s",$i,i==(NF-1)?" 
":FS)}'; done | uniq` for dr in $dir_list; do tools_list=`for tl in $TOOL_LIST; do if [[ "$tl" == *"$dr"* ]] ; then basename $tl ; fi ; done` diff --git a/deploy-preview/deploy.yml b/deploy-preview/deploy.yml index 23019223..46cae926 100644 --- a/deploy-preview/deploy.yml +++ b/deploy-preview/deploy.yml @@ -12,6 +12,12 @@ tasks: + - name: Create tool dir + ansible.builtin.file: + path: "{{ dest_tools_dir }}/{{ tool_dir }}_pr{{ pr_num }}" + state: directory + mode: 0755 + - name: Sync tool dir ansible.posix.synchronize: src: "{{ src_tools_dir }}/{{ tool_dir }}/" @@ -29,8 +35,8 @@ - name: Make name unique replace: path: "{{ dest_tools_dir }}/{{ tool_dir }}_pr{{ pr_num }}/{{ item }}" - regexp: ' bool: is_valid = True @@ -507,6 +518,20 @@ def _set_archive(self): self._archives.append( TapArchive(access_url=self._service_access_url)) + elif self._archive_type == 'custom': + self._service_access_url = \ + self._json_parameters['archive_selection']['access_url'] + + if Utils.is_valid_url(self._service_access_url): + self._archives.append( + TapArchive(access_url=self._service_access_url)) + else: + error_message = "archive access url is not a valid url" + Logger.create_action_log( + Logger.ACTION_ERROR, + Logger.ACTION_TYPE_ARCHIVE_CONNECTION, + error_message) + else: keyword = \ self._json_parameters['archive_selection']['keyword'] @@ -752,6 +777,11 @@ def run(self): for archive in self._archives: try: + + if archive.access_url in ARCHIVES_TIMEOUT_BYPASS: + archive.get_resources = \ + timeout(40)(TapArchive.get_resources.__get__(archive)) # noqa: E501 + _file_url, error_message = archive.get_resources( self._adql_query, self._number_of_files, @@ -1250,9 +1280,9 @@ def write_urls_to_output(urls: [], output, access_url="access_url"): with open(output, "w") as file_output: for url in urls: try: - file_output.write(url[access_url] + ',') + file_output.write(str(url[access_url]) + ',') except Exception: - error_message = "url field not found for url" + error_message = f"url 
field {access_url} not found for url" Logger.create_action_log( Logger.ACTION_ERROR, Logger.ACTION_TYPE_WRITE_URL, @@ -1305,6 +1335,11 @@ def collect_resource_keys(urls_data: list) -> list: resource_keys.append(key) return resource_keys + @staticmethod + def is_valid_url(url: str) -> bool: + regex_url = re.compile(r'^https?://(?:[A-Za-z0-9-]+\.)+[A-Za-z]{2,6}(?::\d+)?(?:/[^\s]*)?$') # noqa: E501 + return re.match(regex_url, url) is not None + class Logger: _logs = [] diff --git a/tools/archives/pyvo_integration/astronomical_archives.xml b/tools/archives/pyvo_integration/astronomical_archives.xml index 8f9a6433..7e944008 100644 --- a/tools/archives/pyvo_integration/astronomical_archives.xml +++ b/tools/archives/pyvo_integration/astronomical_archives.xml @@ -1,4 +1,4 @@ - + queries astronomical archives through Virtual Observatory protocols operation_0224 @@ -24,6 +24,7 @@ + @@ -47,6 +48,11 @@ + + + ^https?://[A-Za-z0-9]([A-Za-z0-9-\.]{0,61}[A-Za-z0-9])?\.[A-Za-z]{2,6}(:\d+)?(/[^\s]*)?$ + +
@@ -255,6 +261,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -460,7 +500,7 @@ The Table Access Protocol (TAP) lets you execute queries against our database ta ----- -The MAST Archive at STScI TAP end point for the TESS Input Catalog.

The TIC is used to help identify two-minute cadence target selection for the TESS mission, and to calculate physical and observational properties of planet candidates. It is for use by both the TESS science team and the public, and it is periodically updated – the current version is TIC-8. TIC-8 uses the GAIA DR2 catalog as a base and merges a large number of other photometric catalogs, including 2MASS, UCAC4, APASS, SDSS, WISE, etc. There are roughly 1.5 billion stellar and extended sources in TIC-8, containing compiled magnitudes including B, V, u, g, r, i, z, J, H, K, W1-W4, and G. +The MAST Archive at STScI TAP end point for the TESS Input Catalog.The TIC is used to help identify two-minute cadence target selection for the TESS mission, and to calculate physical and observational properties of planet candidates. It is for use by both the TESS science team and the public, and it is periodically updated – the current version is TIC-8. TIC-8 uses the GAIA DR2 catalog as a base and merges a large number of other photometric catalogs, including 2MASS, UCAC4, APASS, SDSS, WISE, etc. There are roughly 1.5 billion stellar and extended sources in TIC-8, containing compiled magnitudes including B, V, u, g, r, i, z, J, H, K, W1-W4, and G. The TIC can be directly accessed through the Mikulski Archive for Space Telescopes (MAST), using either queries or bulk download. The Table Access Protocol (TAP) lets you execute queries against our database tables, and inspect various metadata. Upload is not currently supported. @@ -543,7 +583,7 @@ Tables exposed through this endpoint include: epn_core from the gem_mars schema, ----- -**ArVO Byu TAP** http://arvo-registry.sci.am/tap ArVO Byurakan TAP service +**ArVO Byu TAP** arvo-registry.sci.am/tap ArVO Byurakan TAP service ----- @@ -612,7 +652,7 @@ Illumination by the Sun of each face of the comet 67P/Churyumov-Gerasimenko base CSHP_DV_130_01_LORES_OBJ.OBJ. 
The service provides the cosine between the normal of each face (in the same order as the faces defined in the shape model) and the Sun direction; both numerical values and images of the illumination are available. Each map is defined for a given position of the Sun in the frame of 67P (67P/C-G_CK). Longitude 0 is at the center of each map. The code is developed by A. Beth, - Imperial College London, UK and the service is provided by CDPP (http://cdpp.eu). Acknowlegment: The illumination models + Imperial College London, UK and the service is provided by CDPP (cdpp.eu). Acknowledgment: The illumination models have been developed at the Department of Physics at Imperial College London (UK) under the financial support of STFC grant of UK ST/N000692/1 and ESA contract 4000119035/16/ES/JD (Rosetta RPC-PIU). We would also like to warmly thank Bernhard Geiger (ESA) for his support in validating the 2D-illumination maps.