diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0cf8c69606..ca7692a6a2 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -24,7 +24,7 @@ repos:
                 .*\.svg$
           )
   - repo: https://github.com/igorshubovych/markdownlint-cli
-    rev: v0.41.0
+    rev: v0.42.0
     hooks:
       - id: markdownlint-fix
   # Using this mirror lets us use mypyc-compiled black, which is about 2x faster
diff --git a/src/raster/r.buildvrt.gdal/Makefile b/src/raster/r.buildvrt.gdal/Makefile
new file mode 100644
index 0000000000..3327d0a22e
--- /dev/null
+++ b/src/raster/r.buildvrt.gdal/Makefile
@@ -0,0 +1,7 @@
+MODULE_TOPDIR = ../..
+
+PGM=r.buildvrt.gdal
+
+include $(MODULE_TOPDIR)/include/Make/Script.make
+
+default: script
diff --git a/src/raster/r.buildvrt.gdal/r.buildvrt.gdal.html b/src/raster/r.buildvrt.gdal/r.buildvrt.gdal.html
new file mode 100644
index 0000000000..16c95d7ef7
--- /dev/null
+++ b/src/raster/r.buildvrt.gdal/r.buildvrt.gdal.html
@@ -0,0 +1,67 @@
+<h2>DESCRIPTION</h2>
+
+<em>r.buildvrt.gdal</em> builds GDAL virtual rasters over GRASS GIS raster
+maps and links them to the mapset with <em>r.external</em>. The module was
+written as a workaround for a limitation of the GRASS GIS Virtual Raster
+(VRT) format with GDAL-linked raster maps (created with <em>r.external</em>
+/ <em>r.external.out</em>), where GRASS GIS Virtual Rasters currently show
+performance issues. See:
+<a href="https://github.com/OSGeo/grass/issues/4345">#4345</a>
+
+<p>
+For the resulting maps, GDAL VRT text files are created either in a
+directory named "gdal" in the current mapset or in a user-defined
+<b>vrt_directory</b>. Those files are not removed when the raster map is
+removed; the user is responsible for removing them when they are no
+longer needed.
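+
+<p>
+As an illustration (this is only a sketch, mirroring what the module does
+internally), the default VRT directory can be resolved from the current
+GRASS session environment in Python:
+
+<div class="code"><pre>
+# Resolve the default GDAL VRT directory of the current mapset
+from pathlib import Path
+import grass.script as gs
+
+gisenv = gs.gisenv()
+print(Path(gisenv["GISDBASE"]) / gisenv["LOCATION_NAME"] / gisenv["MAPSET"] / "gdal")
+</pre></div>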
+
+<h2>REQUIREMENTS</h2>
+<em>r.buildvrt.gdal</em> uses the Python bindings for
+<a href="https://pypi.org/project/GDAL">GDAL</a> and requires the
+GDAL-GRASS driver to include raster maps in native GRASS format in
+GDAL VRTs.
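+
+<p>
+A quick, optional way to verify both requirements from Python is to check
+that the GDAL bindings import and that the GRASS driver is registered
+(the module performs the same checks internally):
+
+<div class="code"><pre>
+# Check the GDAL Python bindings and the GDAL-GRASS driver
+from osgeo import gdal
+
+# None means the GDAL-GRASS driver (plugin) is not available
+print(gdal.GetDriverByName("GRASS") is not None)
+</pre></div>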
+
+<h2>EXAMPLES</h2>
+<div class="code"><pre>
+# Create external example data
+regs='s,0,1000
+n,500,1500'
+
+eval `g.gisenv`
+external_path="${GISDBASE}/${LOCATION}/${MAPSET}/.tmp/vrt"
+mkdir -p "$external_path"
+for reg in $regs
+do
+  r=$(echo $reg | cut -f1 -d",")
+  s=$(echo $reg | cut -f2 -d",")
+  n=$(echo $reg | cut -f3 -d",")
+
+  g.region -g n=$n s=$s w=0 e=1000 res=1
+  r.external.out format=GTiff options="compress=LZW,PREDICTOR=3" \
+    directory="$external_path"
+  r.mapcalc --o --v expression="${r}_${s}_gtiff_ntfs=float(x()*y())"
+done
+
+# Run performance tests
+g.region -g n=1500 s=0 w=0 e=1000 res=1
+format_type=gtiff_ntfs
+rmaps=$(g.list type=raster pattern="*_*_${format_type}" sep=",")
+
+# Using GRASS GIS VRT
+r.buildvrt --o --v input="$rmaps" output=vrt_${format_type}
+time r.univar map=vrt_${format_type}
+
+# Using GDAL VRT
+r.buildvrt.gdal --o --v input="$rmaps" output=vrt_${format_type}_gdal
+time r.univar map=vrt_${format_type}_gdal
+</pre></div>
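+
+<p>
+The GDAL VRT part of the comparison can also be scripted from Python with
+<em>grass.script</em>; the following sketch assumes the example maps
+created in the shell snippet above exist:
+
+<div class="code"><pre>
+import grass.script as gs
+
+rmaps = gs.list_strings(type="raster", pattern="*_*_gtiff_ntfs")
+gs.run_command("r.buildvrt.gdal", input=rmaps, output="vrt_gtiff_ntfs_gdal", overwrite=True)
+# Parse univariate statistics of the linked GDAL VRT
+stats = gs.parse_command("r.univar", map="vrt_gtiff_ntfs_gdal", flags="g")
+print(stats["mean"])
+</pre></div>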
+
+<h2>SEE ALSO</h2>
+<em>
+<a href="https://grass.osgeo.org/grass-stable/manuals/r.buildvrt.html">r.buildvrt</a>,
+<a href="https://grass.osgeo.org/grass-stable/manuals/r.patch.html">r.patch</a>,
+<a href="https://grass.osgeo.org/grass-stable/manuals/r.external.html">r.external</a>,
+<a href="https://grass.osgeo.org/grass-stable/manuals/r.external.out.html">r.external.out</a>
+</em>
+
+<h2>AUTHORS</h2>
+Stefan Blumentrath
diff --git a/src/raster/r.buildvrt.gdal/r.buildvrt.gdal.py b/src/raster/r.buildvrt.gdal/r.buildvrt.gdal.py
new file mode 100644
index 0000000000..a9ab63c0b5
--- /dev/null
+++ b/src/raster/r.buildvrt.gdal/r.buildvrt.gdal.py
@@ -0,0 +1,188 @@
+#!/usr/bin/env python3
+
+"""
+ MODULE:       r.buildvrt.gdal
+ AUTHOR(S):    Stefan Blumentrath
+ PURPOSE:      Build GDAL Virtual Rasters (VRT) over GRASS GIS raster maps
+ COPYRIGHT:    (C) 2024 by stefan.blumentrath, and the GRASS Development Team
+
+  This program is free software; you can redistribute it and/or modify
+  it under the terms of the GNU General Public License as published by
+  the Free Software Foundation; either version 2 of the License, or
+  (at your option) any later version.
+
+  This program is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+  GNU General Public License for more details.
+
+"""
+
+# %module
+# % description: Build GDAL Virtual Rasters (VRT) over GRASS GIS raster maps
+# % keyword: raster
+# % keyword: virtual
+# % keyword: gdal
+# % keyword: patch
+# %end
+
+# %option G_OPT_R_INPUTS
+# % key: input
+# % type: string
+# % required: no
+# % multiple: yes
+# %end
+
+# %option G_OPT_F_INPUT
+# % key: file
+# % required: no
+# %end
+
+# %option G_OPT_M_DIR
+# % key: vrt_directory
+# % description: Directory to store GDAL VRT files in. Default is: $GISDBASE/$PROJECT/$MAPSET/gdal
+# % required: no
+# %end
+
+# %option G_OPT_R_OUTPUT
+# %end
+
+# %flag
+# % key: m
+# % label: Read data range from metadata
+# % description: WARNING: metadata are sometimes approximations with wrong data range
+# %end
+
+# %flag
+# % key: r
+# % label: Create fast link without data range
+# % description: WARNING: some modules do not work correctly without known data range
+# %end
+
+# %rules
+# % required: input,file
+# % exclusive: input,file
+# % exclusive: -m,-r
+# %end
+
+
+import json
+import sys
+
+from pathlib import Path
+
+import grass.script as gs
+
+
+def get_raster_gdalpath(
+    map_name, check_linked=True, has_grassdriver=False, gis_env=None
+):
+    """Get the GDAL-readable path to a GRASS GIS raster map
+
+    Returns either the link stored in the GDAL-link file in the cell_misc
+    directory for raster maps linked with r.external or r.external.out
+    - if requested - or the path to the header of the GRASS GIS raster
+    map"""
+    if check_linked:
+        # Check GDAL link header
+        map_info = gs.find_file(map_name)
+        header_path = (
+            Path(gis_env["GISDBASE"])
+            / gis_env["LOCATION_NAME"]
+            / map_info["mapset"]
+            / "cell_misc"
+            / map_info["name"]
+            / "gdal"
+        )
+        if header_path.is_file():
+            gdal_path = Path(
+                gs.parse_key_val(header_path.read_text(), sep=": ")["file"]
+            )
+            if gdal_path.exists():
+                return str(gdal_path)
+
+    # Get native GRASS GIS format header
+    if not has_grassdriver:
+        gs.fatal(
+            _(
+                "The GDAL-GRASS GIS driver is unavailable. "
+                "Cannot create GDAL VRTs for map <{}>. "
+                "Please install the GDAL-GRASS plugin."
+            ).format(map_name)
+        )
+
+    gdal_path = Path(gs.find_file(map_name)["file"].replace("/cell/", "/cellhd/"))
+    if gdal_path.is_file():
+        return str(gdal_path)
+
+    # Fail if file path cannot be determined
+    gs.fatal(_("Cannot determine GDAL readable path to raster map {}").format(map_name))
+
+
+def main():
+    """run the main workflow"""
+    options, flags = gs.parser()
+
+    # lazy imports
+    global gdal
+    try:
+        from osgeo import gdal
+    except ImportError:
+        gs.fatal(
+            _(
+                "Unable to load GDAL Python bindings (requires "
+                "package 'python-gdal' or Python library GDAL "
+                "to be installed)."
+            )
+        )
+
+    # Check if the GDAL-GRASS driver is available
+    has_grassdriver = gdal.GetDriverByName("GRASS") is not None
+
+    # Get GRASS GIS environment info
+    gisenv = gs.gisenv()
+
+    # Get inputs
+    if options["input"]:
+        inputs = options["input"].split(",")
+    else:
+        inputs = Path(options["file"]).read_text(encoding="UTF8").strip().split("\n")
+
+    if len(inputs) < 1:
+        gs.fatal(_("At least one input map is required".format(inputs[0])))
+
+    inputs = [
+        get_raster_gdalpath(raster_map, has_grassdriver=has_grassdriver, gis_env=gisenv)
+        for raster_map in inputs
+    ]
+
+    # Get output
+    output = options["output"]
+
+    # Create a directory to place GDAL VRTs in
+    if options["vrt_directory"]:
+        vrt_dir = Path(options["vrt_directory"])
+    else:
+        vrt_dir = Path(gisenv["GISDBASE"]).joinpath(
+            gisenv["LOCATION_NAME"], gisenv["MAPSET"], "gdal"
+        )
+    vrt_dir.mkdir(exist_ok=True, parents=True)
+
+    # Create GDAL VRT
+    vrt_path = str(vrt_dir / f"{output}.vrt")
+    gs.verbose(_("Creating GDAL VRT '{}'.").format(vrt_path))
+    gdal.BuildVRT(vrt_path, inputs)
+
+    # Import (link) GDAL VRT
+    gs.run_command(
+        "r.external",
+        quiet=True,
+        flags=f"oa{''.join([key for key, val in flags.items() if val])}",
+        input=str(vrt_path),
+        output=output,
+    )
+    gs.raster_history(output, overwrite=True)
+
+
+if __name__ == "__main__":
+
+    sys.exit(main())
diff --git a/src/raster/r.buildvrt.gdal/testsuite/test_r_buildvrt_gdal.py b/src/raster/r.buildvrt.gdal/testsuite/test_r_buildvrt_gdal.py
new file mode 100644
index 0000000000..4e5d0adaaa
--- /dev/null
+++ b/src/raster/r.buildvrt.gdal/testsuite/test_r_buildvrt_gdal.py
@@ -0,0 +1,202 @@
+#!/usr/bin/env python3
+
+"""
+MODULE:    Test of r.buildvrt.gdal
+
+AUTHOR(S): Stefan Blumentrath
+
+PURPOSE:   Test of r.buildvrt.gdal
+
+COPYRIGHT: (C) 2024 by Stefan Blumentrath and the GRASS Development Team
+
+This program is free software under the GNU General Public
+License (>=v2). Read the file COPYING that comes with GRASS
+for details.
+"""
+
+import os
+
+from pathlib import Path
+
+import grass.script as gs
+
+from grass.gunittest.case import TestCase
+from grass.gunittest.main import test
+
+
+class TestBuildGDALVRT(TestCase):
+    """The main test case for the r.buildvrt.gdal module"""
+
+    @classmethod
+    def setUpClass(cls):
+        """Ensures expected computational region (and anything else needed)
+
+        These are things needed by all test function but not modified by
+        any of them.
+        """
+        cls.vrt_univar = """n=1500000
+        null_cells=0
+        cells=1500000
+        min=0.25
+        max=1498750.25
+        range=1498750
+        mean=375000
+        mean_of_abs=375000
+        stddev=330718.777396167
+        variance=109374909722.416
+        coeff_var=88.1916739723113
+        sum=562500000000"""
+
+        # Create external example data
+        regions = {"s": (0, 1000), "n": (500, 1500)}
+        tmp_dir = Path(gs.tempfile(create=False))
+        tmp_dir.mkdir(parents=True, exist_ok=True)
+        cls.map_input_file = tmp_dir / "map_file.txt"
+        gs.run_command(
+            "r.external.out",
+            format="GTiff",
+            options="compress=LZW,PREDICTOR=3",
+            directory=str(tmp_dir),
+        )
+        map_list = []
+        for name, ns_extent in regions.items():
+            gs.run_command(
+                "g.region", n=ns_extent[1], s=ns_extent[0], w=0, e=1000, res=1
+            )
+            map_name = f"tmp_vrt_gtiff_{ns_extent[1]}_{ns_extent[0]}"
+            map_list.append(map_name)
+            gs.mapcalc(f"{map_name}=float(x()*y())")
+
+        cls.map_input_file.write_text("\n".join(map_list), encoding="UTF8")
+
+        # Set region
+        gs.use_temp_region()
+        gs.run_command("g.region", n=1500, s=0, w=0, e=1000, res=1)
+
+    @classmethod
+    def tearDownClass(cls):
+        """Remove the temporary region (and anything else we created)"""
+        gs.del_temp_region()
+        gs.run_command("g.remove", flags="f", type="raster", pattern="tmp_vrt_g*_*")
+
+    def tearDown(self):
+        """Remove the output created from the module
+
+        This is executed after each test function run. If we had
+        something to set up before each test function run, we would use setUp()
+        function.
+
+        Since we remove the raster map after running each test function,
+        we can reuse the same name for all the test functions.
+        """
+        gs.run_command("g.remove", flags="f", type="raster", pattern="tmp_vrt_gda*")
+
+    def test_r_buildvrt_gdal_no_flag(self):
+        """Check that the output is created and readable"""
+        # run the import module
+        raster_maps = gs.list_strings(type="raster", pattern="tmp_vrt_gtiff*_*")
+
+        self.assertModule(
+            "r.buildvrt.gdal",
+            verbose=True,
+            input=",".join(raster_maps),
+            output="tmp_vrt_gdal",
+        )
+        vrt_info = """north=1500
+south=0
+east=1000
+west=0
+nsres=1
+ewres=1
+rows=1500
+cols=1000
+cells=1500000
+datatype=FCELL
+ncats=0
+min=0.25
+max=1498750
+map=tmp_vrt_gdal
+maptype=GDAL-link
+title=""
+timestamp="none"
+units="none"
+vdatum="none"
+semantic_label="none"
+"""
+        self.assertRasterFitsUnivar(
+            "tmp_vrt_gdal", reference=self.vrt_univar, precision=2
+        )
+        self.assertRasterFitsInfo("tmp_vrt_gdal", reference=vrt_info, precision=2)
+
+    def test_r_buildvrt_gdal_r_flag(self):
+        """Check that the output is created and readable with r-flag"""
+        # run the import module
+        raster_maps = gs.list_strings(type="raster", pattern="tmp_vrt_gtiff*_*")
+
+        self.assertModule(
+            "r.buildvrt.gdal",
+            flags="r",
+            verbose=True,
+            input=",".join(raster_maps),
+            output="tmp_vrt_gdal_r",
+        )
+        self.assertRasterFitsUnivar(
+            "tmp_vrt_gdal_r", reference=self.vrt_univar, precision=2
+        )
+
+    def test_r_buildvrt_gdal_vrt_dir(self):
+        """Check that the output is created and readable with r-flag"""
+        # run the import module
+        raster_maps = gs.list_strings(type="raster", pattern="tmp_vrt_gtiff*_*")
+        vrt_directory = gs.tempfile(create=False)
+        self.assertModule(
+            "r.buildvrt.gdal",
+            verbose=True,
+            input=",".join(raster_maps),
+            output="tmp_vrt_gdal_dir",
+            vrt_directory=vrt_directory,
+        )
+        self.assertRasterFitsUnivar(
+            "tmp_vrt_gdal_dir", reference=self.vrt_univar, precision=2
+        )
+        self.assertFileExists(vrt_directory + "/tmp_vrt_gdal_dir.vrt")
+
+    def test_r_buildvrt_fails_input_and_file(self):
+        """Check that the module fails when both input and file are given"""
+        raster_maps = gs.list_strings(type="raster", pattern="tmp_vrt_gtiff*_*")
+        # run the import module
+        self.assertModuleFail(
+            "r.buildvrt.gdal",
+            verbose=True,
+            input=",".join(raster_maps),
+            file=str(self.map_input_file),
+            output="tmp_vrt_gdal_input_file",
+        )
+
+    def test_r_buildvrt_fails_rm(self):
+        """Check that module fails with both -m and -r"""
+        # run the import module
+        self.assertModuleFail(
+            "r.buildvrt.gdal",
+            verbose=True,
+            flags="rm",
+            file=str(self.map_input_file),
+            output="tmp_vrt_gdal_rm",
+        )
+
+    def test_r_buildvrt_gdal_vrt_file(self):
+        """Check that the output is created and readable with file input"""
+        # run the import module
+        self.assertModule(
+            "r.buildvrt.gdal",
+            verbose=True,
+            file=str(self.map_input_file),
+            output="tmp_vrt_gdal_file",
+        )
+        self.assertRasterFitsUnivar(
+            "tmp_vrt_gdal_file", reference=self.vrt_univar, precision=2
+        )
+
+
+if __name__ == "__main__":
+    test()
diff --git a/src/raster/r.in.vect/r.in.vect.html b/src/raster/r.in.vect/r.in.vect.html
index 6dcb47778a..5223c2fbbf 100644
--- a/src/raster/r.in.vect/r.in.vect.html
+++ b/src/raster/r.in.vect/r.in.vect.html
@@ -1,72 +1,72 @@
 <h2>DESCRIPTION</h2>
 
-<em>r.in.vect</em> transforms an external vector file (like GeoPackage) 
-into a raster file and imports it into GRASS GIS. Optionally, 
-attributes from the vector layer can be converted to raster category 
+<em>r.in.vect</em> transforms an external vector file (like GeoPackage)
+into a raster file and imports it into GRASS GIS. Optionally,
+attributes from the vector layer can be converted to raster category
 labels.
 
 <p>
-When users have a vector file that they want to convert to a raster 
-map, they would normally import the vector map into GRASS GIS using, 
-e.g., <em>v.in.ogr</em>, and subsequently convert the resulting vector 
-into a raster map using <em>v.to.rast</em>. Because of the topological 
-vector format of GRASS GIS, importing large complex vector maps can be 
-slow. To speed up the process, <em>r.in.vect</em> converts the 
-user-defined vector file to an intermediate geoTIF file (using <a 
-href="https://gdal.org/api/python/utilities.html#osgeo.gdal.Rasterize">gdal.rasterize</a>) 
-and imports it into GRASS GIS. 
+When users have a vector file that they want to convert to a raster
+map, they would normally import the vector map into GRASS GIS using,
+e.g., <em>v.in.ogr</em>, and subsequently convert the resulting vector
+into a raster map using <em>v.to.rast</em>. Because of the topological
+vector format of GRASS GIS, importing large complex vector maps can be
+slow. To speed up the process, <em>r.in.vect</em> converts the
+user-defined vector file to an intermediate GeoTIFF file (using <a
+href="https://gdal.org/api/python/utilities.html#osgeo.gdal.Rasterize">gdal.rasterize</a>)
+and imports it into GRASS GIS.
 
 <p>
-The objects in the vector map will be assigned an user-defined value 
-using the <b>value</b> parameter. Alternatively, the user can use the 
-<b>attribute_column</b> to specify the name of an existing column from 
-the vector map's attribute table. The values in that column will be 
+The objects in the vector map will be assigned a user-defined value
+using the <b>value</b> parameter. Alternatively, the user can use the
+<b>attribute_column</b> to specify the name of an existing column from
+the vector map's attribute table. The values in that column will be
 used as raster values in the output raster map.
 
 <h2>Notes</h2>
 
-By default, <em>r.in.vect</em> will only affect data in areas lying 
-inside the boundaries of the current computational region. Before 
-running the function, users should therefore ensure that the 
-computational region is correctly set, and that the region's resolution 
-is at the desired level. Alternatively, users can use the <b>-v</b> 
-flag to set the exent of the raster layer to that of the vector layer. 
-To ensure that the resulting raster map cleanly aligns with the 
-computational region, the extent may be slightly larger than that of 
+By default, <em>r.in.vect</em> will only affect data in areas lying
+inside the boundaries of the current computational region. Before
+running the function, users should therefore ensure that the
+computational region is correctly set, and that the region's resolution
+is at the desired level. Alternatively, users can use the <b>-v</b>
+flag to set the extent of the raster layer to that of the vector layer.
+To ensure that the resulting raster map cleanly aligns with the
+computational region, the extent may be slightly larger than that of
 the vector layer.
 
 <p>
-If the coordinate reference system (CRS) of the vector file differs 
-from that of the mapset in which users want to import the raster, the 
+If the coordinate reference system (CRS) of the vector file differs
+from that of the mapset in which users want to import the raster, the
 vector file will be first reprojected using <em>ogr2ogr</em>.
 
 <p>
-The <b>label_column</b> parameter can be used to assign raster category 
-labels. Users should check if each unique value from the category 
-column has one corresponding label in the label column. If there are 
-categories with more than one label, the first from the label column 
+The <b>label_column</b> parameter can be used to assign raster category
+labels. Users should check if each unique value from the category
+column has one corresponding label in the label column. If there are
+categories with more than one label, the first from the label column
 will be used (and a warning will be printed).
 
 <p>
-With the <b>-d</b> flag, all pixels touched by lines or polygons will 
-be updated, not just those on the line render path, or which center 
-point is within the polygon. For lines, this is similar to setting the 
+With the <b>-d</b> flag, all pixels touched by lines or polygons will
+be updated, not just those on the line render path, or whose center
+point is within the polygon. For lines, this is similar to setting the
 <b>-d</b> flag in <em>v.to.rast</em>.
 
 <p>
-Note that this will make a difference for complex and large vector 
-layers. For simple and small vector layers, it is probably faster to 
+Note that this will make a difference for complex and large vector
+layers. For simple and small vector layers, it is probably faster to
+import the vector layer first and convert it to a raster in GRASS.
 
 <h2>EXAMPLE</h2>
 
-The examples of <em>r.in.vect</em> use vector maps from the 
-<a href="https://grass.osgeo.org/download/data/">North Carolina sample 
-data set</a>. 
+The examples of <em>r.in.vect</em> use vector maps from the
+<a href="https://grass.osgeo.org/download/data/">North Carolina sample
+data set</a>.
 
 <h3>Example 1</h3>
 
-First, export a vector layer as a GeoPackage. 
+First, export a vector layer as a GeoPackage.
 
 <div class="code"><pre>
 # Export the geology vector map as Geopackage
@@ -74,9 +74,9 @@ <h3>Example 1</h3>
 </pre></div>
 
 <p>
-Import the geology.gpkg as raster. Raster cells overlapping with the 
-vector features will be assigned a value of 1, and the other raster 
-cells null. If you have RAM to spare, increase the memory to speed up 
+Import the geology.gpkg as raster. Raster cells overlapping with the
+vector features will be assigned a value of 1, and the other raster
+cells null. If you have RAM to spare, increase the memory to speed up
 the import.
 
 <div class="code"><pre>
@@ -90,23 +90,23 @@ <h3>Example 1</h3>
 memory=2000
 </pre></div>
 
-<div align="left" style="margin: 10px"> <a href="r_in_vect_im01.png"> 
-<img src="r_in_vect_im01.png" alt="The geology vector file converted 
-to, and imported as raster in GRASS. Example 1" border="0"> 
-</a><br><i>Figure 1: The geology vector file was converted to, and 
-imported as a raster into GRASS GIS, using the default settings.</i> 
+<div align="left" style="margin: 10px"> <a href="r_in_vect_im01.png">
+<img src="r_in_vect_im01.png" alt="The geology vector file converted
+to, and imported as raster in GRASS. Example 1" border="0">
+</a><br><i>Figure 1: The geology vector file was converted to, and
+imported as a raster into GRASS GIS, using the default settings.</i>
 </div>
 
 <p>
-If the GeoPackage file (or any other data source) has 
-multiple layers, users need to specify which layer to use with 
-the <b>layer</b> parameter. Otherwise, the first layer will be 
-selected. 
+If the GeoPackage file (or any other data source) has
+multiple layers, users need to specify which layer to use with
+the <b>layer</b> parameter. Otherwise, the first layer will be
+selected.
 
 <h3>Example 2</h3>
 
-Import the geology.gpkg as raster. Specify the column holding the 
-values to use as raster values and the column holding the labels for 
+Import the geology.gpkg as raster. Specify the column holding the
+values to use as raster values and the column holding the labels for
 the raster values.
 
 <div class="code"><pre>
@@ -121,20 +121,20 @@ <h3>Example 2</h3>
 r.colors map=geology_rast2 color=random
 </pre></div>
 
-<div align="left" style="margin: 10px"> <a href="r_in_vect_im02.png"> 
-<img src="r_in_vect_im02.png" alt="The geology vector file converted 
-to, and imported as raster in GRASS GIS. Example 2" border="0"> 
-</a><br><i>Figure 2: The geology vector file converted to raster and 
-imported into GRASS GIS using the values from the vector attribute 
+<div align="left" style="margin: 10px"> <a href="r_in_vect_im02.png">
+<img src="r_in_vect_im02.png" alt="The geology vector file converted
+to, and imported as raster in GRASS GIS. Example 2" border="0">
+</a><br><i>Figure 2: The geology vector file converted to raster and
+imported into GRASS GIS using the values from the vector attribute
 column GEOL250_ as raster values.</i> </div>
 
 
 <h3>Example 3</h3>
 
-First, set the resolution to 1 meter. Next, export the busroute6 vector 
-map as GeoPackage, and import it as a raster. Use the <b>-v</b> 
-flag to ensure the extent of the raster matches that of the 
-vector (by default, the bounding box of the raster map will 
+First, set the resolution to 1 meter. Next, export the busroute6 vector
+map as GeoPackage, and import it as a raster. Use the <b>-v</b>
+flag to ensure the extent of the raster matches that of the
+vector (by default, the bounding box of the raster map will
 match that of the current computational region).
 
 <div class="code"><pre>
@@ -154,16 +154,16 @@ <h3>Example 3</h3>
 memory=2000
 </pre></div>
 
-<div align="left" style="margin: 10px"> <a href="r_in_vect_im03.png"> 
-<img src="r_in_vect_im03.png" alt="The busroute6 vector file converted 
-to raster and imported into GRASS GIS. Example 3" border="0"> 
-</a><br><i>Figure 3: The busroute6 vector file converted to raster and 
+<div align="left" style="margin: 10px"> <a href="r_in_vect_im03.png">
+<img src="r_in_vect_im03.png" alt="The busroute6 vector file converted
+to raster and imported into GRASS GIS. Example 3" border="0">
+</a><br><i>Figure 3: The busroute6 vector file converted to raster and
 imported into GRASS GIS using the extent of the vector map.</i> </div>
 
 
 <h3>Example 4</h3>
 
-The same as above, but using the <b>-d</b> flag to create densified 
+The same as above, but using the <b>-d</b> flag to create densified
 lines.
 
 <div class="code"><pre>
@@ -175,12 +175,12 @@ <h3>Example 4</h3>
 memory=2000
 </pre></div>
 
-<div align="left" style="margin: 10px"> <a href="r_in_vect_im04.png"> 
-<img src="r_in_vect_im04.png" alt="The busroute6 vector file converted 
-to raster and imported into GRASS GIS. Example 4" border="0"> 
-</a><br><i>Figure 4: Rasterize the busroute 6 vector map using the 
-<b>-d</b> flag to create densified lines by adding extra cells (shown 
-in red). This avoids gaps or lines that consist of cells that are only 
+<div align="left" style="margin: 10px"> <a href="r_in_vect_im04.png">
+<img src="r_in_vect_im04.png" alt="The busroute6 vector file converted
+to raster and imported into GRASS GIS. Example 4" border="0">
+</a><br><i>Figure 4: Rasterize the busroute 6 vector map using the
+<b>-d</b> flag to create densified lines by adding extra cells (shown
+in red). This avoids gaps or lines that consist of cells that are only
 diagonally connected.</i> </div>
 
 <h2>SEE ALSO</h2>
@@ -191,6 +191,6 @@ <h2>SEE ALSO</h2>
 
 <h2>AUTHORS</h2>
 
-Paulo van Breugel (<a href="https://ecodiv.earth">ecodiv.earth</a>)<br> 
-Applied Geo-information Sciences<br> <a href="https://www.has.nl/">HAS 
+Paulo van Breugel (<a href="https://ecodiv.earth">ecodiv.earth</a>)<br>
+Applied Geo-information Sciences<br> <a href="https://www.has.nl/">HAS
 green academy, University of Applied Sciences</a><br>
diff --git a/src/temporal/t.stac/README.md b/src/temporal/t.stac/README.md
index a70f113cf8..878acd426a 100644
--- a/src/temporal/t.stac/README.md
+++ b/src/temporal/t.stac/README.md
@@ -1,9 +1,9 @@
-# (In-Development) t.stac
+# t.stac
 
 ## Description
 
 The **t.stac** toolset utilizes the
-[pystac-client (v0.5.1)](https://github.com/stac-utils/pystac-client) to search
+[pystac-client (v0.8.3)](https://github.com/stac-utils/pystac-client) to search
 STAC APIs and import items into GRASS GIS.
 
 ### Item Search Parameters
@@ -83,7 +83,9 @@ required. Use items_as_dicts to avoid object unmarshalling errors.
 
 ### Dependencies
 
-* [pystac-client (v0.5.1)](https://github.com/stac-utils/pystac-client)
+* [pystac-client (v0.8.3)](https://github.com/stac-utils/pystac-client)
+* [pystac (v1.10.1)](https://pystac.readthedocs.io/en/stable/)
+* [tqdm (v4.66.3)](https://github.com/tqdm/tqdm)
 
 #### Optional Query
 
diff --git a/src/temporal/t.stac/libstac/__init__.py b/src/temporal/t.stac/libstac/__init__.py
index e69de29bb2..aac562c450 100644
--- a/src/temporal/t.stac/libstac/__init__.py
+++ b/src/temporal/t.stac/libstac/__init__.py
@@ -0,0 +1 @@
+import staclib as libstac  # noqa
diff --git a/src/temporal/t.stac/libstac/staclib.py b/src/temporal/t.stac/libstac/staclib.py
index ea609d6007..7b6155b8ad 100644
--- a/src/temporal/t.stac/libstac/staclib.py
+++ b/src/temporal/t.stac/libstac/staclib.py
@@ -1,13 +1,206 @@
-import grass.script as gs
-from grass.pygrass.gis.region import Region
-from grass.pygrass.vector import VectorTopo
-from grass.pygrass.vector.geometry import Point, Area, Centroid, Boundary
+#!/usr/bin/env python3
+
+############################################################################
+#
+# MODULE:       staclib
+# AUTHOR:       Corey T. White, OpenPlains Inc. & NCSU
+# PURPOSE:      Helper library to import STAC data into GRASS.
+# COPYRIGHT:    (C) 2024 Corey White
+#               This program is free software under the GNU General
+#               Public License (>=v2). Read the file COPYING that
+#               comes with GRASS for details.
+#
+#############################################################################
+
+
+import os
+import sys
 import base64
 import tempfile
 import json
-import os
-from pystac_client.conformance import ConformanceClasses
-from pystac_client.exceptions import APIError
+from datetime import datetime
+from dateutil import parser
+from io import StringIO
+from pprint import pprint
+import grass.script as gs
+from grass.exceptions import CalledModuleError
+from grass.pygrass.vector import VectorTopo
+from grass.pygrass.vector.geometry import Point, Centroid, Boundary
+from concurrent.futures import ThreadPoolExecutor
+
+# Import pystac_client modules
+try:
+    from pystac_client import Client
+    from pystac_client.exceptions import APIError
+    from pystac_client.conformance import ConformanceClasses
+except ImportError as err:
+    gs.fatal(_("Unable to import pystac_client: {err}"))
+
+
+def _import_tqdm(error):
+    """Import tqdm module"""
+    try:
+        from tqdm import tqdm
+
+        return tqdm
+    except ImportError as err:
+        if error:
+            raise err
+        return None
+
+
+def _import_pystac_mediatype(error):
+    """Import pystac module"""
+    try:
+        from pystac import MediaType
+
+        return MediaType
+    except ImportError as err:
+        if error:
+            raise err
+        return None
+
+
+class STACHelper:
+    """STAC Helper Class"""
+
+    def __init__(self):
+        self.client = None
+
+    def connect_to_stac(self, url, headers=None):
+        """Connect to a STAC catalog."""
+        if self.client is None:
+            try:
+                self.client = Client.open(url, headers)
+                return self.client
+            except APIError as err:
+                gs.fatal(f"Failed to connect to STAC catalog: {err}")
+        else:
+            gs.warning(_("Client already connected."))
+            return self.client
+
+    def get_all_collections(self):
+        """Get a list of collections from STAC Client"""
+        if self.conforms_to_collections():
+            gs.verbose(_("Client conforms to Collection"))
+        try:
+            collections = self.client.get_collections()
+            collection_list = list(collections)
+            return [i.to_dict() for i in collection_list]
+
+        except APIError as e:
+            gs.fatal(_("Error getting collections: {}".format(e)))
+
+    def get_collection(self, collection_id):
+        """Get a collection frofrom io import StringIOm STAC Client"""
+        try:
+            collection = self.client.get_collection(collection_id)
+            self.collection = collection.to_dict()
+            return self.collection
+
+        except APIError as e:
+            gs.fatal(_("Error getting collection: {}".format(e)))
+
+    def search_api(self, **kwargs):
+        """Search the STAC API"""
+        if self.conforms_to_item_search():
+            gs.verbose(_("STAC API Conforms to Item Search"))
+
+        if kwargs.get("filter"):
+            self.conforms_to_filter()
+
+        if kwargs.get("query"):
+            self.conforms_to_query()
+
+        try:
+            search = self.client.search(**kwargs)
+        except APIError as e:
+            gs.fatal(_("Error searching STAC API: {}".format(e)))
+        except NotImplementedError as e:
+            gs.fatal(_("Error searching STAC API: {}".format(e)))
+        except Exception as e:
+            gs.fatal(_("Error searching STAC API: {}".format(e)))
+
+        try:
+            gs.message(_(f"Search Matched: {search.matched()} items"))
+        except Exception as e:
+            gs.warning(_(f"No items found: {e}"))
+            return None
+
+        return search
+
+    def report_stac_item(self, item):
+        """Print a report of the STAC item to the console."""
+        sys.stdout.write(f"Collection ID: {item.collection_id}\n")
+        sys.stdout.write(f"Item: {item.id}\n")
+        print_attribute(item, "geometry", "Geometry")
+        sys.stdout.write(f"Bbox: {item.bbox}\n")
+
+        print_attribute(item, "datetime", "Datetime")
+        print_attribute(item, "start_datetime", "Start Datetime")
+        print_attribute(item, "end_datetime", "End Datetime")
+        sys.stdout.write("Extra Fields:\n")
+        print_summary(item.extra_fields)
+
+        print_list_attribute(item.stac_extensions, "Extensions:")
+        sys.stdout.write("Properties:\n")
+        print_summary(item.properties)
+
+    def _check_conformance(self, conformance_class, response="fatal"):
+        """Check if the STAC API conforms to the given conformance class"""
+        if not self.client.conforms_to(conformance_class):
+            if response == "fatal":
+                gs.fatal(_(f"STAC API does not conform to {conformance_class}"))
+                return False
+            elif response == "warning":
+                gs.warning(_(f"STAC API does not conform to {conformance_class}"))
+                return True
+            elif response == "verbose":
+                gs.verbose(_(f"STAC API does not conform to {conformance_class}"))
+                return True
+            elif response == "info":
+                gs.info(_(f"STAC API does not conform to {conformance_class}"))
+                return True
+            elif response == "message":
+                sys.stdout.write(f"STAC API does not conform to {conformance_class}\n")
+                return True
+
+    def conforms_to_collections(self):
+        """Check if the STAC API conforms to the Collections conformance class"""
+        return self._check_conformance(
+            ConformanceClasses.COLLECTIONS, response="warning"
+        )
+
+    def conforms_to_item_search(self):
+        """Check if the STAC API conforms to the Item Search conformance class"""
+        return self._check_conformance(
+            ConformanceClasses.ITEM_SEARCH, response="warning"
+        )
+
+    def conforms_to_filter(self):
+        """Check if the STAC API conforms to the Filter conformance class"""
+        return self._check_conformance(ConformanceClasses.FILTER, response="warning")
+
+    def conforms_to_query(self):
+        """Check if the STAC API conforms to the Query conformance class"""
+        return self._check_conformance(ConformanceClasses.QUERY, response="warning")
+
+    def conforms_to_sort(self):
+        """Check if the STAC API conforms to the Sort conformance class"""
+        return self._check_conformance(ConformanceClasses.SORT, response="warning")
+
+    def conforms_to_fields(self):
+        """Check if the STAC API conforms to the Fields conformance class"""
+        return self._check_conformance(ConformanceClasses.FIELDS, response="warning")
+
+    def conforms_to_core(self):
+        """Check if the STAC API conforms to the Core conformance class"""
+        return self._check_conformance(ConformanceClasses.CORE, response="warning")
+
+    def conforms_to_context(self):
+        """Check if the STAC API conforms to the Context conformance class"""
+        return self._check_conformance(ConformanceClasses.CONTEXT, response="warning")
 
 
 def encode_credentials(username, password):
@@ -63,81 +256,157 @@ def print_summary(data, depth=1):
     for key, value in data.items():
         indentation = generate_indentation(start_depth)
         if isinstance(value, dict):
-            gs.message(_(f"#\n# {indentation}{key}:"))
+            sys.stdout.write(f"{'-' * 75}\n")
+            sys.stdout.write(f"\n {indentation}{key}:\n")
             print_summary(value, depth=start_depth + 1)
         if isinstance(value, list):
-            gs.message(_(f"# {indentation}{key}:"))
+            sys.stdout.write(f"{'-' * 75}\n")
+            sys.stdout.write(f"{indentation}{key}:\n")
             for item in value:
                 if isinstance(item, dict):
                     print_summary(item, depth=start_depth + 1)
         else:
-            gs.message(_(f"# {indentation}{key}: {value}"))
+            sys.stdout.write(f"# {indentation}{key}: {value}\n")
+
+
+def print_json_to_stdout(data, pretty=False):
+    """Pretty print data to stdout"""
+    if pretty:
+        output = StringIO()
+        pprint(data, stream=output)
+        sys.stdout.write(output.getvalue())
+    else:
+        json_output = json.dumps(data)
+        sys.stdout.write(json_output)
 
 
 def print_list_attribute(data, title):
     "Print a list attribute"
-    gs.message(_(f"{title}"))
+    sys.stdout.write(f"{'-' * 75}\n")
+    sys.stdout.write(f"{title}\n")
+    sys.stdout.write(f"{'-' * 75}\n")
     for item in data:
-        gs.message(_(f"\t{item}"))
+        sys.stdout.write(f"\t{item}\n")
+    sys.stdout.write(f"{'-' * 75}\n")
 
 
 def print_attribute(item, attribute, message=None):
     """Print an attribute of the item and handle AttributeError."""
     message = message if message else attribute.capitalize()
     try:
-        gs.message(_(f"{message}: {getattr(item, attribute)}"))
+        sys.stdout.write(f"{message}: {getattr(item, attribute)}\n")
     except AttributeError:
         gs.info(_(f"{message} not found."))
 
 
 def print_basic_collection_info(collection):
     """Print basic information about a collection"""
-    gs.message(_(f"Collection ID: {collection.get('id')}"))
-    gs.message(_(f"STAC Version: {collection.get('stac_version')}"))
-    gs.message(_(f"Description: {collection.get('description')}"))
-    gs.message(_(f"Extent: {collection.get('extent')}"))
-    gs.message(_(f"License: {collection.get('license')}"))
-    gs.message(_(f"Keywords: {collection.get('keywords')}"))
+    sys.stdout.write(f"Collection ID: {collection.get('id')}\n")
+    sys.stdout.write(f"STAC Version: {collection.get('stac_version')}\n")
+    sys.stdout.write(f"Description: {collection.get('description')}\n")
+    sys.stdout.write(f"Extent: {collection.get('extent')}\n")
+    sys.stdout.write(f"License: {collection.get('license')}\n")
+    sys.stdout.write(f"Keywords: {collection.get('keywords')}\n")
     item_summary = collection.get("summaries")
-    gs.message(_(f"{'-' * 75}\n"))
+    sys.stdout.write(f"{'-' * 75}\n\n")
     if item_summary:
-        gs.message(_("Summary:"))
+        sys.stdout.write("Summary:\n")
         for k, v in item_summary.items():
-            gs.message(_(f"{k}: {v}"))
-        gs.message(_(f"{'-' * 75}\n"))
+            sys.stdout.write(f"{k}: {v}\n")
+        sys.stdout.write(f"{'-' * 75}\n\n")
     item_assets = collection.get("item_assets")
     item_asset_keys = item_assets.keys()
 
-    gs.message(_(f"Item Assets Keys: {list(item_asset_keys)}"))
-    gs.message(_(f"{'-' * 75}\n"))
+    sys.stdout.write(f"Item Assets Keys: {list(item_asset_keys)}\n")
+    sys.stdout.write(f"{'-' * 75}\n\n")
     for key, value in item_assets.items():
-        gs.message(_(f"Asset: {value.get('title')}"))
-        gs.message(_(f"Key: {key}"))
-        gs.message(_(f"Roles: {value.get('roles')}"))
-        gs.message(_(f"Type: {value.get('type')}"))
-        gs.message(_(f"Description: {value.get('description')}"))
+        sys.stdout.write(f"Asset: {value.get('title')}\n")
+        sys.stdout.write(f"Key: {key}\n")
+        sys.stdout.write(f"Roles: {value.get('roles')}\n")
+        sys.stdout.write(f"Type: {value.get('type')}\n")
+        sys.stdout.write(f"Description: {value.get('description')}\n")
         if value.get("gsd"):
-            gs.message(_(f"GSD: {value.get('gsd')}"))
+            sys.stdout.write(f"GSD: {value.get('gsd')}\n")
         if value.get("eo:bands"):
-            gs.message(_("EO Bands:"))
+            sys.stdout.write("EO Bands:\n")
             for band in value.get("eo:bands"):
-                gs.message(_(f"Band: {band}"))
+                sys.stdout.write(f"Band: {band}\n")
         if value.get("proj:shape"):
-            gs.message(_(f"Shape: {value.get('proj:shape')}"))
+            sys.stdout.write(f"Shape: {value.get('proj:shape')}\n")
         if value.get("proj:transform"):
-            gs.message(_(f"Asset Transform: {value.get('proj:transform')}"))
+            sys.stdout.write(f"Asset Transform: {value.get('proj:transform')}\n")
         if value.get("proj:crs"):
-            gs.message(_(f"CRS: {value.get('proj:crs')}"))
+            sys.stdout.write(f"CRS: {value.get('proj:crs')}\n")
         if value.get("proj:geometry"):
-            gs.message(_(f"Geometry: {value.get('proj:geometry')}"))
+            sys.stdout.write(f"Geometry: {value.get('proj:geometry')}\n")
         if value.get("proj:extent"):
-            gs.message(_(f"Asset Extent: {value.get('proj:extent')}"))
+            sys.stdout.write(f"Asset Extent: {value.get('proj:extent')}\n")
         if value.get("raster:bands"):
-            gs.message(_("Raster Bands:"))
+            sys.stdout.write("Raster Bands:\n")
             for band in value.get("raster:bands"):
-                gs.message(_(f"Band: {band}"))
+                sys.stdout.write(f"Band: {band}\n")
+
+        sys.stdout.write(f"{'-' * 75}\n\n")
+
+
+def collection_metadata(collection):
+    """Get collection"""
+
+    sys.stdout.write(f"{'-' * 75}\n\n")
+    sys.stdout.write(f"Collection Id: {collection.get('id')}\n")
+    sys.stdout.write(f"Title: {collection.get('title')}\n")
+    sys.stdout.write(f"Description: {collection.get('description')}\n")
+
+    extent = collection.get("extent")
+    if extent:
+        spatial = extent.get("spatial")
+        if spatial:
+            bbox = spatial.get("bbox")
+            if bbox:
+                sys.stdout.write(f"bbox: {bbox}\n")
+        temporal = extent.get("temporal")
+        if temporal:
+            interval = temporal.get("interval")
+            if interval:
+                sys.stdout.write(f"Temporal Interval: {interval}\n")
+
+    sys.stdout.write(f"License: {collection.get('license')}\n")
+    sys.stdout.write(f"Keywords: {collection.get('keywords')}\n")
+    # sys.stdout.write(f"Providers: {collection.get('providers')}\n")
+    sys.stdout.write(f"Links: {collection.get('links')}\n")
+    sys.stdout.write(f"Stac Extensions: {collection.get('stac_extensions')}\n")
 
-        gs.message(_(f"{'-' * 75}\n"))
+    try:
+        sys.stdout.write("\n# Summaries:\n")
+        print_summary(collection.get("summaries"))
+    except AttributeError:
+        gs.info(_("Summaries not found."))
+
+    try:
+        sys.stdout.write("\n# Extra Fields:\n")
+        print_summary(collection.get("extra_fields"))
+    except AttributeError:
+        gs.info(_("# Extra Fields not found."))
+    sys.stdout.write(f"{'-' * 75}\n\n")
+
+
+def report_plain_asset_summary(asset):
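+    """Print a plain-text summary of a single STAC asset dictionary."""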
+    MediaType = _import_pystac_mediatype(False)
+    sys.stdout.write("\nAsset\n")
+    sys.stdout.write(f"Asset Item Id: {asset.get('item_id')}\n")
+
+    sys.stdout.write(f"Asset Title: {asset.get('title')}\n")
+    sys.stdout.write(f"Asset Filename: {asset.get('file_name')}\n")
+    sys.stdout.write(f"raster:bands: {asset.get('raster:bands')}\n")
+    sys.stdout.write(f"eo:bands: {asset.get('eo:bands')}\n")
+    sys.stdout.write(f"Asset Description: {asset.get('description')}\n")
+
+    if MediaType:
+        sys.stdout.write(f"Asset Media Type: {MediaType(asset.get('type')).name}\n")
+    else:
+        sys.stdout.write(f"Asset Media Type: {asset.get('type')}\n")
+    sys.stdout.write(f"Asset Roles: {asset.get('roles')}\n")
+    sys.stdout.write(f"Asset Href: {asset.get('href')}\n")
 
 
 def region_to_wgs84_decimal_degrees_bbox():
@@ -147,7 +416,6 @@ def region_to_wgs84_decimal_degrees_bbox():
         float(c)
         for c in [region["ll_w"], region["ll_s"], region["ll_e"], region["ll_n"]]
     ]
-    gs.message(_("BBOX: {}".format(bbox)))
     return bbox
 
 
@@ -178,7 +446,7 @@ def check_url_type(url):
         gs.warning(_("HTTP is not secure. Using HTTPS instead."))
         return url.replace("https://", "/vsicurl/https://")
     else:
-        gs.message(_(f"Unknown Protocol: {url}"))
+        sys.stdout.write(f"Unknown Protocol: {url}\n")
         return "unknown"
 
 
@@ -289,25 +557,12 @@ def _flatten_dict(d, parent_key="", sep="_"):
 
 def create_vector_from_feature_collection(vector, search, limit, max_items):
     """Create a vector from items in a Feature Collection"""
-    n_matched = None
-    try:
-        n_matched = search.matched()
-    except Exception:
-        gs.verbose(_("STAC API doesn't support matched() method."))
-
-    if n_matched:
-        pages = (n_matched // max_items) + 1
-    else:
-        # These requests tend to be very slow
-        pages = len(list(search.pages()))
-
-    gs.message(_(f"Fetching items {n_matched} from {pages} pages."))
 
     feature_collection = {"type": "FeatureCollection", "features": []}
 
     # Extract asset information for each item
-    for page in range(pages):
-        temp_features = search.item_collection_as_dict()
+    for page in search.pages_as_dicts():
+        temp_features = page
         for idx, item in enumerate(temp_features["features"]):
             flattened_assets = _flatten_dict(
                 item["assets"], parent_key="assets", sep="."
@@ -330,18 +585,40 @@ def create_vector_from_feature_collection(vector, search, limit, max_items):
     gs.run_command("v.colors", map=vector, color="random", quiet=True)
 
 
+def format_datetime(dt_str):
+    # Parse the datetime string
+    dt = parser.parse(dt_str)
+    # Format the datetime object to the desired format
+    return dt.strftime("%Y-%m-%d %H:%M:%S")
+
+
 def register_strds_from_items(collection_items_assets, strds_output):
     """Create registy for STRDS from collection items assets"""
+
     with open(strds_output, "w") as f:
         for asset in collection_items_assets:
             semantic_label = asset.get("file_name").split(".")[-1]
             created_date = asset.get("datetime")
-
-            if created_date:
-                f.write(f"{asset['file_name']}|{created_date}|{semantic_label}\n")
+            eobands = asset.get("eo:bands")
+            if eobands:
+                for idx, band in enumerate(eobands):
+
+                    band_name = band.get("common_name")
+                    if created_date:
+                        formatted_date = format_datetime(created_date)
+                        f.write(
+                            f"{asset['file_name']}.{idx + 1}|{formatted_date}|{band_name}\n"
+                        )
+                    else:
+                        gs.warning(_("No datetime found for item."))
+                        f.write(f"{asset['file_name']}.{idx + 1}|{None}|{band_name}\n")
             else:
-                gs.warning(_("No datetime found for item."))
-                f.write(f"{asset['file_name']}|{None}|{semantic_label}\n")
+                if created_date:
+                    formatted_date = format_datetime(created_date)
+                    f.write(f"{asset['file_name']}|{formatted_date}|{semantic_label}\n")
+                else:
+                    gs.warning(_("No datetime found for item."))
+                    f.write(f"{asset['file_name']}|{None}|{semantic_label}\n")
 
 
 def fetch_items_with_pagination(items_search, limit, max_items):
@@ -400,7 +677,7 @@ def create_metadata_vector(vector, metadata):
     ) as new_vec:
 
         for i, item in enumerate(metadata):
-            gs.message(_("Adding collection: {}".format(item.get("id"))))
+            sys.stdout.write(f"Adding collection: {item.get('id')}\n")
             # Transform bbox to locations CRS
             # Safe extraction
             extent = item.get("extent", {})
@@ -418,9 +695,7 @@ def create_metadata_vector(vector, metadata):
             if bbox_list and isinstance(bbox_list[0], list) and len(bbox_list[0]) == 4:
                 wgs84_bbox = bbox_list[0]
             else:
-                gs.warning(
-                    _("Invalid bbox. Skipping Collection {}.".format(item.get("id")))
-                )
+                gs.warning(_(f"Invalid bbox. Skipping Collection {item.get('id')}.\n"))
                 continue
 
             bbox = wgs84_bbox_to_boundary(wgs84_bbox)
@@ -466,74 +741,78 @@ def create_metadata_vector(vector, metadata):
     return metadata
 
 
-def get_all_collections(client):
-    """Get a list of collections from STAC Client"""
-    if conform_to_collections(client):
-        gs.verbose(_("Client conforms to Collection"))
-    try:
-        collections = client.get_collections()
-        collection_list = list(collections)
-        return [i.to_dict() for i in collection_list]
-
-    except APIError as e:
-        gs.fatal(_("Error getting collections: {}".format(e)))
-
-
-def _check_conformance(client, conformance_class, response="fatal"):
-    """Check if the STAC API conforms to the given conformance class"""
-    if not client.conforms_to(conformance_class):
-        if response == "fatal":
-            gs.fatal(_(f"STAC API does not conform to {conformance_class}"))
-            return False
-        elif response == "warning":
-            gs.warning(_(f"STAC API does not conform to {conformance_class}"))
-            return True
-        elif response == "verbose":
-            gs.verbose(_(f"STAC API does not conform to {conformance_class}"))
-            return True
-        elif response == "info":
-            gs.info(_(f"STAC API does not conform to {conformance_class}"))
-            return True
-        elif response == "message":
-            gs.message(_(f"STAC API does not conform to {conformance_class}"))
-            return True
-
-
-def conform_to_collections(client):
-    """Check if the STAC API conforms to the Collections conformance class"""
-    return _check_conformance(client, ConformanceClasses.COLLECTIONS)
-
-
-def conform_to_item_search(client):
-    """Check if the STAC API conforms to the Item Search conformance class"""
-    return _check_conformance(client, ConformanceClasses.ITEM_SEARCH)
-
-
-def conform_to_filter(client):
-    """Check if the STAC API conforms to the Filter conformance class"""
-    return _check_conformance(client, ConformanceClasses.FILTER)
-
-
-def conform_to_query(client):
-    """Check if the STAC API conforms to the Query conformance class"""
-    return _check_conformance(client, ConformanceClasses.QUERY)
+def import_grass_raster(params):
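+    """Import a single asset with r.import (worker for the thread pool)."""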
+    assets, resample_method, extent, resolution, resolution_value, memory = params
+    sys.stdout.write(f"Downloading Asset: {assets}\n")
+    input_url = check_url_type(assets["href"])
+    sys.stdout.write(f"Import Url: {input_url}\n")
 
-
-def conform_to_sort(client):
-    """Check if the STAC API conforms to the Sort conformance class"""
-    return _check_conformance(client, ConformanceClasses.SORT)
-
-
-def conform_to_fields(client):
-    """Check if the STAC API conforms to the Fields conformance class"""
-    return _check_conformance(client, ConformanceClasses.FIELDS)
-
-
-def conform_to_core(client):
-    """Check if the STAC API conforms to the Core conformance class"""
-    return _check_conformance(client, ConformanceClasses.CORE)
-
-
-def conform_to_context(client):
-    """Check if the STAC API conforms to the Context conformance class"""
-    return _check_conformance(client, ConformanceClasses.CONTEXT)
+    try:
+        sys.stdout.write(f"Importing: {assets['file_name']}\n")
+        gs.parse_command(
+            "r.import",
+            input=input_url,
+            output=assets["file_name"],
+            resample=resample_method,
+            extent=extent,
+            resolution=resolution,
+            resolution_value=resolution_value,
+            title=assets["file_name"],
+            memory=memory,
+            quiet=True,
+        )
+    except CalledModuleError as e:
+        gs.fatal(_("Error importing raster: {}".format(e.stderr)))
+
+
+def download_assets(
+    assets,
+    resample_method,
+    resample_extent,
+    resolution,
+    resolution_value,
+    memory=300,
+    nprocs=1,
+):
+    """Downloads a list of images from the given URLs to the given filenames."""
+    number_of_assets = len(assets)
+    resample_extent_list = [resample_extent] * number_of_assets
+    resolution_list = [resolution] * number_of_assets
+    resolution_value_list = [resolution_value] * number_of_assets
+    resample_method_list = [resample_method] * number_of_assets
+    memory_list = [memory] * number_of_assets
+    max_cpus = os.cpu_count() - 1
+    if nprocs > max_cpus:
+        gs.warning(
+            _(
+                "Number of processes {nprocs} is greater than the number of CPUs {max_cpus}."
+            ).format(nprocs=nprocs, max_cpus=max_cpus)
+        )
+        nprocs = max_cpus
+
+    def execute_import_grass_raster(pbar=None):
+        with ThreadPoolExecutor(max_workers=nprocs) as executor:
+            try:
+                for _a in executor.map(
+                    import_grass_raster,
+                    zip(
+                        assets,
+                        resample_method_list,
+                        resample_extent_list,
+                        resolution_list,
+                        resolution_value_list,
+                        memory_list,
+                    ),
+                ):
+                    if pbar:
+                        pbar.update(1)
+            except Exception as e:
+                gs.fatal(_("Error importing raster: {}".format(str(e))))
+
+    tqdm = _import_tqdm(False)
+    if tqdm is None:
+        gs.warning(_("tqdm module not found. Progress bar will not be displayed."))
+        execute_import_grass_raster()
+    else:
+        with tqdm(total=number_of_assets, desc="Downloading assets") as pbar:
+            execute_import_grass_raster(pbar)
diff --git a/src/temporal/t.stac/requirements.txt b/src/temporal/t.stac/requirements.txt
index 36c2bb3e7f..53013059fe 100644
--- a/src/temporal/t.stac/requirements.txt
+++ b/src/temporal/t.stac/requirements.txt
@@ -1,3 +1,3 @@
-pystac==1.10
-pystac_client==0.8
-tqdm==4.66
+pystac==1.10.1
+pystac_client==0.8.3
+tqdm==4.66.3
diff --git a/src/temporal/t.stac/t.stac.catalog/t.stac.catalog.html b/src/temporal/t.stac/t.stac.catalog/t.stac.catalog.html
index f58d0e9c60..0a5474c115 100644
--- a/src/temporal/t.stac/t.stac.catalog/t.stac.catalog.html
+++ b/src/temporal/t.stac/t.stac.catalog/t.stac.catalog.html
@@ -9,10 +9,7 @@ <h2>DESCRIPTION</h2>
 <h2>REQUIREMENTS</h2>
 
 <ul>
-    <li><a href="https://pystac.readthedocs.io/en/stable/installation.html">PySTAC (1.10.x)</a></li>
-    <li><a href="https://pystac-client.readthedocs.io/en/stable/">PySTAC_Client (0.8)</a></li>
-    <li>tqdm (4.66.x)</li>
-    <li>numpy (1.26.x)</li>
+    <li><a href="https://pystac-client.readthedocs.io/en/stable/">PySTAC_Client (0.8.3)</a></li>
 </ul>
 
 <h2>EXAMPLES</h2>
@@ -27,10 +24,9 @@ <h3>STAC Catalog JSON metadata</h3>
 GRASS Jupyter Notebooks can be used to visualize the catalog metadata.
 
 <pre><code class="code">
-    from grass import gs
-    catalog = gs.parse_command('t.stac.catalog', url="https://earth-search.aws.element84.com/v1/")
-
-    print(catalog)
+import grass.script as gs
+catalog = gs.parse_command("t.stac.catalog", url="https://earth-search.aws.element84.com/v1/", flags="p")
+print(catalog)
 
     # Output
     {'conformsTo': ['https://api.stacspec.org/v1.0.0/core',
@@ -56,76 +52,20 @@ <h3>STAC Catalog JSON metadata</h3>
 
 <h3>STAC Catalog plain text metadata</h3>
 <pre><code>
-t.stac.catalog url=https://earth-search.aws.element84.com/v1/ format=plain
-
-# Output
-    Client Id: earth-search-aws
-    Client Title: Earth Search by Element 84
-    Client Description: A STAC API of public datasets on AWS
-    Client STAC Extensions: []
-    Client Extra Fields: {'type': 'Catalog', 'conformsTo': ['https://api.stacspec.org/v1.0.0/core', 'https://api.stacspec.org/v1.0.0/collections', 'https://api.stacspec.org/v1.0.0/ogcapi-features', 'https://api.stacspec.org/v1.0.0/item-search', 'https://api.stacspec.org/v1.0.0/ogcapi-features#fields', 'https://api.stacspec.org/v1.0.0/ogcapi-features#sort', 'https://api.stacspec.org/v1.0.0/ogcapi-features#query', 'https://api.stacspec.org/v1.0.0/item-search#fields', 'https://api.stacspec.org/v1.0.0/item-search#sort', 'https://api.stacspec.org/v1.0.0/item-search#query', 'https://api.stacspec.org/v0.3.0/aggregation', 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core', 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30', 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson']}
-    Client catalog_type: ABSOLUTE_PUBLISHED
-    ---------------------------------------------------------------------------
-    Collections: 9
-    sentinel-2-pre-c1-l2a: Sentinel-2 Pre-Collection 1 Level-2A
-    Sentinel-2 Pre-Collection 1 Level-2A (baseline < 05.00), with data and metadata matching collection sentinel-2-c1-l2a
-    Extent: {'spatial': {'bbox': [[-180, -90, 180, 90]]}, 'temporal': {'interval': [['2015-06-27T10:25:31.456000Z', None]]}}
-    License: proprietary
-    ---------------------------------------------------------------------------
-    cop-dem-glo-30: Copernicus DEM GLO-30
-    The Copernicus DEM is a Digital Surface Model (DSM) which represents the surface of the Earth including buildings, infrastructure and vegetation. GLO-30 Public provides limited worldwide coverage at 30 meters because a small subset of tiles covering specific countries are not yet released to the public by the Copernicus Programme.
-    Extent: {'spatial': {'bbox': [[-180, -90, 180, 90]]}, 'temporal': {'interval': [['2021-04-22T00:00:00Z', '2021-04-22T00:00:00Z']]}}
-    License: proprietary
-    ---------------------------------------------------------------------------
-    naip: NAIP: National Agriculture Imagery Program
-    The [National Agriculture Imagery Program](https://www.fsa.usda.gov/programs-and-services/aerial-photography/imagery-programs/naip-imagery/) (NAIP) provides U.S.-wide, high-resolution aerial imagery, with four spectral bands (R, G, B, IR).  NAIP is administered by the [Aerial Field Photography Office](https://www.fsa.usda.gov/programs-and-services/aerial-photography/) (AFPO) within the [US Department of Agriculture](https://www.usda.gov/) (USDA).  Data are captured at least once every three years for each state.  This dataset represents NAIP data from 2010-present, in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.
-    Extent: {'spatial': {'bbox': [[-160, 17, -67, 50]]}, 'temporal': {'interval': [['2010-01-01T00:00:00Z', '2022-12-31T00:00:00Z']]}}
-    License: proprietary
-    ---------------------------------------------------------------------------
-    cop-dem-glo-90: Copernicus DEM GLO-90
-    The Copernicus DEM is a Digital Surface Model (DSM) which represents the surface of the Earth including buildings, infrastructure and vegetation. GLO-90 provides worldwide coverage at 90 meters.
-    Extent: {'spatial': {'bbox': [[-180, -90, 180, 90]]}, 'temporal': {'interval': [['2021-04-22T00:00:00Z', '2021-04-22T00:00:00Z']]}}
-    License: proprietary
-    ---------------------------------------------------------------------------
-    landsat-c2-l2: Landsat Collection 2 Level-2
-    Atmospherically corrected global Landsat Collection 2 Level-2 data from the Thematic Mapper (TM) onboard Landsat 4 and 5, the Enhanced Thematic Mapper Plus (ETM+) onboard Landsat 7, and the Operational Land Imager (OLI) and Thermal Infrared Sensor (TIRS) onboard Landsat 8 and 9.
-    Extent: {'spatial': {'bbox': [[-180, -90, 180, 90]]}, 'temporal': {'interval': [['1982-08-22T00:00:00Z', None]]}}
-    License: proprietary
-    ---------------------------------------------------------------------------
-    sentinel-2-l2a: Sentinel-2 Level-2A
-    Global Sentinel-2 data from the Multispectral Instrument (MSI) onboard Sentinel-2
-    Extent: {'spatial': {'bbox': [[-180, -90, 180, 90]]}, 'temporal': {'interval': [['2015-06-27T10:25:31.456000Z', None]]}}
-    License: proprietary
-    ---------------------------------------------------------------------------
-    sentinel-2-l1c: Sentinel-2 Level-1C
-    Global Sentinel-2 data from the Multispectral Instrument (MSI) onboard Sentinel-2
-    Extent: {'spatial': {'bbox': [[-180, -90, 180, 90]]}, 'temporal': {'interval': [['2015-06-27T10:25:31.456000Z', None]]}}
-    License: proprietary
-    ---------------------------------------------------------------------------
-    sentinel-2-c1-l2a: Sentinel-2 Collection 1 Level-2A
-    Sentinel-2 Collection 1 Level-2A, data from the Multispectral Instrument (MSI) onboard Sentinel-2
-    Extent: {'spatial': {'bbox': [[-180, -90, 180, 90]]}, 'temporal': {'interval': [['2015-06-27T10:25:31.456000Z', None]]}}
-    License: proprietary
-    ---------------------------------------------------------------------------
-    sentinel-1-grd: Sentinel-1 Level-1C Ground Range Detected (GRD)
-    Sentinel-1 is a pair of Synthetic Aperture Radar (SAR) imaging satellites launched in 2014 and 2016 by the European Space Agency (ESA). Their 6 day revisit cycle and ability to observe through clouds makes this dataset perfect for sea and land monitoring, emergency response due to environmental disasters, and economic applications. This dataset represents the global Sentinel-1 GRD archive, from beginning to the present, converted to cloud-optimized GeoTIFF format.
-    Extent: {'spatial': {'bbox': [[-180, -90, 180, 90]]}, 'temporal': {'interval': [['2014-10-10T00:28:21Z', None]]}}
-    License: proprietary
-    ---------------------------------------------------------------------------
-</code></pre>
+t.stac.catalog url=https://earth-search.aws.element84.com/v1/ format=plain -b
 
-<h3>Basic STAC catalog metadata</h3>
-<pre><code>
-    t.stac.catalog url=https://earth-search.aws.element84.com/v1/ format=plain -b
+---------------------------------------------------------------------------
+Catalog: Earth Search by Element 84
+---------------------------------------------------------------------------
 Client Id: earth-search-aws
-Client Title: Earth Search by Element 84
 Client Description: A STAC API of public datasets on AWS
 Client STAC Extensions: []
-Client Extra Fields: {'type': 'Catalog', 'conformsTo': ['https://api.stacspec.org/v1.0.0/core', 'https://api.stacspec.org/v1.0.0/collections', 'https://api.stacspec.org/v1.0.0/ogcapi-features', 'https://api.stacspec.org/v1.0.0/item-search', 'https://api.stacspec.org/v1.0.0/ogcapi-features#fields', 'https://api.stacspec.org/v1.0.0/ogcapi-features#sort', 'https://api.stacspec.org/v1.0.0/ogcapi-features#query', 'https://api.stacspec.org/v1.0.0/item-search#fields', 'https://api.stacspec.org/v1.0.0/item-search#sort', 'https://api.stacspec.org/v1.0.0/item-search#query', 'https://api.stacspec.org/v0.3.0/aggregation', 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core', 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30', 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson']}
 Client catalog_type: ABSOLUTE_PUBLISHED
 ---------------------------------------------------------------------------
 Collections: 9
 ---------------------------------------------------------------------------
+Collection Id | Collection Title
+---------------------------------------------------------------------------
 sentinel-2-pre-c1-l2a: Sentinel-2 Pre-Collection 1 Level-2A
 cop-dem-glo-30: Copernicus DEM GLO-30
 naip: NAIP: National Agriculture Imagery Program
@@ -135,7 +75,42 @@ <h3>Basic STAC catalog metadata</h3>
 sentinel-2-l1c: Sentinel-2 Level-1C
 sentinel-2-c1-l2a: Sentinel-2 Collection 1 Level-2A
 sentinel-1-grd: Sentinel-1 Level-1C Ground Range Detected (GRD)
+---------------------------------------------------------------------------
+</code></pre>
+
+<h3>Basic STAC catalog metadata</h3>
+<pre><code>
+t.stac.catalog url=https://earth-search.aws.element84.com/v1/ format=plain
 
+---------------------------------------------------------------------------
+Catalog: Earth Search by Element 84
+---------------------------------------------------------------------------
+Client Id: earth-search-aws
+Client Description: A STAC API of public datasets on AWS
+Client STAC Extensions: []
+Client catalog_type: ABSOLUTE_PUBLISHED
+---------------------------------------------------------------------------
+Collections: 9
+---------------------------------------------------------------------------
+Collection: Sentinel-2 Pre-Collection 1 Level-2A
+---------------------------------------------------------------------------
+Collection Id: sentinel-2-pre-c1-l2a
+Sentinel-2 Pre-Collection 1 Level-2A (baseline < 05.00), with data and metadata matching collection sentinel-2-c1-l2a
+Extent: {'spatial': {'bbox': [[-180, -90, 180, 90]]}, 'temporal': {'interval': [['2015-06-27T10:25:31.456000Z', None]]}}
+License: proprietary
+---------------------------------------------------------------------------
+---------------------------------------------------------------------------
+Collection: Copernicus DEM GLO-30
+---------------------------------------------------------------------------
+Collection Id: cop-dem-glo-30
+The Copernicus DEM is a Digital Surface Model (DSM) which represents the surface of the Earth including buildings, infrastructure and vegetation. GLO-30 Public provides limited worldwide coverage at 30 meters because a small subset of tiles covering specific countries are not yet released to the public by the Copernicus Programme.
+Extent: {'spatial': {'bbox': [[-180, -90, 180, 90]]}, 'temporal': {'interval': [['2021-04-22T00:00:00Z', '2021-04-22T00:00:00Z']]}}
+License: proprietary
+---------------------------------------------------------------------------
+...
+Extent: {'spatial': {'bbox': [[-180, -90, 180, 90]]}, 'temporal': {'interval': [['2014-10-10T00:28:21Z', None]]}}
+License: proprietary
+---------------------------------------------------------------------------
 </code></pre>
 
 <h2>AUTHENTICATION</h2>
@@ -143,8 +118,6 @@ <h2>AUTHENTICATION</h2>
 The <em>t.stac.catalog</em> tool supports authentication with the STAC API using <em>GDAL's</em> virtual file system <em>/vsi/</em>.
 
 
-
-
 <h3>Basic Authentication</h3>
 <pre><code>
     t.stac.catalog url="https://earth-search.aws.element84.com/v1/" settings="user:password"
diff --git a/src/temporal/t.stac/t.stac.catalog/t.stac.catalog.py b/src/temporal/t.stac/t.stac.catalog/t.stac.catalog.py
index 0c388d6d66..b69ad0304a 100644
--- a/src/temporal/t.stac/t.stac.catalog/t.stac.catalog.py
+++ b/src/temporal/t.stac/t.stac.catalog/t.stac.catalog.py
@@ -63,26 +63,44 @@
 # % description: Return basic information only
 # %end
 
+# %flag
+# % key: p
+# % description: Pretty print the JSON output
+# %end
+
 import sys
+import json
+from io import StringIO
+from contextlib import contextmanager
 from pprint import pprint
 import grass.script as gs
 from grass.pygrass.utils import get_lib_path
 
-# Import STAC Client
-from pystac_client import Client
-from pystac_client.exceptions import APIError
-import json
-
 
-path = get_lib_path(modname="t.stac", libname="staclib")
-if path is None:
-    gs.fatal("Not able to find the stac library directory.")
-sys.path.append(path)
+@contextmanager
+def add_sys_path(new_path):
+    """Add a path to sys.path and remove it when done"""
+    original_sys_path = sys.path[:]
+    sys.path.append(new_path)
+    try:
+        yield
+    finally:
+        sys.path = original_sys_path
 
 
 def main():
     """Main function"""
-    import staclib as libstac
+
+    # Import dependencies
+    path = get_lib_path(modname="t.stac", libname="staclib")
+    if path is None:
+        gs.fatal("Not able to find the stac library directory.")
+
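+    # staclib is bundled with the t.stac toolset; add its directory to sys.path only for the import.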
+    with add_sys_path(path):
+        try:
+            import staclib as libstac
+        except ImportError as err:
+            gs.fatal(f"Unable to import staclib: {err}")
 
     # STAC Client options
     client_url = options["url"]  # required
@@ -90,54 +108,57 @@ def main():
 
     # Flag options
     basic_info = flags["b"]  # optional
+    pretty_print = flags["p"]  # optional
 
     # Set the request headers
     settings = options["settings"]
     req_headers = libstac.set_request_headers(settings)
 
     try:
-        client = Client.open(client_url, headers=req_headers)
-
-        # Check if the client conforms to the STAC Item Search
-        # This will exit the program if the client does not conform
-        libstac.conform_to_item_search(client)
+        stac_helper = libstac.STACHelper()
+        client = stac_helper.connect_to_stac(client_url, req_headers)
 
         if format == "plain":
-            gs.message(_(f"Client Id: {client.id}"))
-            gs.message(_(f"Client Title: {client.title}"))
-            gs.message(_(f"Client Description: {client.description}"))
-            gs.message(_(f"Client STAC Extensions: {client.stac_extensions}"))
-            gs.message(_(f"Client Extra Fields: {client.extra_fields}"))
-            gs.message(_(f"Client catalog_type: {client.catalog_type}"))
-            gs.message(_(f"{'-' * 75}\n"))
+            sys.stdout.write(f"{'-' * 75}\n")
+            sys.stdout.write(f"Catalog: {client.title}\n")
+            sys.stdout.write(f"{'-' * 75}\n")
+            sys.stdout.write(f"Client Id: {client.id}\n")
+            sys.stdout.write(f"Client Description: {client.description}\n")
+            sys.stdout.write(f"Client STAC Extensions: {client.stac_extensions}\n")
+            sys.stdout.write(f"Client catalog_type: {client.catalog_type}\n")
+            sys.stdout.write(f"{'-' * 75}\n")
 
             # Get all collections
-            collection_list = libstac.get_all_collections(client)
-            gs.message(_(f"Collections: {len(collection_list)}\n"))
-            gs.message(_(f"{'-' * 75}\n"))
+            collection_list = stac_helper.get_all_collections()
+            sys.stdout.write(f"Collections: {len(collection_list)}\n")
+            sys.stdout.write(f"{'-' * 75}\n")
 
             if basic_info:
+                sys.stdout.write("Collection Id | Collection Title\n")
+                sys.stdout.write(f"{'-' * 75}\n")
                 for i in collection_list:
-                    gs.message(_(f"{i.get('id')}: {i.get('title')}"))
-
-            if not basic_info:
+                    sys.stdout.write(f"{i.get('id')}: {i.get('title')}\n")
+                sys.stdout.write(f"{'-' * 75}\n")
+            else:
                 for i in collection_list:
-                    gs.message(_(f"{i.get('id')}: {i.get('title')}"))
-                    gs.message(_(f"{i.get('description')}"))
-                    gs.message(_(f"Extent: {i.get('extent')}"))
-                    gs.message(_(f"License: {i.get('license')}"))
-                    gs.message(_(f"{'-' * 75}\n"))
+                    sys.stdout.write(f"Collection: {i.get('title')}\n")
+                    sys.stdout.write(f"{'-' * 75}\n")
+                    sys.stdout.write(f"Collection Id: {i.get('id')}\n")
+                    sys.stdout.write(f"{i.get('description')}\n")
+                    sys.stdout.write(f"Extent: {i.get('extent')}\n")
+                    sys.stdout.write(f"License: {i.get('license')}\n")
+                    sys.stdout.write(f"{'-' * 75}\n")
                     libstac.print_list_attribute(
                         client.get_conforms_to(), "Conforms To:"
                     )
-                    gs.message(_(f"{'-' * 75}\n"))
+                    sys.stdout.write(f"{'-' * 75}\n")
                 return None
         else:
-            json_output = json.dumps(client.to_dict())
-            return json_output
+            client_dict = client.to_dict()
+            libstac.print_json_to_stdout(client_dict, pretty_print)
 
-    except APIError as e:
-        gs.fatal(_("APIError Error opening STAC API: {}".format(e)))
+    except Exception as e:
+        gs.fatal(_("Error: {}".format(e)))
 
 
 if __name__ == "__main__":
diff --git a/src/temporal/t.stac/t.stac.collection/t.stac.collection.py b/src/temporal/t.stac/t.stac.collection/t.stac.collection.py
index 70a38d8268..74000224f0 100644
--- a/src/temporal/t.stac/t.stac.collection/t.stac.collection.py
+++ b/src/temporal/t.stac/t.stac.collection/t.stac.collection.py
@@ -71,36 +71,43 @@
 # % description: Return basic information only
 # %end
 
+# %flag
+# % key: p
+# % description: Pretty print the JSON output
+# %end
+
 import sys
+import json
+from io import StringIO
 from pprint import pprint
+from contextlib import contextmanager
 import grass.script as gs
 from grass.pygrass.utils import get_lib_path
 
 
-from pystac_client import Client
-from pystac_client.exceptions import APIError
-from pystac_client.conformance import ConformanceClasses
-
-path = get_lib_path(modname="t.stac", libname="staclib")
-if path is None:
-    gs.fatal("Not able to find the stac library directory.")
-sys.path.append(path)
-
-
-def get_all_collections(client):
-    """Get a list of collections from STAC Client"""
+@contextmanager
+def add_sys_path(new_path):
+    """Add a path to sys.path and remove it when done"""
+    original_sys_path = sys.path[:]
+    sys.path.append(new_path)
     try:
-        collections = client.get_collections()
-        collection_list = list(collections)
-        return [i.to_dict() for i in collection_list]
-
-    except APIError as e:
-        gs.fatal(_("Error getting collections: {}".format(e)))
+        yield
+    finally:
+        sys.path = original_sys_path
 
 
 def main():
     """Main function"""
-    import staclib as libstac
+    # Import dependencies
+    path = get_lib_path(modname="t.stac", libname="staclib")
+    if path is None:
+        gs.fatal("Not able to find the stac library directory.")
+
+    with add_sys_path(path):
+        try:
+            import staclib as libstac
+        except ImportError as err:
+            gs.fatal(f"Unable to import staclib: {err}")
 
     # STAC Client options
     client_url = options["url"]  # required
@@ -112,41 +119,26 @@ def main():
 
     # Flag options
     basic_info = flags["b"]  # optional
+    pretty_print = flags["p"]  # optional
 
     # Set the request headers
     settings = options["settings"]
     req_headers = libstac.set_request_headers(settings)
 
-    try:
-        client = Client.open(client_url, headers=req_headers)
-    except APIError as e:
-        gs.fatal(_("APIError Error opening STAC API: {}".format(e)))
-
-    if libstac.conform_to_collections(client):
-        gs.verbose(_("Conforms to STAC Collections"))
+    # Connect to STAC API
+    stac_helper = libstac.STACHelper()
+    stac_helper.connect_to_stac(client_url, req_headers)
+    stac_helper.conforms_to_collections()
 
     if collection_id:
-        try:
-            collection = client.get_collection(collection_id)
-            collection_dict = collection.to_dict()
-            if format == "json":
-                gs.message(_(f"collection: {collection}"))
-                return collection_dict
-                # return pprint(collection.to_dict())
-            elif format == "plain":
-                if basic_info:
-                    return libstac.print_basic_collection_info(collection_dict)
-                return libstac.print_summary(collection_dict)
-
-        except APIError as e:
-            gs.fatal(_("APIError Error getting collection: {}".format(e)))
-
-    # Create metadata vector
-    # if vector_metadata:
-    #     gs.message(_(f"Outputting metadata to {vector_metadata}"))
-    #     libstac.create_metadata_vector(vector_metadata, collection_list)
-    #     gs.message(_(f"Metadata written to {vector_metadata}"))
-    #     return vector_metadata
+        collection_dict = stac_helper.get_collection(collection_id)
+
+        if format == "plain":
+            if basic_info:
+                return libstac.print_basic_collection_info(collection_dict)
+            return libstac.print_summary(collection_dict)
+        elif format == "json":
+            return libstac.print_json_to_stdout(collection_dict, pretty_print)
 
 
 if __name__ == "__main__":
diff --git a/src/temporal/t.stac/t.stac.html b/src/temporal/t.stac/t.stac.html
index 1fd15ea85f..127a3276f0 100644
--- a/src/temporal/t.stac/t.stac.html
+++ b/src/temporal/t.stac/t.stac.html
@@ -2,11 +2,11 @@ <h2>DESCRIPTION</h2>
 
 <p>
 The <em>t.stac</em> toolset allows the user to explore metadata and ingest SpatioTemporal Asset Catalog
-(STAC) items, collections, and catalogs. The toolset is based on the PySTAC library and provides a set of 
+(STAC) items, collections, and catalogs. The toolset is based on the PySTAC library and provides a set of
 modules for working with STAC APIs.
 
-<a href="https://stacspec.org/">STAC</a> is a specification for organizing geospatial information in a way 
-that is interoperable across software and data services. The 
+<a href="https://stacspec.org/">STAC</a> is a specification for organizing geospatial information in a way
+that is interoperable across software and data services. The
 <a href="https://github.com/stac-utils/pystac-client">pystac-client</a> is used to interact with STAC APIs.
 
 
@@ -27,7 +27,7 @@ <h2>REQUIREMENTS</h2>
 </ul>
 
 <p>
-After dependencies are fulfilled, the toolset can be installed using the 
+After dependencies are fulfilled, the toolset can be installed using the
 <em>g.extension</em> tool:
 <div class="code"><pre>
 g.extension extension=t.stac
diff --git a/src/temporal/t.stac/t.stac.item/t.stac.item.py b/src/temporal/t.stac/t.stac.item/t.stac.item.py
index 00fbdbc19a..aa06300f58 100644
--- a/src/temporal/t.stac/t.stac.item/t.stac.item.py
+++ b/src/temporal/t.stac/t.stac.item/t.stac.item.py
@@ -251,6 +251,11 @@
 # % description: Download and import assets
 # %end
 
+# %flag
+# % key: p
+# % description: Pretty print the JSON output
+# %end
+
 # %option G_OPT_M_NPROCS
 # %end
 
@@ -261,102 +266,21 @@
 import sys
 from pprint import pprint
 import json
-
-# from multiprocessing.pool import ThreadPool
-from pystac_client import Client
-from pystac_client.exceptions import APIError
-from pystac import MediaType
-from concurrent.futures import ThreadPoolExecutor
-from tqdm import tqdm
-import tempfile
-
+from io import StringIO
+from contextlib import contextmanager
 import grass.script as gs
 from grass.pygrass.utils import get_lib_path
-from grass.exceptions import CalledModuleError
-
-
-path = get_lib_path(modname="t.stac", libname="staclib")
-if path is None:
-    gs.fatal("Not able to find the stac library directory.")
-sys.path.append(path)
 
-import staclib as libstac
-
-
-def search_stac_api(client, **kwargs):
-    """Search the STAC API"""
-    if libstac.conform_to_item_search(client):
-        gs.verbose(_("STAC API Conforms to Item Search"))
-    try:
-        search = client.search(**kwargs)
-    except APIError as e:
-        gs.fatal(_("Error searching STAC API: {}".format(e)))
-    except NotImplementedError as e:
-        gs.fatal(_("Error searching STAC API: {}".format(e)))
-    except Exception as e:
-        gs.fatal(_("Error searching STAC API: {}".format(e)))
 
+@contextmanager
+def add_sys_path(new_path):
+    """Add a path to sys.path and remove it when done"""
+    original_sys_path = sys.path[:]
+    sys.path.append(new_path)
     try:
-        gs.message(_(f"Search Matched: {search.matched()} items"))
-        # These requests tend to be very slow
-        # gs.message(_(f"Pages: {len(list(search.pages()))}"))
-        # gs.message(_(f"Max items per page: {len(list(search.items()))}"))
-
-    except e:
-        gs.warning(_(f"No items found: {e}"))
-        return None
-
-    return search
-
-
-def collection_metadata(collection):
-    """Get collection"""
-
-    gs.message(_("*" * 80))
-    gs.message(_(f"Collection Id: {collection.id}"))
-
-    libstac.print_attribute(collection, "title", "Collection Title")
-    libstac.print_attribute(collection, "description", "Description")
-    gs.message(_(f"Spatial Extent: {collection.extent.spatial.bboxes}"))
-    gs.message(_(f"Temporal Extent: {collection.extent.temporal.intervals}"))
-
-    libstac.print_attribute(collection, "license")
-    libstac.print_attribute(collection, "keywords")
-    libstac.print_attribute(collection, "links")
-    libstac.print_attribute(collection, "providers")
-    libstac.print_attribute(collection, "stac_extensions", "Extensions")
-
-    try:
-        gs.message(_("\n# Summaries:"))
-        libstac.print_summary(collection.summaries.to_dict())
-    except AttributeError:
-        gs.info(_("Summaries not found."))
-
-    try:
-        gs.message(_("\n# Extra Fields:"))
-        libstac.print_summary(collection.extra_fields)
-    except AttributeError:
-        gs.info(_("# Extra Fields not found."))
-    gs.message(_("*" * 80))
-
-
-def report_stac_item(item):
-    """Print a report of the STAC item to the console."""
-    gs.message(_(f"Collection ID: {item.collection_id}"))
-    gs.message(_(f"Item: {item.id}"))
-    libstac.print_attribute(item, "geometry", "Geometry")
-    gs.message(_(f"Bbox: {item.bbox}"))
-
-    libstac.print_attribute(item, "datetime", "Datetime")
-    libstac.print_attribute(item, "start_datetime", "Start Datetime")
-    libstac.print_attribute(item, "end_datetime", "End Datetime")
-    gs.message(_("Extra Fields:"))
-    libstac.print_summary(item.extra_fields)
-
-    libstac.print_list_attribute(item.stac_extensions, "Extensions:")
-    # libstac.print_attribute(item, "stac_extensions", "Extensions")
-    gs.message(_("Properties:"))
-    libstac.print_summary(item.properties)
+        yield
+    finally:
+        sys.path = original_sys_path
 
 
 def collect_item_assets(item, assset_keys, asset_roles):
@@ -381,90 +305,18 @@ def collect_item_assets(item, assset_keys, asset_roles):
         return asset_dict
 
 
-def report_plain_asset_summary(asset):
-    gs.message(_("\nAsset"))
-    gs.message(_(f"Asset Item Id: {asset.get('item_id')}"))
-
-    gs.message(_(f"Asset Title: {asset.get('title')}"))
-    gs.message(_(f"Asset Filename: {asset.get('file_name')}"))
-    gs.message(_(f"Raster bands: {asset.get('raster:bands')}"))
-    gs.message(_(f"Raster bands: {asset.get('eo:bands')}"))
-    gs.message(_(f"Asset Description: {asset.get('description')}"))
-    gs.message(_(f"Asset Media Type: { MediaType(asset.get('type')).name}"))
-    gs.message(_(f"Asset Roles: {asset.get('roles')}"))
-    gs.message(_(f"Asset Href: {asset.get('href')}"))
-
-
-def import_grass_raster(params):
-    assets, resample_method, extent, resolution, resolution_value, memory = params
-    gs.message(_(f"Downloading Asset: {assets}"))
-    input_url = libstac.check_url_type(assets["href"])
-    gs.message(_(f"Import Url: {input_url}"))
-
-    try:
-        gs.message(_(f"Importing: {assets['file_name']}"))
-        gs.parse_command(
-            "r.import",
-            input=input_url,
-            output=assets["file_name"],
-            resample=resample_method,
-            extent=extent,
-            resolution=resolution,
-            resolution_value=resolution_value,
-            title=assets["file_name"],
-            memory=memory,
-            quiet=True,
-        )
-    except CalledModuleError as e:
-        gs.fatal(_("Error importing raster: {}".format(e.stderr)))
-
-
-def download_assets(
-    assets,
-    resample_method,
-    resample_extent,
-    resolution,
-    resolution_value,
-    memory=300,
-    nprocs=1,
-):
-    """Downloads a list of images from the given URLs to the given filenames."""
-    number_of_assets = len(assets)
-    resample_extent_list = [resample_extent] * number_of_assets
-    resolution_list = [resolution] * number_of_assets
-    resolution_value_list = [resolution_value] * number_of_assets
-    resample_method_list = [resample_method] * number_of_assets
-    memory_list = [memory] * number_of_assets
-    max_cpus = os.cpu_count() - 1
-    if nprocs > max_cpus:
-        gs.warning(
-            _(
-                "Number of processes {nprocs} is greater than the number of CPUs {max_cpus}."
-            )
-        )
-        nprocs = max_cpus
-
-    with tqdm(total=number_of_assets, desc="Downloading assets") as pbar:
-        with ThreadPoolExecutor(max_workers=nprocs) as executor:
-            try:
-                for _a in executor.map(
-                    import_grass_raster,
-                    zip(
-                        assets,
-                        resample_method_list,
-                        resample_extent_list,
-                        resolution_list,
-                        resolution_value_list,
-                        memory_list,
-                    ),
-                ):
-                    pbar.update(1)
-            except Exception as e:
-                gs.fatal(_("Error importing raster: {}".format(str(e))))
-
-
 def main():
     """Main function"""
+    # Import dependencies
+    path = get_lib_path(modname="t.stac", libname="staclib")
+    if path is None:
+        gs.fatal("Not able to find the stac library directory.")
+
+    with add_sys_path(path):
+        try:
+            import staclib as libstac
+        except ImportError as err:
+            gs.fatal(f"Unable to import staclib: {err}")
 
     # STAC Client options
     client_url = options["url"]  # required
@@ -494,6 +346,7 @@ def main():
     item_metadata = flags["i"]
     asset_metadata = flags["a"]
     download = flags["d"]
+    pretty_print = flags["p"]  # optional
 
     # Output options
     strds_output = options["strds_output"]  # optional
@@ -512,32 +365,22 @@ def main():
     search_params = {}  # Store STAC API search parameters
     collection_items_assets = []
 
-    try:
-
-        # Set the request headers
-        settings = options["settings"]
-        req_headers = libstac.set_request_headers(settings)
-
-        client = Client.open(client_url, headers=req_headers)
-    except APIError as e:
-        gs.fatal(_("APIError Error opening STAC API: {}".format(e)))
+    # Set the request headers
+    settings = options["settings"]
+    req_headers = libstac.set_request_headers(settings)
 
-    try:
-        collection = client.get_collection(collection_id)
-    except APIError as e:
-        gs.fatal(_(f"Error getting collection {collection_id}: {e}"))
+    # Connect to STAC API
+    stac_helper = libstac.STACHelper()
+    stac_helper.connect_to_stac(client_url, req_headers)
+    collection = stac_helper.get_collection(collection_id)
 
     if summary_metadata:
         if format == "plain":
-            return collection_metadata(collection)
+            return libstac.collection_metadata(collection)
         elif format == "json":
-            return pprint(collection.to_dict())
-        else:
-            # Return plain text by default
-            return collection_metadata(collection)
+            return libstac.print_json_to_stdout(collection, pretty_print)
 
     # Start item search
-
     if intersects:
         # Convert the vector to a geojson
         output_geojson = "tmp_stac_intersects.geojson"
@@ -556,7 +399,7 @@ def main():
 
     # Set the bbox to the current region if the user did not specify the bbox or intersects option
     if not bbox and not intersects:
-        gs.message(_("Setting bbox to current region: {}".format(bbox)))
+        gs.verbose(_("Setting bbox to current region: {}".format(bbox)))
         bbox = libstac.region_to_wgs84_decimal_degrees_bbox()
 
     if datetime:
@@ -573,8 +416,6 @@ def main():
     if filter_lang:
         search_params["filter_lang"] = filter_lang
 
-    if libstac.conform_to_query(client):
-        gs.verbose(_("STAC API Conforms to Item Search Query"))
     if query:
         if isinstance(query, str):
             query = json.loads(query)
@@ -591,7 +432,8 @@ def main():
     search_params["bbox"] = bbox
 
     # Search the STAC API
-    items_search = search_stac_api(client=client, **search_params)
+    items_search = stac_helper.search_api(**search_params)
+
     # Create vector layer of items metadata
     if items_vector:
         libstac.create_vector_from_feature_collection(
@@ -604,12 +446,14 @@ def main():
     # Report item metadata
     if item_metadata:
         if format == "plain":
+            gs.message(_("bbox: {}\n".format(bbox)))
             gs.message(_(f"Items Found: {len(list(items))}"))
             for item in items:
-                report_stac_item(item)
+                stac_helper.report_stac_item(item)
             return None
         if format == "json":
-            return pprint([item.to_dict() for item in items])
+            item_list = [item.to_dict() for item in items]
+            return libstac.print_json_to_stdout(item_list, pretty_print)
 
     for item in items:
         asset = collect_item_assets(item, asset_keys, asset_roles=item_roles)
@@ -620,17 +464,20 @@ def main():
         strds_output = os.path.abspath(strds_output)
         libstac.register_strds_from_items(collection_items_assets, strds_output)
 
-    gs.message(_(f"{len(collection_items_assets)} Assets Ready for download..."))
     if asset_metadata:
-        for asset in collection_items_assets:
-            if format == "plain":
-                report_plain_asset_summary(asset)
-            if format == "json":
-                pprint(asset)
+        if format == "plain":
+            gs.message(
+                _(f"{len(collection_items_assets)} Assets Ready for download...")
+            )
+            for asset in collection_items_assets:
+                libstac.report_plain_asset_summary(asset)
+
+        if format == "json":
+            return libstac.print_json_to_stdout(collection_items_assets, pretty_print)
 
     if download:
         # Download and Import assets
-        download_assets(
+        libstac.download_assets(
             assets=collection_items_assets,
             resample_method=method,
             resample_extent=extent,