Skip to content

Commit

Permalink
splitmap
Browse files Browse the repository at this point in the history
  • Loading branch information
cboettig committed Dec 19, 2023
1 parent d44b7a8 commit 29b5892
Show file tree
Hide file tree
Showing 4 changed files with 147 additions and 123 deletions.
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ COPY /pages ./pages
ENV PROJ_LIB='/opt/conda/share/proj'

USER root
RUN apt-get update && apt-get -y install git
RUN apt-get update && apt-get -y install git git-lfs
RUN chown -R ${NB_UID} ${HOME}
USER ${NB_USER}

Expand Down
89 changes: 52 additions & 37 deletions fire.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,43 @@
import dask.distributed
import matplotlib.pyplot as plt
import rioxarray
from datetime import datetime, timedelta


def stac_search(box, datetime):
    """Query the Planetary Computer STAC API for Sentinel-2 L2A imagery.

    Parameters
    ----------
    box : sequence of float
        Bounding box (minx, miny, maxx, maxy) in lon/lat (EPSG:4326).
    datetime : str
        STAC datetime range string, e.g. "2020-05-01/2020-06-01".
        (Name shadows the ``datetime`` module inside this function only.)

    Returns
    -------
    pystac.ItemCollection
        Scenes intersecting the box/date window with < 10% cloud cover.
    """
    # Open the signed Planetary Computer catalog once, then search it.
    catalog = pystac_client.Client.open(
        "https://planetarycomputer.microsoft.com/api/stac/v1",
        modifier=planetary_computer.sign_inplace,
    )
    search = catalog.search(
        collections=["sentinel-2-l2a"],
        bbox=box,
        datetime=datetime,
        query={"eo:cloud_cover": {"lt": 10}},
    )
    return search.item_collection()

def compute_nbs(items, box):
    """Load Sentinel-2 bands for the given STAC items and compute the
    Normalized Burn Ratio (NBR-like index, here called NBS).

    Parameters
    ----------
    items : pystac.ItemCollection
        Scenes to load (e.g. the result of ``stac_search``).
    box : sequence of float
        Bounding box (minx, miny, maxx, maxy) used to clip the load.

    Returns
    -------
    xarray.DataArray
        (NIR - SWIR) / (NIR + SWIR) per time step, computed eagerly.
    """
    # landsat_bands = ["nir08", "swir16"]
    sentinel_bands = ["B08", "B12", "SCL"]  # NIR, SWIR, and Cloud Mask
    # NOTE(review): SCL is loaded but never applied as a cloud mask here —
    # confirm whether masking was intended.

    # Use the client as a context manager so the local dask cluster is shut
    # down when we're done (the original leaked it; the `client` variable
    # was also unused).
    with dask.distributed.Client():
        # The magic of gdalwarp: odc.stac.load can also resample, reproject,
        # and aggregate on the fly.
        data = odc.stac.load(items,
                             bands=sentinel_bands,
                             bbox=box)
        # Compute the Normalized Burn Ratio; bands must be float so the
        # ratio isn't done in integer arithmetic.
        swir = data["B12"].astype("float")
        nir = data["B08"].astype("float")
        # Could resample/aggregate in xarray first, e.g.:
        #   .resample(time="MS").median("time", keep_attrs=True)
        nbs = ((nir - swir) / (nir + swir)).compute()
    return nbs


nps = gpd.read_file("/vsicurl/https://huggingface.co/datasets/cboettig/biodiversity/resolve/main/data/NPS.gdb")
Expand All @@ -15,47 +52,25 @@

# extract and reproject the Joshua Tree NP Polygon
jtree = nps[nps.PARKNAME == "Joshua Tree"].to_crs(calfire.crs)

# All Fires in the DB that intersect the Park
jtree_fires = jtree.overlay(calfire, how="intersection")

# Extract a polygon of interest. > 2015 for Sentinel, otherwise we can use LandSat
recent = jtree_fires[jtree_fires.YEAR_ > "2015"]
big = recent[recent.Shape_Area == recent.Shape_Area.max()].to_crs("EPSG:4326")
datetime = big.ALARM_DATE.item() + "/" + big.CONT_DATE.item()

# Get bounding box + dates before & after fire for STAC search
box = big.buffer(0.005).bounds.to_numpy()[0] # Fire bbox + buffer
#box = jtree.to_crs("EPSG:4326").bounds.to_numpy()[0] # Park bbox

# STAC Search for this imagery in space/time window
items = (
pystac_client.Client.
open("https://planetarycomputer.microsoft.com/api/stac/v1",
modifier=planetary_computer.sign_inplace).
search(collections=["sentinel-2-l2a"],
bbox=box,
datetime=datetime,
query={"eo:cloud_cover": {"lt": 10}}).
item_collection())


# Time to compute:
client = dask.distributed.Client()
# landsat_bands = ["nir08", "swir16"]
sentinel_bands = ["B08", "B12", "SCL"]

# The magic of gdalwarper. Can also resample, reproject, and aggregate on the fly
data = odc.stac.load(items,
bands=sentinel_bands,
bbox=box
)

swir = data["B12"].astype("float")
nir = data["B08"].astype("float")

# can resample and aggregate in xarray. compute with dask
nbs = (((nir - swir) / (nir + swir)).
# resample(time="MS").
# median("time", keep_attrs=True).
compute()
)

nbs.rio.to_raster(raster_path="nbs.tif", driver="COG")
alarm_date = datetime.strptime(big.ALARM_DATE.item(), "%Y-%m-%dT%H:%M:%S+00:00")
before_date = alarm_date - timedelta(days=14)
after_date = alarm_date + timedelta(days=14)
search_dates = before_date.strftime("%Y-%m-%d") + "/" + after_date.strftime("%Y-%m-%d")

# here we go!
items = stac_search(box, search_dates)
nbs = compute_nbs(items, box)

# write first and last date to tif
nbs.isel(time=0).rio.to_raster(raster_path="before.tif", driver="COG")
nbs.isel(time=(nbs.time.size-1)).rio.to_raster(raster_path="after.tif", driver="COG")
35 changes: 3 additions & 32 deletions pages/01_leafmap.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,45 +23,16 @@
datetime = big.ALARM_DATE.item() + "/" + big.CONT_DATE.item()
box = big.buffer(0.01).bounds.to_numpy()[0] # Fire bbox + buffer #box = jtree.to_crs("EPSG:4326").bounds.to_numpy()[0] # Park bbox

items = ( # STAC Search for this imagery in space/time window
pystac_client.Client.
open("https://planetarycomputer.microsoft.com/api/stac/v1",
modifier=planetary_computer.sign_inplace).
search(collections=["sentinel-2-l2a"],
bbox=box,
datetime=datetime,
query={"eo:cloud_cover": {"lt": 10}}).
item_collection())

# Time to compute:
client = dask.distributed.Client()
sentinel_bands = ["B08", "B12", "SCL"]
# The magic of gdalwarper. Can also resample, reproject, and aggregate on the fly
data = odc.stac.load(items, bands=sentinel_bands, bbox=box)
swir = data["B12"].astype("float")
nir = data["B08"].astype("float")
# can resample and aggregate in xarray. compute with dask
nbs = (((nir - swir) / (nir + swir)).
# resample(time="MS").
# median("time", keep_attrs=True).
compute()
)

import tempfile
import os
temp_dir = tempfile.gettempdir()
nbs_file = os.path.join(temp_dir, "random_filename.tif")
nbs.rio.to_raster(raster_path=nbs_file, driver="COG")

nbs_url = "/vsicurl/https://huggingface.co/datasets/cboettig/solara-data/resolve/main/nbs.tif"
before_url = "/vsicurl/https://huggingface.co/datasets/cboettig/solara-data/resolve/main/before.tif"
after_url = "/vsicurl/https://huggingface.co/datasets/cboettig/solara-data/resolve/main/after.tif"


class Map(leafmap.Map):
def __init__(self, **kwargs):
super().__init__(**kwargs)
# Add what you want below
self.add_gdf(jtree_fires)
self.add_cog_layer(nbs_url)
self.add_split_map(before_url, after_url)


@solara.component
Expand Down
144 changes: 91 additions & 53 deletions solara-test.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
Expand All @@ -20,7 +20,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
Expand All @@ -36,7 +36,7 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
Expand All @@ -53,69 +53,107 @@
"# Extract a polygon of interest. > 2015 for Sentinel, otherwise we can use LandSat\n",
"recent = jtree_fires[jtree_fires.YEAR_ > \"2015\"]\n",
"big = recent[recent.Shape_Area == recent.Shape_Area.max()].to_crs(\"EPSG:4326\")\n",
"datetime = big.ALARM_DATE.item() + \"/\" + big.CONT_DATE.item()\n",
"box = big.buffer(0.005).bounds.to_numpy()[0] # Fire bbox + buffer\n",
"#box = jtree.to_crs(\"EPSG:4326\").bounds.to_numpy()[0] # Park bbox\n",
"\n",
"# STAC Search for this imagery in space/time window\n",
"items = (\n",
" pystac_client.Client.\n",
" open(\"https://planetarycomputer.microsoft.com/api/stac/v1\",\n",
" modifier=planetary_computer.sign_inplace).\n",
" search(collections=[\"sentinel-2-l2a\"],\n",
" bbox=box,\n",
" datetime=datetime,\n",
" query={\"eo:cloud_cover\": {\"lt\": 10}}).\n",
" item_collection())\n",
"\n",
"\n",
"# Time to compute:\n",
"\n",
"client = dask.distributed.Client()\n",
"# landsat_bands = [\"nir08\", \"swir16\"]\n",
"sentinel_bands = [\"B08\", \"B12\", \"SCL\"]\n",
"\n",
"# The magic of gdalwarper. Can also resample, reproject, and aggregate on the fly\n",
"data = odc.stac.load(items,\n",
" bands=sentinel_bands,\n",
" bbox=box\n",
")\n",
"\n",
"swir = data[\"B12\"].astype(\"float\")\n",
"nir = data[\"B08\"].astype(\"float\")\n",
"\n",
"# can resample and aggregate in xarray. compute with dask\n",
"nbs = (((nir - swir) / (nir + swir)).\n",
" # resample(time=\"MS\").\n",
" # median(\"time\", keep_attrs=True).\n",
" compute()\n",
")\n",
"\n",
"\n",
"import tempfile\n",
"import os\n",
"temp_dir = tempfile.gettempdir()\n",
"nbs_file = os.path.join(temp_dir, \"random_filename.tif\")\n",
"nbs.rio.to_raster(raster_path=nbs_file, driver=\"COG\")\n"
"box = big.buffer(0.01).bounds.to_numpy()[0] # Fire bbox + buffer\n",
"#box = jtree.to_crs(\"EPSG:4326\").bounds.to_numpy()[0] # Park bbox\n"
]
},
{
"cell_type": "code",
"execution_count": 7,
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"from datetime import datetime, timedelta\n",
"alarm_date = datetime.strptime(big.ALARM_DATE.item(), \"%Y-%m-%dT%H:%M:%S+00:00\") \n",
"before_date = alarm_date - timedelta(days=14)\n",
"after_date = alarm_date + timedelta(days=14)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"search_dates = big.ALARM_DATE.item() + \"/\" + big.CONT_DATE.item()\n",
"search_dates = before_date.strftime(\"%Y-%m-%d\") + \"/\" + after_date.strftime(\"%Y-%m-%d\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"def stac_search(box, datetime): \n",
" # STAC Search for this imagery in space/time window\n",
" items = (\n",
" pystac_client.Client.\n",
" open(\"https://planetarycomputer.microsoft.com/api/stac/v1\",\n",
" modifier=planetary_computer.sign_inplace).\n",
" search(collections=[\"sentinel-2-l2a\"],\n",
" bbox=box,\n",
" datetime=datetime,\n",
" query={\"eo:cloud_cover\": {\"lt\": 10}}).\n",
" item_collection())\n",
" return items\n",
"\n",
"def compute_nbs(items, box):\n",
" # Time to compute:\n",
" client = dask.distributed.Client()\n",
" # landsat_bands = [\"nir08\", \"swir16\"]\n",
" sentinel_bands = [\"B08\", \"B12\", \"SCL\"] # NIR, SWIR, and Cloud Mask\n",
"\n",
" # The magic of gdalwarper. Can also resample, reproject, and aggregate on the fly\n",
" data = odc.stac.load(items,\n",
" bands=sentinel_bands,\n",
" bbox=box\n",
" )\n",
" # Compute the Normalized Burn Ratio, must be float\n",
" swir = data[\"B12\"].astype(\"float\")\n",
" nir = data[\"B08\"].astype(\"float\")\n",
" # can resample and aggregate in xarray. compute with dask\n",
" nbs = (((nir - swir) / (nir + swir)).\n",
" # resample(time=\"MS\").\n",
" # median(\"time\", keep_attrs=True).\n",
" compute()\n",
" )\n",
" return nbs\n",
"\n",
"items = stac_search(box, search_dates)\n",
"nbs = compute_nbs(items, box)\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"\n",
"nbs.isel(time=0).rio.to_raster(raster_path=\"before.tif\", driver=\"COG\")\n",
"nbs.isel(time=(nbs.time.size-1)).rio.to_raster(raster_path=\"after.tif\", driver=\"COG\")\n"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [],
"source": [
"class Map(leafmap.Map):\n",
" def __init__(self, **kwargs):\n",
" super().__init__(**kwargs)\n",
" # Add what you want below\n",
" # self.add_gdf(jtree, layer_name = \"Joshua Tree NP\")\n",
" # self.add_gdf(jtree_fires)\n",
"        self.add_gdf(big, layer_name = big.FIRE_NAME.item())\n",
" self.add_raster(nbs_file)\n",
" self.add_stac_gui()\n",
" #self.add_raster(\"before.tif\", layer_name = \"before\", colormap=\"viridis\")\n",
" #self.add_raster(\"after.tif\", layer_name = \"after\", colormap=\"viridis\")\n",
" self.split_map(\"before.tif\", \"after.tif\")\n",
" #self.add_stac_gui()\n",
"\n",
"\n",
"@solara.component\n",
Expand All @@ -141,13 +179,13 @@
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": 15,
"metadata": {},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "887d65f2a633403a8132bdd1c7ce49b7",
"model_id": "b0f148fc3c664528a23eeec53cec6a03",
"version_major": 2,
"version_minor": 0
},
Expand Down Expand Up @@ -183,7 +221,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.12"
"version": "3.11.6"
}
},
"nbformat": 4,
Expand Down

0 comments on commit 29b5892

Please sign in to comment.