
Commit

Merge pull request #1648 from openforis/development
Merge development into main Production 2023-07-14
a-luz authored Jul 14, 2023
2 parents 9b34a05 + ee434c8 commit 585381a
Showing 12 changed files with 212 additions and 44 deletions.
2 changes: 1 addition & 1 deletion deps.edn
@@ -6,7 +6,7 @@
com.draines/postal {:mvn/version "2.0.3"}
com.cognitect/transit-clj {:mvn/version "1.0.324"}
hiccup/hiccup {:mvn/version "2.0.0-alpha2"}
org.clojure/clojure {:mvn/version "1.10.1"}
org.clojure/clojure {:mvn/version "1.11.1"}
org.clojure/core.async {:mvn/version "1.3.610"}
org.clojure/data.json {:mvn/version "1.0.0"}
org.clojure/tools.cli {:mvn/version "1.0.194"}
132 changes: 102 additions & 30 deletions src/clj/collect_earth_online/db/doi.clj
@@ -1,26 +1,33 @@
(ns collect-earth-online.db.doi
(:require [clj-http.client :as http]
[clojure.java.io :as io]
[triangulum.database :refer [call-sql
sql-primitive]]
[triangulum.config :refer [get-config]]
[triangulum.type-conversion :as tc]
[collect-earth-online.generators.external-file :refer [create-shape-files]])
(:import java.time.format.DateTimeFormatter
java.time.LocalDateTime))
(:require
[clj-http.client :as http]
[clojure.data.json :as json]
[clojure.java.io :as io]
[collect-earth-online.generators.external-file :refer [create-and-zip-files-for-doi]]
[collect-earth-online.views :refer [data-response]]
[triangulum.config :refer [get-config]]
[triangulum.database :refer [call-sql]]
[triangulum.type-conversion :as tc])
(:import
java.time.LocalDateTime
java.time.format.DateTimeFormatter))


(def base-url (:url (get-config :zenodo)))


(defn req-headers
[]
(let [auth-token (:api-key (get-config :zenodo))]
{"Authorization" (str "Bearer " auth-token)}))


(defn get-zenodo-deposition
[deposition-id]
(let [headers (req-headers)]
(http/get (str base-url "/deposit/depositions/" deposition-id) {:headers headers :as :json})))


(defn create-contributors-list
[users institution-name]
(map (fn [user]
@@ -29,6 +36,7 @@
:affiliation institution-name})
users))


(defn create-zenodo-deposition!
[institution-name
project-name
@@ -50,26 +58,16 @@
:as :json
:headers headers})))

(defn upload-deposition-file!
[bucket-url project-id table-name]
(let [headers (req-headers)
shape-file (create-shape-files table-name project-id)]
(http/put (str bucket-url "/" table-name "-" project-id ".zip")
{:content-type :multipart/form-data
:headers headers
:multipart [{:name "Content/type" :content "application/zip"}
{:name "file" :content (io/file shape-file)}]})))

(defn insert-doi!
[{:keys [id metadata] :as doi-info}
project-id user-id]
(let [doi-path (-> metadata :prereserve_doi :doi)]
(-> "insert_doi"
(call-sql id project-id user-id doi-path doi-info)
first
sql-primitive)))
(first
(call-sql "insert_doi" id project-id user-id doi-path (tc/clj->jsonb doi-info)))))


(defn create-doi
(defn create-doi!
[{:keys [params]}]
(let [user-id (:userId params -1)
project-id (tc/val->int (:projectId params))
@@ -79,14 +77,88 @@
creator (first (call-sql "get_user_by_id" user-id))
contributors (call-sql "select_assigned_users_by_project" project-id)]
(->
(create-zenodo-deposition! institution-name project-name creator contributors description)
:body
(insert-doi! project-id user-id))))
(create-zenodo-deposition! institution-name project-name creator contributors description)
:body
(insert-doi! project-id user-id)
data-response)))


(defn build-survey-data
[plot-data]
(reduce (fn [acc surv]
(let [edn-sample-data (tc/jsonb->clj (:samples surv))]
(conj acc
{:email (:email surv)
:analysis_duration (:analysis_duration surv)
:collection_time (-> surv :collection_time str)
:flagged (:flagged surv)
:flagged_reason (:flagged_reason surv)
:confidence (:confidence surv)
:answers (group-by (fn [s] (-> s :id)) edn-sample-data)})))
[] plot-data))


(defn merge-plot-data
[plot-data]
(let [merged-plot-data (apply merge plot-data)
survey-data (build-survey-data plot-data)]
(-> merged-plot-data
(dissoc :samples)
(dissoc :email)
(dissoc :analysis_duration)
(dissoc :collection_time)
(dissoc :flagged)
(dissoc :flagged_reason)
(dissoc :confidence)
(assoc :survey survey-data))))


(defn group-plot-data
[plot-data]
(->> plot-data
(group-by (fn [s] (-> s :plotid str keyword)))
(map (fn [[key value]]
[key (merge-plot-data value)]))
(into {})))


(defn get-project-data
[project-id]
(let [project-data (first (call-sql "select_project_info_for_doi" project-id))
plot-data (call-sql "dump_project_plot_data" project-id)]
(-> project-data
(assoc :plot-info (group-plot-data plot-data))
(update :survey_rules #(tc/jsonb->clj %))
(update :survey_questions #(tc/jsonb->clj %))
(update :aoi_features #(tc/jsonb->clj %))
(update :created_date #(str %))
(update :published_date #(str %)))))


(defn upload-deposition-files!
[bucket-url project-id zip-file]
(let [headers (req-headers)]
(http/put (str bucket-url "/" project-id "-files" ".zip")
{:content-type :multipart/form-data
:headers headers
:multipart [{:name "Content/type" :content "application/zip"}
{:name "file" :content (io/file zip-file)}]})))


(defn upload-doi-files!
[{:keys [params]}]
(let [project-id (:projectId params)
doi (first (call-sql "select_doi_by_project" project-id))
bucket (-> doi :full_data tc/jsonb->clj :links :bucket)
project-data (json/write-str (get-project-data project-id))
zip-file (create-and-zip-files-for-doi project-id project-data)]
(upload-deposition-files! bucket project-id zip-file)
(data-response {})))


(defn download-doi-files
[{:keys [params]}]
(let [project-id (:projectId params)
doi (first (call-sql "select_doi_by_project" project-id))
bucket (-> doi :links :bucket)
options (:options params)]
(upload-deposition-file! bucket project-id options)))
doi (first (call-sql "select_doi_by_project" project-id))
bucket (-> doi :full_data tc/jsonb->clj :links :bucket)]
(data-response (str bucket "/" project-id "-files.zip"))))
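
Taken together, the new DOI flow in this file is: create-doi! reserves a Zenodo deposition and records it in the database, upload-doi-files! zips the project's shape files and survey data and PUTs the archive into the deposition's bucket, and download-doi-files returns the bucket URL of that archive. A minimal REPL sketch of exercising the handlers (the request shape with :params/:projectId/:userId mirrors the destructuring above; the concrete values are placeholders only):

(comment
  ;; Sketch only: project 123 and user 1 are placeholder values.
  (create-doi!        {:params {:projectId "123" :userId 1}})
  (upload-doi-files!  {:params {:projectId 123}})
  (download-doi-files {:params {:projectId 123}}))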
10 changes: 4 additions & 6 deletions src/clj/collect_earth_online/db/plots.clj
@@ -240,17 +240,15 @@
plots))
sorted-plots))]
(if plots-info
(do
(try
(unlock-plots user-id)
;; TODO, CEO-90 Technically there is a race condition here. We need a lock function
;; that returns truthy/falsy if it correctly created a unique lock.
;; The quickest way to finish this is to return a "race condition error."
;; If we get users complaining we can try a recursive find.
(call-sql "lock_plot"
(:plot_id (first plots-info))
user-id
(time-plus-five-min))
(data-response (map #(build-collection-plot % user-id review-mode?) plots-info)))
(data-response (map #(build-collection-plot % user-id review-mode?) plots-info))
(catch Exception _e
(data-response "Unable to get the requested plot. Please try again.")))
(data-response "not-found"))))

;;;
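
One way to discharge the TODO above, sketched only as an illustration: with the UNIQUE constraints added below in src/sql/changes/2023-06-05-add-constraints.sql, a second concurrent insert into plot_locks fails, so a hypothetical try_lock_plot SQL function could attempt the insert and return whether it succeeded, giving the truthy/falsy lock the comment asks for (sql-primitive here is the triangulum.database helper for unwrapping a single returned value).

;; Hypothetical sketch, not part of this commit: "try_lock_plot" is an assumed
;; SQL function that inserts into plot_locks and returns TRUE only when this
;; caller actually acquired the lock.
(defn- try-lock-plot!
  [plot-id user-id]
  (sql-primitive (call-sql "try_lock_plot" plot-id user-id (time-plus-five-min))))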
17 changes: 17 additions & 0 deletions src/clj/collect_earth_online/generators/external_file.clj
@@ -271,6 +271,23 @@
(sh-wrapper folder-name {} (str "rm -rf " shape-folder-name) (str "mkdir " shape-folder-name))
(export-table-to-file shape-folder-name project-id table-name db-config)))


(defn create-data-file
[folder-name json-data]
(spit (str folder-name "data.json")
json-data))

(defn create-and-zip-files-for-doi
[project-id project-data]
(let [folder-name (str tmp-dir "/ceo-tmp-" project-id "-files/")]
(sh-wrapper tmp-dir {} (str "rm -rf " folder-name) (str "mkdir " folder-name))
(create-shape-files folder-name "plot" project-id)
(create-shape-files folder-name "sample" project-id)
(create-data-file folder-name project-data)
(sh-wrapper tmp-dir {}
(str "7z a " folder-name "/files" ".zip " folder-name "/*"))
(str folder-name "files.zip")))

(defn zip-shape-files
[project-id]
(let [folder-name (str tmp-dir "/ceo-tmp-" project-id "-files/")]
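
A quick sketch of calling the new helper (the project id and JSON payload are placeholders; the returned path sits under tmp-dir, whose concrete value depends on deployment configuration):

(comment
  ;; Returns the path of the archive it just built,
  ;; e.g. "<tmp-dir>/ceo-tmp-123-files/files.zip".
  (create-and-zip-files-for-doi 123 "{\"project_id\": 123}"))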
13 changes: 13 additions & 0 deletions src/clj/collect_earth_online/routing.clj
@@ -6,6 +6,7 @@
[collect-earth-online.db.plots :as plots]
[collect-earth-online.db.projects :as projects]
[collect-earth-online.db.users :as users]
[collect-earth-online.db.doi :as doi]
[collect-earth-online.proxy :as proxy]))

(def routes
@@ -107,6 +108,17 @@
[:get "/create-shape-files"] {:handler projects/create-shape-files!
:auth-type :user
:auth-action :block}

;; DOI API
[:post "/create-doi"] {:handler doi/create-doi!
:auth-type :admin
:auth-action :block}
[:post "/upload-doi-files"] {:hander doi/upload-doi-files!
:auth-type :admin
:auth-action :block}
[:get "/doi"] {:handler doi/get-zenodo-deposition}
[:get "/doi/files"] {:handler doi/download-doi-files}

;; Plots API
[:get "/get-collection-plot"] {:handler plots/get-collection-plot
:auth-type :collect
@@ -179,3 +191,4 @@
[:get "/get-nicfi-tiles"] {:handler proxy/get-nicfi-tiles
:auth-type :no-cross
:auth-action :block}})

2 changes: 1 addition & 1 deletion src/js/collection.js
@@ -127,7 +127,7 @@ class Collection extends React.Component {
this.showGeoDash();
}
clearInterval(this.state.storedInterval);
this.setState({ storedInterval: setInterval(this.resetPlotLock, 2.3 * 60 * 1000) });
this.setState({ storedInterval: setInterval(this.resetPlotLock, 1 * 60 * 1000) });
// updateMapImagery is poorly named, this function is detecting if we need to show the "zoom to" overlay
this.updateMapImagery();
}
7 changes: 6 additions & 1 deletion src/js/geodash/MapWidget.js
@@ -159,7 +159,12 @@
this.props.imageryList.find((imagery) => imagery.title === "Open Street Map") ||
this.props.imageryList[0];
const basemapLayer = new TileLayer({
source: mercator.createSource(sourceConfig, id, attribution, isProxied),
source: mercator.createSource(
sourceConfig,
id,
attribution,
isProxied,
),
});
const plotSampleLayer = new VectorLayer({
source: this.props.vectorSource,
16 changes: 12 additions & 4 deletions src/js/utils/mercator.js
@@ -207,6 +207,13 @@ mercator.sendGEERequest = (theJson, sourceConfig, imageryId, attribution) => {
return geeSource;
};

mercator.newestNICFILayer = () => {
const latestDate = new Date(new Date().setDate(1));
const month = latestDate.getMonth().toString().padStart(2, "0");
const dateMonth = `${latestDate.getFullYear()}-${month}`;
return `planet_medres_normalized_analytic_${dateMonth}_mosaic`;
}

// [Pure] Returns a new ol.source.* object or null if the sourceConfig is invalid.
mercator.createSource = (
sourceConfig,
@@ -252,14 +259,15 @@
attributions: attribution,
});
} else if (type === "PlanetNICFI") {
const dataLayer = (sourceConfig.time === "newest") ? mercator.newestNICFILayer() : sourceConfig.time;
return new XYZ({
url:
"get-nicfi-tiles?z={z}&x={x}&y={y}" +
`&dataLayer=${sourceConfig.time}` +
"get-nicfi-tiles?z={z}&x={x}&y={y}" +
`&dataLayer=${dataLayer}` +
`&band=${sourceConfig.band}` +
`&imageryId=${imageryId}`,
attributions: attribution,
});
attributions: attribution,
});
} else if (type === "PlanetDaily") {
// make ajax call to get layerid then add xyz layer
const theJson = {
3 changes: 2 additions & 1 deletion src/py/gee/routes.py
@@ -48,7 +48,8 @@ def image(requestDict):

def imageCollection(requestDict):
visParams = safeParseJSON(getDefault(requestDict, 'visParams', {}))
if visParams.get("bands"):
bands = visParams.get("bands")
if bands and isinstance(bands, str):
bands = visParams.get("bands").replace(' ', '')
visParams.update({"bands": bands})
values = imageCollectionToMapId(
4 changes: 4 additions & 0 deletions src/sql/changes/2023-06-05-add-constraints.sql
@@ -0,0 +1,4 @@
ALTER TABLE IF EXISTS public.plot_locks DROP CONSTRAINT plot_locks_pkey;

ALTER TABLE IF EXISTS public.plot_locks ADD CONSTRAINT plot_locks_plot_rid_key UNIQUE (plot_rid);
ALTER TABLE IF EXISTS public.plot_locks ADD CONSTRAINT plot_locks_user_rid_key UNIQUE (user_rid);
1 change: 1 addition & 0 deletions src/sql/functions/plots.sql
@@ -575,6 +575,7 @@ CREATE OR REPLACE FUNCTION get_plot_shapes(_project_id integer)
USING (plot_uid)
$$ LANGUAGE SQL;


CREATE OR REPLACE FUNCTION get_sample_shapes(_project_id integer)
RETURNS TABLE (project_id integer,
plot_id integer,
49 changes: 49 additions & 0 deletions src/sql/functions/project.sql
@@ -1050,3 +1050,52 @@ CREATE OR REPLACE FUNCTION dump_project_sample_data(_project_id integer)
ORDER BY plot_uid, sample_uid

$$ LANGUAGE SQL;

-- Returns relevant information for DOI creation by ID.
CREATE OR REPLACE FUNCTION select_project_info_for_doi(_project_id integer)
RETURNS table (
project_id integer,
availability text,
name text,
description text,
boundary text,
aoi_features jsonb,
plot_distribution text,
num_plots integer,
plot_shape text,
plot_size real,
sample_distribution text,
samples_per_plot integer,
sample_resolution real,
survey_questions jsonb,
survey_rules jsonb,
created_date date,
published_date date,
closed_date date
) AS $$

SELECT project_uid,
availability,
name,
description,
ST_AsGeoJSON(boundary),
aoi_features,
plot_distribution,
num_plots,
plot_shape,
plot_size,
sample_distribution,
samples_per_plot,
sample_resolution,
survey_questions,
survey_rules,
created_date,
published_date,
closed_date
FROM projects
WHERE project_uid = _project_id
AND availability <> 'archived'
GROUP BY project_uid

$$ LANGUAGE SQL;
