From 178e39eada3c25c133cd48ed823b75951352e211 Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Fri, 13 Sep 2024 17:52:09 -0600 Subject: [PATCH 01/60] Handle & display error on save paramset failure --- src/natcap/invest/datastack.py | 20 +++++++-- src/natcap/invest/ui_server.py | 21 +++++++-- tests/test_ui_server.py | 5 ++- .../renderer/components/SaveAsModal/index.jsx | 1 + .../renderer/components/SetupTab/index.jsx | 44 +++++++++++++++---- workbench/src/renderer/server_requests.js | 12 +++-- workbench/tests/renderer/investtab.test.js | 7 ++- 7 files changed, 86 insertions(+), 24 deletions(-) diff --git a/src/natcap/invest/datastack.py b/src/natcap/invest/datastack.py index 461a12d60e..a68b153078 100644 --- a/src/natcap/invest/datastack.py +++ b/src/natcap/invest/datastack.py @@ -535,6 +535,9 @@ def build_parameter_set(args, model_name, paramset_path, relative=False): Returns: ``None`` + + Raises: + ValueError if creating a relative path fails. """ def _recurse(args_param): if isinstance(args_param, dict): @@ -552,10 +555,19 @@ def _recurse(args_param): if (normalized_path == '.' or os.path.dirname(paramset_path) == normalized_path): return '.' - temp_rel_path = os.path.relpath( - normalized_path, os.path.dirname(paramset_path)) - # Always save unix paths. - linux_style_path = temp_rel_path.replace('\\', '/') + try: + temp_rel_path = os.path.relpath( + normalized_path, os.path.dirname(paramset_path)) + except ValueError: + # On Windows, ValueError is raised when ``path`` and + # ``start`` are on different drives + raise ValueError( + """Error: Cannot save datastack with relative + paths across drives. Choose a different save + location, or use absolute paths.""") + else: + # Always save unix paths. + linux_style_path = temp_rel_path.replace('\\', '/') else: # Always save unix paths. linux_style_path = normalized_path.replace('\\', '/') diff --git a/src/natcap/invest/ui_server.py b/src/natcap/invest/ui_server.py index bfbe1874ec..f7be83d66f 100644 --- a/src/natcap/invest/ui_server.py +++ b/src/natcap/invest/ui_server.py @@ -175,7 +175,9 @@ def write_parameter_set_file(): relativePaths: boolean Returns: - A string. + A dictionary with the following key/value pairs: + - message (string): for logging and/or rendering in the UI. + - error (boolean): True if an error occurred, otherwise False. 
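A minimal, illustrative sketch of the message/error contract described in the docstring above, outside the InVEST codebase. It assumes Flask 1.1+ (which serializes a dict returned from a view into a JSON response body); the route name, helper function, and error text below are hypothetical and not part of this patch:

    from flask import Flask

    app = Flask(__name__)

    def _pretend_save():
        # Hypothetical stand-in for datastack.build_parameter_set(...)
        raise ValueError('Cannot save with relative paths across drives.')

    @app.route('/demo_save', methods=['POST'])
    def demo_save():
        try:
            _pretend_save()
        except ValueError as err:
            # Returning a dict makes Flask emit an application/json response.
            return {'message': str(err), 'error': True}
        return {'message': 'Parameter set saved', 'error': False}

    with app.test_client() as client:
        response = client.post('/demo_save')
        assert response.json == {
            'message': 'Cannot save with relative paths across drives.',
            'error': True}
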
""" payload = request.get_json() filepath = payload['filepath'] @@ -183,9 +185,20 @@ def write_parameter_set_file(): args = json.loads(payload['args']) relative_paths = payload['relativePaths'] - datastack.build_parameter_set( - args, modulename, filepath, relative=relative_paths) - return 'parameter set saved' + try: + datastack.build_parameter_set( + args, modulename, filepath, relative=relative_paths) + except ValueError as message: + LOGGER.error(str(message)) + return { + 'message': str(message), + 'error': True + } + else: + return { + 'message': 'Parameter set saved', + 'error': False + } @app.route(f'/{PREFIX}/save_to_python', methods=['POST']) diff --git a/tests/test_ui_server.py b/tests/test_ui_server.py index 426dc57005..15e93bbd4b 100644 --- a/tests/test_ui_server.py +++ b/tests/test_ui_server.py @@ -122,8 +122,11 @@ def test_write_parameter_set_file(self): }), 'relativePaths': True, } - _ = test_client.post( + response = test_client.post( f'{ROUTE_PREFIX}/write_parameter_set_file', json=payload) + self.assertEqual( + response.json, + {'message': 'Parameter set saved', 'error': False}) with open(filepath, 'r') as file: actual_data = json.loads(file.read()) self.assertEqual( diff --git a/workbench/src/renderer/components/SaveAsModal/index.jsx b/workbench/src/renderer/components/SaveAsModal/index.jsx index 06707a846d..a2e5e9917e 100644 --- a/workbench/src/renderer/components/SaveAsModal/index.jsx +++ b/workbench/src/renderer/components/SaveAsModal/index.jsx @@ -67,6 +67,7 @@ class SaveAsModal extends React.Component { } handleShow() { + this.props.removeSaveErrors(); this.setState({ relativePaths: false, show: true, diff --git a/workbench/src/renderer/components/SetupTab/index.jsx b/workbench/src/renderer/components/SetupTab/index.jsx index c241dd5706..5914c66ff4 100644 --- a/workbench/src/renderer/components/SetupTab/index.jsx +++ b/workbench/src/renderer/components/SetupTab/index.jsx @@ -100,6 +100,7 @@ class SetupTab extends React.Component { this.savePythonScript = this.savePythonScript.bind(this); this.saveJsonFile = this.saveJsonFile.bind(this); this.setSaveAlert = this.setSaveAlert.bind(this); + this.removeSaveErrors = this.removeSaveErrors.bind(this); this.wrapInvestExecute = this.wrapInvestExecute.bind(this); this.investValidate = this.investValidate.bind(this); this.debouncedValidate = this.debouncedValidate.bind(this); @@ -234,8 +235,8 @@ class SetupTab extends React.Component { relativePaths: relativePaths, args: JSON.stringify(args), }; - const response = await writeParametersToFile(payload); - this.setSaveAlert(response); + const {message, error} = await writeParametersToFile(payload); + this.setSaveAlert(message, error); } async saveDatastack(datastackPath) { @@ -249,9 +250,9 @@ class SetupTab extends React.Component { args: JSON.stringify(args), }; const key = window.crypto.getRandomValues(new Uint16Array(1))[0].toString(); - this.setSaveAlert('archiving...', key); + this.setSaveAlert('archiving...', false, key); const response = await archiveDatastack(payload); - this.setSaveAlert(response, key); + this.setSaveAlert(response, false, key); } /** State updater for alert messages from various save buttons. @@ -262,15 +263,35 @@ class SetupTab extends React.Component { * 1. display: because a new save occurred, or * 2. not display: on a re-render after `Expire` expired, or * 3. update: because 'archiving...' alert changes to final message + * @param {boolean} error - true if message was generated by an error, + * false otherwise. Defaults to false. 
* * @returns {undefined} */ setSaveAlert( message, + error = false, key = window.crypto.getRandomValues(new Uint16Array(1))[0].toString() ) { this.setState({ - saveAlerts: { ...this.state.saveAlerts, ...{ [key]: message } } + saveAlerts: { + ...this.state.saveAlerts, + ...{ [key]: { + message, + error + }}} + }); + } + + removeSaveErrors() { + const alerts = this.state.saveAlerts; + Object.keys(alerts).forEach((key) => { + if (alerts[key].error) { + delete alerts[key]; + } + }); + this.setState({ + saveAlerts: alerts }); } @@ -490,18 +511,22 @@ class SetupTab extends React.Component { const SaveAlerts = []; Object.keys(saveAlerts).forEach((key) => { - const message = saveAlerts[key]; + const { message, error } = saveAlerts[key]; if (message) { // Alert won't expire during archiving; will expire 2s after completion - const alertExpires = (message === 'archiving...') ? 1e7 : 2000; + // Alert won't expire when an error has occurred; + // will be hidden next time save modal opens + const alertExpires = (error || message === 'archiving...') ? 1e7 : 2000; SaveAlerts.push( - - {message} + + {t(message)} ); @@ -564,6 +589,7 @@ class SetupTab extends React.Component { savePythonScript={this.savePythonScript} saveJsonFile={this.saveJsonFile} saveDatastack={this.saveDatastack} + removeSaveErrors={this.removeSaveErrors} /> {SaveAlerts} diff --git a/workbench/src/renderer/server_requests.js b/workbench/src/renderer/server_requests.js index 286bcd690b..90ca4125aa 100644 --- a/workbench/src/renderer/server_requests.js +++ b/workbench/src/renderer/server_requests.js @@ -173,10 +173,14 @@ export function writeParametersToFile(payload) { body: JSON.stringify(payload), headers: { 'Content-Type': 'application/json' }, }) - .then((response) => response.text()) - .then((text) => { - logger.debug(text); - return text; + .then((response) => response.json()) + .then(({message, error}) => { + if (error) { + logger.error(message); + } else { + logger.debug(message); + } + return {message, error}; }) .catch((error) => logger.error(error.stack)) ); diff --git a/workbench/tests/renderer/investtab.test.js b/workbench/tests/renderer/investtab.test.js index 2a125fbddc..e3b6afdc07 100644 --- a/workbench/tests/renderer/investtab.test.js +++ b/workbench/tests/renderer/investtab.test.js @@ -217,7 +217,10 @@ describe('Sidebar Buttons', () => { }); test('Save to JSON: requests endpoint with correct payload', async () => { - const response = 'saved'; + const response = { + message: 'saved', + error: false, + }; writeParametersToFile.mockResolvedValue(response); const mockDialogData = { canceled: false, filePath: 'foo.json' }; ipcRenderer.invoke.mockResolvedValueOnce(mockDialogData); @@ -230,7 +233,7 @@ describe('Sidebar Buttons', () => { const saveButton = await findByRole('button', { name: 'Save' }); await userEvent.click(saveButton); - expect(await findByRole('alert')).toHaveTextContent(response); + expect(await findByRole('alert')).toHaveTextContent(response.message); const payload = writeParametersToFile.mock.calls[0][0]; expect(Object.keys(payload)).toEqual(expect.arrayContaining( ['filepath', 'moduleName', 'relativePaths', 'args'] From cf56816a32ba7dd64b3ce52fecff17b92aa88bfc Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Mon, 16 Sep 2024 14:18:19 -0600 Subject: [PATCH 02/60] Update Python paramset tests --- tests/test_datastack.py | 22 +++++++++++++++++++++- tests/test_ui_server.py | 24 ++++++++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) diff --git a/tests/test_datastack.py 
b/tests/test_datastack.py index 45d2e590bd..a471b8e5e3 100644 --- a/tests/test_datastack.py +++ b/tests/test_datastack.py @@ -6,10 +6,10 @@ import pprint import shutil import sys -import tarfile import tempfile import textwrap import unittest +from unittest.mock import patch import numpy import pandas @@ -506,6 +506,26 @@ def test_relative_parameter_set(self): self.assertEqual(invest_version, __version__) self.assertEqual(callable_name, modelname) + def test_relative_path_failure(self): + """Datastack: raise error when relative path creation fails.""" + from natcap.invest import datastack + + params = { + 'data_dir': os.path.join(self.workspace, 'data_dir'), + } + modelname = 'natcap.invest.foo' + paramset_filename = os.path.join(self.workspace, 'paramset.json') + + # make the sample data so filepaths are interpreted correctly + os.makedirs(params['data_dir']) + + # Call build_parameter_set and force it into an error state + with self.assertRaises(ValueError): + with patch('natcap.invest.os.path.relpath', + side_effect=ValueError): + datastack.build_parameter_set( + params, modelname, paramset_filename, relative=True) + @unittest.skipUnless(sys.platform.startswith("win"), "requires Windows") def test_relative_parameter_set_windows(self): """Datastack: test relative parameter set paths saved linux style.""" diff --git a/tests/test_ui_server.py b/tests/test_ui_server.py index 15e93bbd4b..02d9436ebe 100644 --- a/tests/test_ui_server.py +++ b/tests/test_ui_server.py @@ -133,6 +133,30 @@ def test_write_parameter_set_file(self): set(actual_data), {'args', 'invest_version', 'model_name'}) + def test_write_parameter_set_file_error_handling(self): + """UI server: write_parameter_set_file endpoint + should catch a ValueError and return an error message. + """ + test_client = ui_server.app.test_client() + self.workspace_dir = tempfile.mkdtemp() + filepath = os.path.join(self.workspace_dir, 'datastack.json') + payload = { + 'filepath': filepath, + 'moduleName': 'natcap.invest.carbon', + 'args': json.dumps({ + 'workspace_dir': 'foo' + }), + 'relativePaths': True, + } + error_message = 'Error saving datastack' + with patch('natcap.invest.datastack.build_parameter_set', + side_effect=ValueError(error_message)): + response = test_client.post( + f'{ROUTE_PREFIX}/write_parameter_set_file', json=payload) + self.assertEqual( + response.json, + {'message': error_message, 'error': True}) + def test_save_to_python(self): """UI server: save_to_python endpoint.""" test_client = ui_server.app.test_client() From 73f283feaf95f78d1ef0f95beaca4b91c2d579d3 Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Mon, 16 Sep 2024 16:22:26 -0600 Subject: [PATCH 03/60] Handle & display error on save datastack failure --- src/natcap/invest/datastack.py | 12 +++++-- src/natcap/invest/ui_server.py | 27 +++++++++++---- tests/test_datastack.py | 19 +++++++++++ tests/test_ui_server.py | 33 ++++++++++++++++++- .../renderer/components/SetupTab/index.jsx | 4 +-- workbench/src/renderer/server_requests.js | 12 ++++--- workbench/tests/renderer/investtab.test.js | 7 ++-- 7 files changed, 96 insertions(+), 18 deletions(-) diff --git a/src/natcap/invest/datastack.py b/src/natcap/invest/datastack.py index a68b153078..da0a7eca2b 100644 --- a/src/natcap/invest/datastack.py +++ b/src/natcap/invest/datastack.py @@ -226,6 +226,10 @@ def build_datastack_archive(args, model_name, datastack_path): Returns: ``None`` + + Raises: + ValueError if raised by build_parameter_set + (i.e., if creating a relative path fails). 
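For context on the Raises note above, a small sketch of the underlying failure mode: on a Windows interpreter, os.path.relpath cannot express a path on one drive relative to a start directory on another drive and raises ValueError. The drive letters here are hypothetical and the exact message text can vary by Python version:

    >>> import os.path
    >>> os.path.relpath('D:\\data\\input.tif', start='C:\\projects')
    Traceback (most recent call last):
      ...
    ValueError: path is on mount 'D:', start on mount 'C:'
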
""" module = importlib.import_module(name=model_name) @@ -453,8 +457,12 @@ def build_datastack_archive(args, model_name, datastack_path): # write parameters to a new json file in the temp workspace param_file_uri = os.path.join(temp_workspace, 'parameters' + PARAMETER_SET_EXTENSION) - build_parameter_set( - rewritten_args, model_name, param_file_uri, relative=True) + try: + build_parameter_set( + rewritten_args, model_name, param_file_uri, relative=True) + except ValueError as message: + # Pass through for handling by ui_server + raise ValueError(message) # Remove the handler before archiving the working dir (and the logfile) archive_filehandler.close() diff --git a/src/natcap/invest/ui_server.py b/src/natcap/invest/ui_server.py index f7be83d66f..dd009c3a16 100644 --- a/src/natcap/invest/ui_server.py +++ b/src/natcap/invest/ui_server.py @@ -234,15 +234,27 @@ def build_datastack_archive(): args: JSON string of InVEST model args keys and values Returns: - A string. + A dictionary with the following key/value pairs: + - message (string): for logging and/or rendering in the UI. + - error (boolean): True if an error occurred, otherwise False. """ payload = request.get_json() - datastack.build_datastack_archive( - json.loads(payload['args']), - payload['moduleName'], - payload['filepath']) - - return 'datastack archive created' + try: + datastack.build_datastack_archive( + json.loads(payload['args']), + payload['moduleName'], + payload['filepath']) + except ValueError as message: + LOGGER.error(str(message)) + return { + 'message': str(message), + 'error': True + } + else: + return { + 'message': 'Datastack archive created', + 'error': False + } @app.route(f'/{PREFIX}/log_model_start', methods=['POST']) @@ -264,6 +276,7 @@ def log_model_exit(): payload['status']) return 'OK' + @app.route(f'/{PREFIX}/languages', methods=['GET']) def get_supported_languages(): """Return a mapping of supported languages to their display names.""" diff --git a/tests/test_datastack.py b/tests/test_datastack.py index a471b8e5e3..35d0695fa7 100644 --- a/tests/test_datastack.py +++ b/tests/test_datastack.py @@ -397,6 +397,25 @@ def test_archive_extraction(self): os.path.join(spatial_csv_dir, spatial_csv_dict[4]['path']), target_csv_vector_path) + def test_relative_path_failure(self): + """Datastack: raise error when relative path creation fails.""" + from natcap.invest import datastack + params = { + 'workspace_dir': os.path.join(self.workspace), + } + + archive_path = os.path.join(self.workspace, 'archive.invs.tar.gz') + + # Call build_datastack_archive and force build_parameter_set + # to raise an error + error_message = 'Error saving datastack' + with self.assertRaises(ValueError): + with patch('natcap.invest.datastack.build_parameter_set', + side_effect=ValueError(error_message)): + datastack.build_datastack_archive( + params, 'test_datastack_modules.simple_parameters', + archive_path) + class ParameterSetTest(unittest.TestCase): """Test Datastack.""" diff --git a/tests/test_ui_server.py b/tests/test_ui_server.py index 02d9436ebe..78cc86cd01 100644 --- a/tests/test_ui_server.py +++ b/tests/test_ui_server.py @@ -190,11 +190,42 @@ def test_build_datastack_archive(self): 'carbon_pools_path': data_path }), } - _ = test_client.post( + response = test_client.post( f'{ROUTE_PREFIX}/build_datastack_archive', json=payload) + self.assertEqual( + response.json, + {'message': 'Datastack archive created', 'error': False}) # test_datastack.py asserts the actual archiving functionality 
self.assertTrue(os.path.exists(target_filepath)) + def test_build_datastack_archive_error_handling(self): + """UI server: build_datastack_archive endpoint + should catch a ValueError and return an error message. + """ + test_client = ui_server.app.test_client() + self.workspace_dir = tempfile.mkdtemp() + target_filepath = os.path.join(self.workspace_dir, 'data.tgz') + data_path = os.path.join(self.workspace_dir, 'data.csv') + with open(data_path, 'w') as file: + file.write('hello') + + payload = { + 'filepath': target_filepath, + 'moduleName': 'natcap.invest.carbon', + 'args': json.dumps({ + 'workspace_dir': 'foo', + 'carbon_pools_path': data_path + }), + } + error_message = 'Error saving datastack' + with patch('natcap.invest.datastack.build_datastack_archive', + side_effect=ValueError(error_message)): + response = test_client.post( + f'{ROUTE_PREFIX}/build_datastack_archive', json=payload) + self.assertEqual( + response.json, + {'message': error_message, 'error': True}) + @patch('natcap.invest.ui_server.usage.requests.post') @patch('natcap.invest.ui_server.usage.requests.get') def test_log_model_start(self, mock_get, mock_post): diff --git a/workbench/src/renderer/components/SetupTab/index.jsx b/workbench/src/renderer/components/SetupTab/index.jsx index 5914c66ff4..7577cbce59 100644 --- a/workbench/src/renderer/components/SetupTab/index.jsx +++ b/workbench/src/renderer/components/SetupTab/index.jsx @@ -251,8 +251,8 @@ class SetupTab extends React.Component { }; const key = window.crypto.getRandomValues(new Uint16Array(1))[0].toString(); this.setSaveAlert('archiving...', false, key); - const response = await archiveDatastack(payload); - this.setSaveAlert(response, false, key); + const {message, error} = await archiveDatastack(payload); + this.setSaveAlert(message, error, key); } /** State updater for alert messages from various save buttons. 
diff --git a/workbench/src/renderer/server_requests.js b/workbench/src/renderer/server_requests.js index 90ca4125aa..0e8be93fb3 100644 --- a/workbench/src/renderer/server_requests.js +++ b/workbench/src/renderer/server_requests.js @@ -146,10 +146,14 @@ export function archiveDatastack(payload) { body: JSON.stringify(payload), headers: { 'Content-Type': 'application/json' }, }) - .then((response) => response.text()) - .then((text) => { - logger.debug(text); - return text; + .then((response) => response.json()) + .then(({message, error}) => { + if (error) { + logger.error(message); + } else { + logger.debug(message); + } + return {message, error}; }) .catch((error) => logger.error(error.stack)) ); diff --git a/workbench/tests/renderer/investtab.test.js b/workbench/tests/renderer/investtab.test.js index e3b6afdc07..e68a5e4e8b 100644 --- a/workbench/tests/renderer/investtab.test.js +++ b/workbench/tests/renderer/investtab.test.js @@ -289,7 +289,10 @@ describe('Sidebar Buttons', () => { }); test('Save datastack: requests endpoint with correct payload', async () => { - const response = 'saved'; + const response = { + message: 'saved', + error: false, + }; archiveDatastack.mockImplementation(() => new Promise( (resolve) => { setTimeout(() => resolve(response), 500); @@ -308,7 +311,7 @@ describe('Sidebar Buttons', () => { expect(await findByRole('alert')).toHaveTextContent('archiving...'); await waitFor(() => { - expect(getByRole('alert')).toHaveTextContent(response); + expect(getByRole('alert')).toHaveTextContent(response.message); }); const payload = archiveDatastack.mock.calls[0][0]; expect(Object.keys(payload)).toEqual(expect.arrayContaining( From 35ee58f122152d79a76295bd13e1749bf46d804a Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Tue, 17 Sep 2024 15:01:28 -0600 Subject: [PATCH 04/60] Update React tests to cover save alerts --- workbench/tests/renderer/investtab.test.js | 170 ++++++++++++++++----- 1 file changed, 129 insertions(+), 41 deletions(-) diff --git a/workbench/tests/renderer/investtab.test.js b/workbench/tests/renderer/investtab.test.js index e68a5e4e8b..bee50202b3 100644 --- a/workbench/tests/renderer/investtab.test.js +++ b/workbench/tests/renderer/investtab.test.js @@ -221,19 +221,18 @@ describe('Sidebar Buttons', () => { message: 'saved', error: false, }; - writeParametersToFile.mockResolvedValue(response); + writeParametersToFile.mockResolvedValueOnce(response); const mockDialogData = { canceled: false, filePath: 'foo.json' }; ipcRenderer.invoke.mockResolvedValueOnce(mockDialogData); const { findByText, findByLabelText, findByRole } = renderInvestTab(); const saveAsButton = await findByText('Save as...'); await userEvent.click(saveAsButton); - const jsonOption = await findByLabelText((content, element) => content.startsWith('Parameters only')); + const jsonOption = await findByLabelText((content) => content.startsWith('Parameters only')); await userEvent.click(jsonOption); const saveButton = await findByRole('button', { name: 'Save' }); await userEvent.click(saveButton); - expect(await findByRole('alert')).toHaveTextContent(response.message); const payload = writeParametersToFile.mock.calls[0][0]; expect(Object.keys(payload)).toEqual(expect.arrayContaining( ['filepath', 'moduleName', 'relativePaths', 'args'] @@ -254,19 +253,18 @@ describe('Sidebar Buttons', () => { test('Save to Python script: requests endpoint with correct payload', async () => { const response = 'saved'; - saveToPython.mockResolvedValue(response); + saveToPython.mockResolvedValueOnce(response); const 
mockDialogData = { canceled: false, filePath: 'foo.py' }; ipcRenderer.invoke.mockResolvedValueOnce(mockDialogData); const { findByText, findByLabelText, findByRole } = renderInvestTab(); const saveAsButton = await findByText('Save as...'); await userEvent.click(saveAsButton); - const pythonOption = await findByLabelText((content, element) => content.startsWith('Python script')); + const pythonOption = await findByLabelText((content) => content.startsWith('Python script')); await userEvent.click(pythonOption); const saveButton = await findByRole('button', { name: 'Save' }); await userEvent.click(saveButton); - expect(await findByRole('alert')).toHaveTextContent(response); const payload = saveToPython.mock.calls[0][0]; expect(Object.keys(payload)).toEqual(expect.arrayContaining( ['filepath', 'modelname', 'args'] @@ -293,26 +291,18 @@ describe('Sidebar Buttons', () => { message: 'saved', error: false, }; - archiveDatastack.mockImplementation(() => new Promise( - (resolve) => { - setTimeout(() => resolve(response), 500); - } - )); + archiveDatastack.mockResolvedValueOnce(response); const mockDialogData = { canceled: false, filePath: 'data.tgz' }; ipcRenderer.invoke.mockResolvedValue(mockDialogData); const { findByText, findByLabelText, findByRole, getByRole } = renderInvestTab(); const saveAsButton = await findByText('Save as...'); await userEvent.click(saveAsButton); - const datastackOption = await findByLabelText((content, element) => content.startsWith('Parameters and data')); + const datastackOption = await findByLabelText((content) => content.startsWith('Parameters and data')); await userEvent.click(datastackOption); const saveButton = await findByRole('button', { name: 'Save' }); await userEvent.click(saveButton); - expect(await findByRole('alert')).toHaveTextContent('archiving...'); - await waitFor(() => { - expect(getByRole('alert')).toHaveTextContent(response.message); - }); const payload = archiveDatastack.mock.calls[0][0]; expect(Object.keys(payload)).toEqual(expect.arrayContaining( ['filepath', 'moduleName', 'args'] @@ -334,6 +324,129 @@ describe('Sidebar Buttons', () => { expect(archiveDatastack).toHaveBeenCalledTimes(1); }); + test.each([ + ['Parameters only', 'saveJsonFile'], + ['Parameters and data', 'saveDatastack'], + ['Python script', 'savePythonScript'] + ])('%s: does nothing when canceled', async (label, method) => { + // callback data if the OS dialog was canceled + const mockDialogData = { + canceled: true, + filePaths: [] + }; + ipcRenderer.invoke.mockResolvedValue(mockDialogData); + const spy = jest.spyOn(SetupTab.WrappedComponent.prototype, method); + + const { findByText, findByLabelText, findByRole } = renderInvestTab(); + const saveAsButton = await findByText('Save as...'); + await userEvent.click(saveAsButton); + const option = await findByLabelText((content, element) => content.startsWith(label)); + await userEvent.click(option); + const saveButton = await findByRole('button', { name: 'Save' }); + await userEvent.click(saveButton); + + // Calls that would have triggered if a file was selected + expect(spy).toHaveBeenCalledTimes(0); + }); + + test.each([ + [ + 'Parameters only', + writeParametersToFile, + {message: 'Parameter set saved', error: false} + ], + [ + 'Parameters and data', + archiveDatastack, + {message: 'Datastack archive created', error: false} + ], + [ + 'Python script', + saveToPython, + 'Python script saved' + ] + ])('%s: renders success message', async (label, method, response) => { + ipcRenderer.invoke.mockResolvedValueOnce({canceled: false, 
filePath: 'example.txt'}); + method.mockImplementation(() => new Promise( + (resolve) => { + setTimeout(() => resolve(response), 10); + } + )); + + const { findByText, findByLabelText, findByRole } = renderInvestTab(); + const saveAsButton = await findByText('Save as...'); + await userEvent.click(saveAsButton); + const option = await findByLabelText((content) => content.startsWith(label)); + await userEvent.click(option); + const saveButton = await findByRole('button', { name: 'Save' }); + await userEvent.click(saveButton); + + const saveAlert = await findByRole('alert'); + if (method == archiveDatastack) { + expect(saveAlert).toHaveTextContent('archiving...'); + } + await waitFor(() => { + expect(saveAlert).toHaveTextContent(response.message ?? response); + }); + expect(saveAlert).toHaveClass('alert-success'); + }); + + test.each([ + [ + 'Parameters only', + writeParametersToFile, + {message: 'Error saving parameter set', error: true} + ], + [ + 'Parameters and data', + archiveDatastack, + {message: 'Error creating datastack archive', error: true} + ], + ])('%s: renders error message', async (label, method, response) => { + ipcRenderer.invoke.mockResolvedValueOnce({canceled: false, filePath: 'example.txt'}); + method.mockImplementation(() => new Promise( + (resolve) => { + setTimeout(() => resolve(response), 10); + } + )); + + const { findByText, findByLabelText, findByRole } = renderInvestTab(); + const saveAsButton = await findByText('Save as...'); + await userEvent.click(saveAsButton); + const option = await findByLabelText((content) => content.startsWith(label)); + await userEvent.click(option); + const saveButton = await findByRole('button', { name: 'Save' }); + await userEvent.click(saveButton); + + const saveAlert = await findByRole('alert'); + if (method == archiveDatastack) { + expect(saveAlert).toHaveTextContent('archiving...'); + } + await waitFor(() => { + expect(saveAlert).toHaveTextContent(response.message); + }); + expect(saveAlert).toHaveClass('alert-danger'); + }); + + test('Save errors are cleared when save modal opens', async () => { + ipcRenderer.invoke.mockResolvedValueOnce({canceled: false, filePath: 'example.txt'}); + writeParametersToFile.mockResolvedValueOnce({message: 'Error saving parameter set', error: true}); + + // Trigger error alert + const { findByText, findByLabelText, findByRole, queryByRole } = renderInvestTab(); + const saveAsButton = await findByText('Save as...'); + await userEvent.click(saveAsButton); + const jsonOption = await findByLabelText((content) => content.startsWith('Parameters only')); + await userEvent.click(jsonOption); + const saveButton = await findByRole('button', { name: 'Save' }); + await userEvent.click(saveButton); + expect(await findByRole('alert')).toHaveClass('alert-danger'); + + // Re-open save modal + await userEvent.click(saveAsButton); + expect(queryByRole('alert')).toBe(null); + }); + test('Load parameters from file: loads parameters', async () => { const mockDatastack = { module_name: spec.pyname, @@ -391,31 +504,6 @@ describe('Sidebar Buttons', () => { expect(spy).toHaveBeenCalledTimes(0); }); - test.each([ - ['Parameters only', 'saveJsonFile'], - ['Parameters and data', 'saveDatastack'], - ['Python script', 'savePythonScript'] - ])('%s: does nothing when canceled', async (label, method) => { - // callback data if the OS dialog was canceled - const mockDialogData = { - canceled: true, - filePaths: [] - }; - ipcRenderer.invoke.mockResolvedValue(mockDialogData); - const spy = jest.spyOn(SetupTab.WrappedComponent.prototype, 
method); - - const { findByText, findByLabelText, findByRole } = renderInvestTab(); - const saveAsButton = await findByText('Save as...'); - await userEvent.click(saveAsButton); - const option = await findByLabelText((content, element) => content.startsWith(label)); - await userEvent.click(option); - const saveButton = await findByRole('button', { name: 'Save' }); - await userEvent.click(saveButton); - - // Calls that would have triggered if a file was selected - expect(spy).toHaveBeenCalledTimes(0); - }); - test('Load parameters button has hover text', async () => { const { findByText, From ec5f90eae3c2335c789310f8a768314058dfc538 Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Tue, 17 Sep 2024 15:06:11 -0600 Subject: [PATCH 05/60] Update HISTORY --- HISTORY.rst | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/HISTORY.rst b/HISTORY.rst index dfa49b3025..46eb4aee70 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -39,15 +39,17 @@ Unreleased Changes ------------------ * Workbench * Several small updates to the model input form UI to improve usability - and visual consistency (https://github.com/natcap/invest/issues/912) + and visual consistency (https://github.com/natcap/invest/issues/912). * Fixed a bug that caused the application to crash when attempting to open a workspace without a valid logfile - (https://github.com/natcap/invest/issues/1598) + (https://github.com/natcap/invest/issues/1598). * Fixed a bug that was allowing readonly workspace directories on Windows - (https://github.com/natcap/invest/issues/1599) + (https://github.com/natcap/invest/issues/1599). * Fixed a bug that, in certain scenarios, caused a datastack to be saved with relative paths when the Relative Paths checkbox was left unchecked - (https://github.com/natcap/invest/issues/1609) + (https://github.com/natcap/invest/issues/1609). + * Improved error handling when a datastack cannot be saved with relative + paths across drives (https://github.com/natcap/invest/issues/1608). 
3.14.2 (2024-05-29) ------------------- From f8f889033a0ced475ab3234aa55faaaa0ccbd9da Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Tue, 17 Sep 2024 15:46:49 -0600 Subject: [PATCH 06/60] Increase timeout for archiveDatastack alert test --- workbench/tests/renderer/investtab.test.js | 27 +++++++++------------- 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/workbench/tests/renderer/investtab.test.js b/workbench/tests/renderer/investtab.test.js index bee50202b3..0fff31abec 100644 --- a/workbench/tests/renderer/investtab.test.js +++ b/workbench/tests/renderer/investtab.test.js @@ -367,11 +367,15 @@ describe('Sidebar Buttons', () => { ] ])('%s: renders success message', async (label, method, response) => { ipcRenderer.invoke.mockResolvedValueOnce({canceled: false, filePath: 'example.txt'}); - method.mockImplementation(() => new Promise( - (resolve) => { - setTimeout(() => resolve(response), 10); - } - )); + if (method == archiveDatastack) { + method.mockImplementationOnce(() => new Promise( + (resolve) => { + setTimeout(() => resolve(response), 500); + } + )); + } else { + method.mockResolvedValueOnce(response); + } const { findByText, findByLabelText, findByRole } = renderInvestTab(); const saveAsButton = await findByText('Save as...'); @@ -404,11 +408,7 @@ describe('Sidebar Buttons', () => { ], ])('%s: renders error message', async (label, method, response) => { ipcRenderer.invoke.mockResolvedValueOnce({canceled: false, filePath: 'example.txt'}); - method.mockImplementation(() => new Promise( - (resolve) => { - setTimeout(() => resolve(response), 10); - } - )); + method.mockResolvedValueOnce(response); const { findByText, findByLabelText, findByRole } = renderInvestTab(); const saveAsButton = await findByText('Save as...'); @@ -419,12 +419,7 @@ describe('Sidebar Buttons', () => { await userEvent.click(saveButton); const saveAlert = await findByRole('alert'); - if (method == archiveDatastack) { - expect(saveAlert).toHaveTextContent('archiving...'); - } - await waitFor(() => { - expect(saveAlert).toHaveTextContent(response.message); - }); + expect(saveAlert).toHaveTextContent(response.message); expect(saveAlert).toHaveClass('alert-danger'); }); From bfff17b636c7308d9625145eb17fa1db09a8f9d6 Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Mon, 7 Oct 2024 17:40:03 -0600 Subject: [PATCH 07/60] Remove unneeded error pass-through and unneeded else blocks --- src/natcap/invest/datastack.py | 15 +++------------ src/natcap/invest/ui_server.py | 18 ++++++++---------- 2 files changed, 11 insertions(+), 22 deletions(-) diff --git a/src/natcap/invest/datastack.py b/src/natcap/invest/datastack.py index da0a7eca2b..610cac4841 100644 --- a/src/natcap/invest/datastack.py +++ b/src/natcap/invest/datastack.py @@ -226,10 +226,6 @@ def build_datastack_archive(args, model_name, datastack_path): Returns: ``None`` - - Raises: - ValueError if raised by build_parameter_set - (i.e., if creating a relative path fails). 
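The removal above leans on ordinary exception propagation: a ValueError raised inside build_parameter_set reaches the caller of build_datastack_archive unchanged even without the try/except re-raise, so the wrapper added nothing. A minimal sketch of that behavior, with illustrative function names:

    def inner():
        raise ValueError('cannot make a relative path')

    def outer():
        # No try/except needed here; the ValueError simply propagates.
        return inner()

    try:
        outer()
    except ValueError as err:
        print(err)  # -> cannot make a relative path
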
""" module = importlib.import_module(name=model_name) @@ -457,12 +453,8 @@ def build_datastack_archive(args, model_name, datastack_path): # write parameters to a new json file in the temp workspace param_file_uri = os.path.join(temp_workspace, 'parameters' + PARAMETER_SET_EXTENSION) - try: - build_parameter_set( + build_parameter_set( rewritten_args, model_name, param_file_uri, relative=True) - except ValueError as message: - # Pass through for handling by ui_server - raise ValueError(message) # Remove the handler before archiving the working dir (and the logfile) archive_filehandler.close() @@ -573,9 +565,8 @@ def _recurse(args_param): """Error: Cannot save datastack with relative paths across drives. Choose a different save location, or use absolute paths.""") - else: - # Always save unix paths. - linux_style_path = temp_rel_path.replace('\\', '/') + # Always save unix paths. + linux_style_path = temp_rel_path.replace('\\', '/') else: # Always save unix paths. linux_style_path = normalized_path.replace('\\', '/') diff --git a/src/natcap/invest/ui_server.py b/src/natcap/invest/ui_server.py index dd009c3a16..7c4ad72af2 100644 --- a/src/natcap/invest/ui_server.py +++ b/src/natcap/invest/ui_server.py @@ -194,11 +194,10 @@ def write_parameter_set_file(): 'message': str(message), 'error': True } - else: - return { - 'message': 'Parameter set saved', - 'error': False - } + return { + 'message': 'Parameter set saved', + 'error': False + } @app.route(f'/{PREFIX}/save_to_python', methods=['POST']) @@ -250,11 +249,10 @@ def build_datastack_archive(): 'message': str(message), 'error': True } - else: - return { - 'message': 'Datastack archive created', - 'error': False - } + return { + 'message': 'Datastack archive created', + 'error': False + } @app.route(f'/{PREFIX}/log_model_start', methods=['POST']) From 1d592d09bf4b4ae06f34b07bfdb7ceee98f84484 Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Mon, 7 Oct 2024 17:47:30 -0600 Subject: [PATCH 08/60] Change duration of save alerts to 4s, and update documentation for setSaveAlert --- workbench/src/renderer/components/SetupTab/index.jsx | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/workbench/src/renderer/components/SetupTab/index.jsx b/workbench/src/renderer/components/SetupTab/index.jsx index 7577cbce59..fee9ebecb4 100644 --- a/workbench/src/renderer/components/SetupTab/index.jsx +++ b/workbench/src/renderer/components/SetupTab/index.jsx @@ -258,13 +258,13 @@ class SetupTab extends React.Component { /** State updater for alert messages from various save buttons. * * @param {string} message - the message to display + * @param {boolean} error - true if message was generated by an error, + * false otherwise. Defaults to false. * @param {string} key - a key to uniquely identify each save action, * passed as prop to `Expire` so that it can be aware of whether to, * 1. display: because a new save occurred, or * 2. not display: on a re-render after `Expire` expired, or * 3. update: because 'archiving...' alert changes to final message - * @param {boolean} error - true if message was generated by an error, - * false otherwise. Defaults to false. * * @returns {undefined} */ @@ -516,7 +516,7 @@ class SetupTab extends React.Component { // Alert won't expire during archiving; will expire 2s after completion // Alert won't expire when an error has occurred; // will be hidden next time save modal opens - const alertExpires = (error || message === 'archiving...') ? 1e7 : 2000; + const alertExpires = (error || message === 'archiving...') ? 
1e7 : 4000; SaveAlerts.push( Date: Mon, 7 Oct 2024 17:52:35 -0600 Subject: [PATCH 09/60] Remove extra whitespace & update comment --- src/natcap/invest/datastack.py | 2 +- workbench/src/renderer/components/SetupTab/index.jsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/natcap/invest/datastack.py b/src/natcap/invest/datastack.py index 610cac4841..0fb2cabed4 100644 --- a/src/natcap/invest/datastack.py +++ b/src/natcap/invest/datastack.py @@ -454,7 +454,7 @@ def build_datastack_archive(args, model_name, datastack_path): param_file_uri = os.path.join(temp_workspace, 'parameters' + PARAMETER_SET_EXTENSION) build_parameter_set( - rewritten_args, model_name, param_file_uri, relative=True) + rewritten_args, model_name, param_file_uri, relative=True) # Remove the handler before archiving the working dir (and the logfile) archive_filehandler.close() diff --git a/workbench/src/renderer/components/SetupTab/index.jsx b/workbench/src/renderer/components/SetupTab/index.jsx index fee9ebecb4..02256f5a40 100644 --- a/workbench/src/renderer/components/SetupTab/index.jsx +++ b/workbench/src/renderer/components/SetupTab/index.jsx @@ -513,7 +513,7 @@ class SetupTab extends React.Component { Object.keys(saveAlerts).forEach((key) => { const { message, error } = saveAlerts[key]; if (message) { - // Alert won't expire during archiving; will expire 2s after completion + // Alert won't expire during archiving; will expire 4s after completion // Alert won't expire when an error has occurred; // will be hidden next time save modal opens const alertExpires = (error || message === 'archiving...') ? 1e7 : 4000; From 0ff560b22cd37f398e3cf50bc89125a4d2a24f5a Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 8 Oct 2024 13:54:55 -0700 Subject: [PATCH 10/60] Removing numpy version cap that prevented numpy 2 installation. RE:#1641 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index cf2d3aeb76..d20cae2cfa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,7 +13,7 @@ GDAL>=3.4.2 Pyro4==4.77 # pip-only pandas>=1.2.1 -numpy>=1.11.0,!=1.16.0,<2.0 +numpy>=1.11.0,!=1.16.0 Rtree>=0.8.2,!=0.9.1 shapely>=2.0.0 scipy>=1.9.0,!=1.12.* From 5e7df9416ee4ccefacc1f83682ca25342e20eb56 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 8 Oct 2024 14:02:48 -0700 Subject: [PATCH 11/60] Adding note to HISTORY. RE#1641 --- HISTORY.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/HISTORY.rst b/HISTORY.rst index 5ab3fd4117..96d7c3bee1 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -37,6 +37,9 @@ Unreleased Changes ------------------ +* General + * InVEST has been updated to build against numpy 2. + https://github.com/natcap/invest/issues/1641 * Workbench * Several small updates to the model input form UI to improve usability and visual consistency (https://github.com/natcap/invest/issues/912) From 670cb7d3709a3cee61dffe17f4f3f746cc715559 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Thu, 10 Oct 2024 15:04:19 -0700 Subject: [PATCH 12/60] Trying to lfs-checkout to the right revision. 
RE:#1641 --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 234e814bbf..66a657c904 100644 --- a/Makefile +++ b/Makefile @@ -199,7 +199,7 @@ $(GIT_SAMPLE_DATA_REPO_PATH): | $(DATA_DIR) git -C $(GIT_SAMPLE_DATA_REPO_PATH) lfs install git -C $(GIT_SAMPLE_DATA_REPO_PATH) checkout $(GIT_SAMPLE_DATA_REPO_REV) git -C $(GIT_SAMPLE_DATA_REPO_PATH) lfs fetch - git -C $(GIT_SAMPLE_DATA_REPO_PATH) lfs checkout + git -C $(GIT_SAMPLE_DATA_REPO_PATH) lfs checkout $(GIT_SAMPLE_DATA_REPO_REV) $(GIT_TEST_DATA_REPO_PATH): | $(DATA_DIR) -git clone $(GIT_TEST_DATA_REPO) $(GIT_TEST_DATA_REPO_PATH) @@ -207,7 +207,7 @@ $(GIT_TEST_DATA_REPO_PATH): | $(DATA_DIR) git -C $(GIT_TEST_DATA_REPO_PATH) lfs install git -C $(GIT_TEST_DATA_REPO_PATH) checkout $(GIT_TEST_DATA_REPO_REV) git -C $(GIT_TEST_DATA_REPO_PATH) lfs fetch - git -C $(GIT_TEST_DATA_REPO_PATH) lfs checkout + git -C $(GIT_TEST_DATA_REPO_PATH) lfs checkout $(GIT_TEST_DATA_REPO_REV) fetch: $(GIT_UG_REPO_PATH) $(GIT_SAMPLE_DATA_REPO_PATH) $(GIT_TEST_DATA_REPO_PATH) From 56eb76de1548ca471bd3241d985229b4e56746e6 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Thu, 10 Oct 2024 15:12:10 -0700 Subject: [PATCH 13/60] Printing the subwatersheds vector for debugging. RE:#1641 --- scripts/invest-autovalidate.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/scripts/invest-autovalidate.py b/scripts/invest-autovalidate.py index 02ba843db9..5fe221d5c9 100644 --- a/scripts/invest-autovalidate.py +++ b/scripts/invest-autovalidate.py @@ -1,14 +1,14 @@ #!python -import os -import tempfile -import logging import argparse -import unittest import glob import importlib -import shutil +import logging +import os import pprint +import shutil +import tempfile +import unittest from natcap.invest import datastack @@ -82,8 +82,13 @@ def main(sampledatadir): model_warnings = [] # define here in case of uncaught exception. try: LOGGER.info('validating %s ', datastack_path) - model_warnings = getattr( - model_module, 'validate')(paramset.args) + try: + model_warnings = getattr( + model_module, 'validate')(paramset.args) + except RuntimeError: + with open('data/invest-sample-data/Annual_Water_Yield/subwatersheds_gura.shp') as shp: + print(shp.read()) + raise except AttributeError as err: # If there was no validate function, don't crash but raise it later. model_warnings = err From 595db9551289e7e455de3ad16ee173cd7de96497 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Thu, 10 Oct 2024 15:20:19 -0700 Subject: [PATCH 14/60] Attempting to read the binary file before printing. RE:#1641 --- scripts/invest-autovalidate.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/invest-autovalidate.py b/scripts/invest-autovalidate.py index 5fe221d5c9..4dfd88fb24 100644 --- a/scripts/invest-autovalidate.py +++ b/scripts/invest-autovalidate.py @@ -86,7 +86,8 @@ def main(sampledatadir): model_warnings = getattr( model_module, 'validate')(paramset.args) except RuntimeError: - with open('data/invest-sample-data/Annual_Water_Yield/subwatersheds_gura.shp') as shp: + with open(('data/invest-sample-data/Annual_Water_Yield/' + 'subwatersheds_gura.shp'), 'rb') as shp: print(shp.read()) raise except AttributeError as err: From 3c12306ba588769fe70772ab179b1c3596323524 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Thu, 10 Oct 2024 15:39:37 -0700 Subject: [PATCH 15/60] Restoring autovalidate script to its prior state. 
RE:#1641 --- scripts/invest-autovalidate.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/scripts/invest-autovalidate.py b/scripts/invest-autovalidate.py index 4dfd88fb24..b2ff133bf2 100644 --- a/scripts/invest-autovalidate.py +++ b/scripts/invest-autovalidate.py @@ -82,14 +82,8 @@ def main(sampledatadir): model_warnings = [] # define here in case of uncaught exception. try: LOGGER.info('validating %s ', datastack_path) - try: - model_warnings = getattr( - model_module, 'validate')(paramset.args) - except RuntimeError: - with open(('data/invest-sample-data/Annual_Water_Yield/' - 'subwatersheds_gura.shp'), 'rb') as shp: - print(shp.read()) - raise + model_warnings = getattr( + model_module, 'validate')(paramset.args) except AttributeError as err: # If there was no validate function, don't crash but raise it later. model_warnings = err From 5d42450a15dac6ae433c3cdb87ad61fefb3cebb4 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Thu, 10 Oct 2024 15:40:10 -0700 Subject: [PATCH 16/60] Capping GDAL to less than 3.9 to see if that fixes the weird opening issues. RE:#1641 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index d20cae2cfa..353cecd332 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,7 +10,7 @@ # scripts/convert-requirements-to-conda-yml.py as though it can only be found # on pip. -GDAL>=3.4.2 +GDAL>=3.4.2,<3.9 Pyro4==4.77 # pip-only pandas>=1.2.1 numpy>=1.11.0,!=1.16.0 From e550127f06789815dcfdf9375b78c44564a1338e Mon Sep 17 00:00:00 2001 From: James Douglass Date: Thu, 10 Oct 2024 16:36:31 -0700 Subject: [PATCH 17/60] Capping the gdal version as an experiment. RE:#1641 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 353cecd332..54d9633b64 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,7 +10,7 @@ # scripts/convert-requirements-to-conda-yml.py as though it can only be found # on pip. -GDAL>=3.4.2,<3.9 +GDAL>=3.4.2,<3.8.5 Pyro4==4.77 # pip-only pandas>=1.2.1 numpy>=1.11.0,!=1.16.0 From 632d9ba61706ec8911134a52297d5082f4da57a9 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Thu, 10 Oct 2024 16:48:53 -0700 Subject: [PATCH 18/60] Double-checking paths are as we expect them --- scripts/invest-autovalidate.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/invest-autovalidate.py b/scripts/invest-autovalidate.py index b2ff133bf2..c788e69ab4 100644 --- a/scripts/invest-autovalidate.py +++ b/scripts/invest-autovalidate.py @@ -81,7 +81,9 @@ def main(sampledatadir): model_warnings = [] # define here in case of uncaught exception. try: - LOGGER.info('validating %s ', datastack_path) + LOGGER.info('validating %s ', os.path.abspath(datastack_path)) + filepath = r"D:\a\invest\invest\data\invest-sample-data\Annual_Water_Yield\watershed_gura.shp" + print(f"File exists {filepath}: ", os.path.exists(filepath)) model_warnings = getattr( model_module, 'validate')(paramset.args) except AttributeError as err: From 82d81abce9d8436c2bcf3b944d91e972424a7a0a Mon Sep 17 00:00:00 2001 From: James Douglass Date: Thu, 10 Oct 2024 17:07:33 -0700 Subject: [PATCH 19/60] Using logging to record debug info. 
RE:#1641 --- scripts/invest-autovalidate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/invest-autovalidate.py b/scripts/invest-autovalidate.py index c788e69ab4..8e607e104e 100644 --- a/scripts/invest-autovalidate.py +++ b/scripts/invest-autovalidate.py @@ -83,7 +83,7 @@ def main(sampledatadir): try: LOGGER.info('validating %s ', os.path.abspath(datastack_path)) filepath = r"D:\a\invest\invest\data\invest-sample-data\Annual_Water_Yield\watershed_gura.shp" - print(f"File exists {filepath}: ", os.path.exists(filepath)) + LOGGER.info(f"File exists {filepath}: ", os.path.exists(filepath)) model_warnings = getattr( model_module, 'validate')(paramset.args) except AttributeError as err: From a9199a619f99175ac01da433c9ffdfae11f4381c Mon Sep 17 00:00:00 2001 From: James Douglass Date: Fri, 11 Oct 2024 11:19:48 -0700 Subject: [PATCH 20/60] Updating exception handling for rasters in validation. RE:#1645 --- HISTORY.rst | 5 +++++ src/natcap/invest/validation.py | 6 +++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/HISTORY.rst b/HISTORY.rst index c72a0147ea..ddcd9f5005 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -37,6 +37,11 @@ Unreleased Changes ------------------ +* General + * Updating validation to handle a change in exceptions raised by GDAL in + ``pygeoprocessing.get_raster_info`` and + ``pygeoprocessing.get_vector_info``. + https://github.com/natcap/invest/issues/1645 * Urban Nature Access * The model now works as expected when the user provides an LULC raster that does not have a nodata value defined. diff --git a/src/natcap/invest/validation.py b/src/natcap/invest/validation.py index 01d22526bf..c9c64cf085 100644 --- a/src/natcap/invest/validation.py +++ b/src/natcap/invest/validation.py @@ -799,7 +799,11 @@ def check_spatial_overlap(spatial_filepaths_list, for filepath in spatial_filepaths_list: try: info = pygeoprocessing.get_raster_info(filepath) - except ValueError: + except (ValueError, RuntimeError): + # ValueError is raised by PyGeoprocessing < 3.4.4 when the file is + # not a raster. + # RuntimeError is raised by GDAL in PyGeoprocessing >= 3.4.4 when + # the file is not a raster. info = pygeoprocessing.get_vector_info(filepath) if info['projection_wkt'] is None: From f3137f68dcf7a4c3317d8c9b6014edd6cd27ec41 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Fri, 11 Oct 2024 11:53:31 -0700 Subject: [PATCH 21/60] Adding a context manager for GDAL configuration options. 
RE:#1645 --- .../invest/forest_carbon_edge_effect.py | 8 +++++--- src/natcap/invest/utils.py | 19 +++++++++++++++++++ 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/src/natcap/invest/forest_carbon_edge_effect.py b/src/natcap/invest/forest_carbon_edge_effect.py index bb0fb399d0..a7569250b3 100644 --- a/src/natcap/invest/forest_carbon_edge_effect.py +++ b/src/natcap/invest/forest_carbon_edge_effect.py @@ -765,9 +765,11 @@ def _build_spatial_index( local_model_dir, 'local_carbon_shape.shp') lulc_projection_wkt = pygeoprocessing.get_raster_info( base_raster_path)['projection_wkt'] - pygeoprocessing.reproject_vector( - tropical_forest_edge_carbon_model_vector_path, lulc_projection_wkt, - carbon_model_reproject_path) + + with utils._set_gdal_configuration('OGR_ENABLE_PARTIAL_REPROJECTION', 'TRUE'): + pygeoprocessing.reproject_vector( + tropical_forest_edge_carbon_model_vector_path, lulc_projection_wkt, + carbon_model_reproject_path) model_vector = gdal.OpenEx(carbon_model_reproject_path) model_layer = model_vector.GetLayer() diff --git a/src/natcap/invest/utils.py b/src/natcap/invest/utils.py index f34c25d9e5..dcae5734dd 100644 --- a/src/natcap/invest/utils.py +++ b/src/natcap/invest/utils.py @@ -121,6 +121,25 @@ def capture_gdal_logging(): gdal.PopErrorHandler() +@contextlib.contextmanager +def _set_gdal_configuration(opt, value): + """Temporarily set a GDAL configuration option. + + Args: + opt (string): The GDAL configuration option to set. + value (string): The value to set the option to. + + Returns: + ``None`` + """ + prior_value = gdal.GetConfigOption(opt) + gdal.SetConfigOption(opt, value) + try: + yield + finally: + gdal.SetConfigOption(opt, prior_value) + + def _format_time(seconds): """Render the integer number of seconds as a string. Returns a string.""" hours, remainder = divmod(seconds, 3600) From 1d9d6deba0f43f0139d3dbbd6cb273c0ffb54446 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Fri, 11 Oct 2024 11:54:00 -0700 Subject: [PATCH 22/60] Adding some debugging in SWY. RE:#1645 --- .../invest/seasonal_water_yield/seasonal_water_yield.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/natcap/invest/seasonal_water_yield/seasonal_water_yield.py b/src/natcap/invest/seasonal_water_yield/seasonal_water_yield.py index 25228b3db4..78ec0d12d6 100644 --- a/src/natcap/invest/seasonal_water_yield/seasonal_water_yield.py +++ b/src/natcap/invest/seasonal_water_yield/seasonal_water_yield.py @@ -632,6 +632,7 @@ def execute(args): # ValueError when n_workers is an empty string. # TypeError when n_workers is None. n_workers = -1 # Synchronous mode. + LOGGER.debug('n_workers: %s', n_workers) task_graph = taskgraph.TaskGraph( os.path.join(args['workspace_dir'], 'taskgraph_cache'), n_workers, reporting_interval=5) @@ -642,6 +643,9 @@ def execute(args): (_INTERMEDIATE_BASE_FILES, intermediate_output_dir)], file_suffix) LOGGER.info('Checking that the AOI is not the output aggregate vector') + LOGGER.debug("aoi_path: %s", args['aoi_path']) + LOGGER.debug("aggregate_vector_path: %s", + os.path.normpath(file_registry['aggregate_vector_path'])) if (os.path.normpath(args['aoi_path']) == os.path.normpath(file_registry['aggregate_vector_path'])): raise ValueError( From ce88ef16dc55cb32e4414b0d24d062d545606721 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Fri, 11 Oct 2024 12:07:59 -0700 Subject: [PATCH 23/60] Correcting a test filepath in SWY. 
RE:#1645 --- tests/test_seasonal_water_yield_regression.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_seasonal_water_yield_regression.py b/tests/test_seasonal_water_yield_regression.py index 899fc1446b..e5eafa0e98 100644 --- a/tests/test_seasonal_water_yield_regression.py +++ b/tests/test_seasonal_water_yield_regression.py @@ -495,7 +495,7 @@ def test_duplicate_aoi_assertion(self): args = { 'workspace_dir': self.workspace_dir, 'aoi_path': os.path.join( - self.workspace_dir, 'aggregated_results_foo.shp'), + self.workspace_dir, 'aggregated_results_swy_foo.shp'), 'results_suffix': 'foo', 'alpha_m': '1/12', 'beta_i': '1.0', From 2246ecd3a0c6b282d4b3c973bc4bd6c5bf7fac0b Mon Sep 17 00:00:00 2001 From: James Douglass Date: Fri, 11 Oct 2024 12:50:15 -0700 Subject: [PATCH 24/60] Noting the change to FCEE in HISTORY. RE:#1645 --- HISTORY.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/HISTORY.rst b/HISTORY.rst index ddcd9f5005..bab7cba932 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -42,6 +42,9 @@ Unreleased Changes ``pygeoprocessing.get_raster_info`` and ``pygeoprocessing.get_vector_info``. https://github.com/natcap/invest/issues/1645 +* Forest Carbon Edge Effects + * Updating vector reprojection to allow partial reprojection. Related to + https://github.com/natcap/invest/issues/1645 * Urban Nature Access * The model now works as expected when the user provides an LULC raster that does not have a nodata value defined. From b978f36f97a5db7a8887fabb5686e5f7c8477843 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Fri, 11 Oct 2024 12:56:05 -0700 Subject: [PATCH 25/60] Fixing RST syntax. RE:#1645 --- HISTORY.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/HISTORY.rst b/HISTORY.rst index bab7cba932..a88c1f709b 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -44,7 +44,7 @@ Unreleased Changes https://github.com/natcap/invest/issues/1645 * Forest Carbon Edge Effects * Updating vector reprojection to allow partial reprojection. Related to - https://github.com/natcap/invest/issues/1645 + https://github.com/natcap/invest/issues/1645 * Urban Nature Access * The model now works as expected when the user provides an LULC raster that does not have a nodata value defined. From f82f01d7ccfd56c00771420e99628c3572c4ea6e Mon Sep 17 00:00:00 2001 From: dcdenu4 Date: Fri, 11 Oct 2024 16:29:22 -0400 Subject: [PATCH 26/60] Restrict pygeoprocessing to see if this the issue. 
--- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index cf2d3aeb76..b60c525f7c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -17,7 +17,7 @@ numpy>=1.11.0,!=1.16.0,<2.0 Rtree>=0.8.2,!=0.9.1 shapely>=2.0.0 scipy>=1.9.0,!=1.12.* -pygeoprocessing>=2.4.2 # pip-only +pygeoprocessing>=2.4.2,<2.4.5 # pip-only taskgraph>=0.11.0 psutil>=5.6.6 chardet>=3.0.4 From 844efbd27eecf192107815bd86588e8d624d6715 Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Mon, 14 Oct 2024 17:56:20 -0600 Subject: [PATCH 27/60] Output HQ rarity values to CSV (in addition to raster) --- src/natcap/invest/habitat_quality.py | 28 ++++++++++++++++++++++------ 1 file changed, 22 insertions(+), 6 deletions(-) diff --git a/src/natcap/invest/habitat_quality.py b/src/natcap/invest/habitat_quality.py index 301c18cfc7..91de611327 100644 --- a/src/natcap/invest/habitat_quality.py +++ b/src/natcap/invest/habitat_quality.py @@ -743,13 +743,16 @@ def execute(args): intermediate_output_dir, f'new_cover{lulc_key}{file_suffix}.tif') - rarity_path = os.path.join( + rarity_raster_path = os.path.join( output_dir, f'rarity{lulc_key}{file_suffix}.tif') + rarity_csv_path = os.path.join( + output_dir, f'rarity{lulc_key}{file_suffix}.csv') + _ = task_graph.add_task( func=_compute_rarity_operation, args=((lulc_base_path, 1), (lulc_path, 1), (new_cover_path, 1), - rarity_path), + rarity_raster_path, rarity_csv_path), dependent_task_list=[align_task], task_name=f'rarity{lulc_time}') @@ -773,7 +776,7 @@ def _calculate_habitat_quality(deg_hab_raster_list, quality_out_path, ksq): pygeoprocessing.raster_map( op=lambda degradation, habitat: ( habitat * (1 - (degradation**_SCALING_PARAM) / - (degradation**_SCALING_PARAM + ksq))), + (degradation**_SCALING_PARAM + ksq))), rasters=deg_hab_raster_list, target_path=quality_out_path) @@ -829,7 +832,8 @@ def total_degradation(*arrays): def _compute_rarity_operation( - base_lulc_path_band, lulc_path_band, new_cover_path, rarity_path): + base_lulc_path_band, lulc_path_band, new_cover_path, + rarity_raster_path, rarity_csv_path): """Calculate habitat rarity. Output rarity values will be an index from 0 - 1 where: @@ -846,7 +850,8 @@ def _compute_rarity_operation( new_cover_path (tuple): a 2 tuple for the path to intermediate raster file for trimming ``lulc_path_band`` to ``base_lulc_path_band`` of the form (path, band index). - rarity_path (string): path to output rarity raster. + rarity_raster_path (string): path to output rarity raster. + rarity_csv_path (string): path to output rarity CSV. Returns: None @@ -895,13 +900,24 @@ def _compute_rarity_operation( code_index[code] = 0.0 pygeoprocessing.reclassify_raster( - new_cover_path, code_index, rarity_path, gdal.GDT_Float32, + new_cover_path, code_index, rarity_raster_path, gdal.GDT_Float32, _OUT_NODATA) + _generate_rarity_csv(code_index, rarity_csv_path) + LOGGER.info('Finished rarity computation on' f' {os.path.basename(lulc_path_band[0])} land cover.') +def _generate_rarity_csv(rarity_dict, target_csv_path): + cols = ['LULC_code', 'rarity_value'] + with open(target_csv_path, 'w') as csvfile: + csvfile.write(str(','.join(cols) + '\n')) + for lulc_code in rarity_dict: + row = [str(lulc_code), str(rarity_dict[lulc_code])] + csvfile.write(str(','.join(row) + '\n')) + + def _raster_pixel_count(raster_path_band): """Count unique pixel values in single band raster. 
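For reference, a small self-contained sketch of the CSV layout that the new _generate_rarity_csv helper writes, and one way to read it back. The LULC codes and rarity values below are hypothetical:

    import io
    import pandas

    rarity = {3: 0.0, 1: 0.5, 7: 1.0}  # hypothetical LULC code -> rarity index
    lines = ['LULC_code,rarity_value']
    for code in sorted(rarity):  # sorted by LULC code for stable output
        lines.append(f'{code},{rarity[code]}')
    table = pandas.read_csv(io.StringIO('\n'.join(lines)))
    # table now has columns LULC_code and rarity_value, one row per code.
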
From 20f179ca26ba1c60729ffea040962793a5d2bbf8 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 12:24:31 -0700 Subject: [PATCH 28/60] Bumping pygeoprocessing requirement to at least 2.4.6 RE:#1645 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index cf2d3aeb76..43ce18bc31 100644 --- a/requirements.txt +++ b/requirements.txt @@ -17,7 +17,7 @@ numpy>=1.11.0,!=1.16.0,<2.0 Rtree>=0.8.2,!=0.9.1 shapely>=2.0.0 scipy>=1.9.0,!=1.12.* -pygeoprocessing>=2.4.2 # pip-only +pygeoprocessing>=2.4.6 # pip-only taskgraph>=0.11.0 psutil>=5.6.6 chardet>=3.0.4 From 751c751960bb1df313899f42734f63cb231369a1 Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Tue, 15 Oct 2024 13:37:56 -0600 Subject: [PATCH 29/60] Sort CSV output by LULC code --- src/natcap/invest/habitat_quality.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/natcap/invest/habitat_quality.py b/src/natcap/invest/habitat_quality.py index 91de611327..61dd6dcd5c 100644 --- a/src/natcap/invest/habitat_quality.py +++ b/src/natcap/invest/habitat_quality.py @@ -910,10 +910,11 @@ def _compute_rarity_operation( def _generate_rarity_csv(rarity_dict, target_csv_path): + lulc_codes = sorted(rarity_dict) cols = ['LULC_code', 'rarity_value'] with open(target_csv_path, 'w') as csvfile: csvfile.write(str(','.join(cols) + '\n')) - for lulc_code in rarity_dict: + for lulc_code in lulc_codes: row = [str(lulc_code), str(rarity_dict[lulc_code])] csvfile.write(str(','.join(row) + '\n')) From ec61361617c7d51afc49ac3a778baea209dfc424 Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Tue, 15 Oct 2024 13:50:35 -0600 Subject: [PATCH 30/60] Remove unused lines --- src/natcap/invest/habitat_quality.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/natcap/invest/habitat_quality.py b/src/natcap/invest/habitat_quality.py index 61dd6dcd5c..025effff92 100644 --- a/src/natcap/invest/habitat_quality.py +++ b/src/natcap/invest/habitat_quality.py @@ -862,7 +862,6 @@ def _compute_rarity_operation( base_lulc_path_band[0]) base_pixel_size = base_raster_info['pixel_size'] base_area = float(abs(base_pixel_size[0]) * abs(base_pixel_size[1])) - base_nodata = base_raster_info['nodata'][0] lulc_code_count_b = _raster_pixel_count(base_lulc_path_band) @@ -870,7 +869,6 @@ def _compute_rarity_operation( lulc_raster_info = pygeoprocessing.get_raster_info(lulc_path_band[0]) lulc_pixel_size = lulc_raster_info['pixel_size'] lulc_area = float(abs(lulc_pixel_size[0]) * abs(lulc_pixel_size[1])) - lulc_nodata = lulc_raster_info['nodata'][0] # Trim cover_x to the mask of base. pygeoprocessing.raster_map( From 8c1dbaaa8077dbf67b075fcf5662ed99ab9e42d6 Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Tue, 15 Oct 2024 14:02:22 -0600 Subject: [PATCH 31/60] Add docstring & update HISTORY --- HISTORY.rst | 2 ++ src/natcap/invest/habitat_quality.py | 10 ++++++++++ 2 files changed, 12 insertions(+) diff --git a/HISTORY.rst b/HISTORY.rst index 3a8ad6d10c..7c3f98c1cc 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -57,6 +57,8 @@ Unreleased Changes * Habitat Quality * Access raster is now generated from the reprojected access vector (https://github.com/natcap/invest/issues/1615). + * Rarity values are now output in CSV format (as well as in raster format) + (https://github.com/natcap/invest/issues/721). * Urban Flood Risk * Fields present on the input AOI vector are now retained in the output. 
(https://github.com/natcap/invest/issues/1600) diff --git a/src/natcap/invest/habitat_quality.py b/src/natcap/invest/habitat_quality.py index 025effff92..40b0c05e6e 100644 --- a/src/natcap/invest/habitat_quality.py +++ b/src/natcap/invest/habitat_quality.py @@ -908,6 +908,16 @@ def _compute_rarity_operation( def _generate_rarity_csv(rarity_dict, target_csv_path): + """Generate CSV containing rarity values by LULC code. + + Args: + rarity_dict (dict): dictionary containing LULC codes (as keys) + and their associated rarity values (as values). + target_csv_path (string): path to output CSV. + + Returns: + None + """ lulc_codes = sorted(rarity_dict) cols = ['LULC_code', 'rarity_value'] with open(target_csv_path, 'w') as csvfile: From c28041c49c523c5829816ed6cd1e68459574d672 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 13:31:54 -0700 Subject: [PATCH 32/60] Handling numpy 2 build requirements in pyproject.toml. RE:#1641 --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index fadad29609..d91de5af85 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,8 @@ invest = "natcap.invest.cli:main" # available at runtime. requires = [ 'setuptools>=61', 'wheel', 'setuptools_scm>=8.0', 'cython>=3.0.0', 'babel', - 'oldest-supported-numpy' + 'oldest-supported-numpy; python_version<=3.8', + 'numpy>=2; python_version>3.8', # numpy 2 only available for 3.9+ ] build-backend = "setuptools.build_meta" From e7c7c1a60f5a20cd0b83241023bb89814dd060a3 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 13:34:02 -0700 Subject: [PATCH 33/60] Correcting a toml syntax issue. RE:#1641 --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d91de5af85..aec28f1257 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,8 +46,8 @@ invest = "natcap.invest.cli:main" # available at runtime. requires = [ 'setuptools>=61', 'wheel', 'setuptools_scm>=8.0', 'cython>=3.0.0', 'babel', - 'oldest-supported-numpy; python_version<=3.8', - 'numpy>=2; python_version>3.8', # numpy 2 only available for 3.9+ + 'oldest-supported-numpy; python_version<="3.8"', + 'numpy>=2; python_version>"3.8"', # numpy 2 only available for 3.9+ ] build-backend = "setuptools.build_meta" From 78e1653f1d4c5d27fb056cdd7c55db273559505c Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Tue, 15 Oct 2024 15:20:38 -0600 Subject: [PATCH 34/60] Update tests --- tests/test_habitat_quality.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/test_habitat_quality.py b/tests/test_habitat_quality.py index 0924dc76c8..ae359f7645 100644 --- a/tests/test_habitat_quality.py +++ b/tests/test_habitat_quality.py @@ -5,6 +5,7 @@ import unittest import numpy +import pandas import pygeoprocessing from osgeo import gdal from osgeo import ogr @@ -245,6 +246,14 @@ def test_habitat_quality_presence_absence_regression(self): # so we should exclude those new nodata pixel values. 
assert_array_sum(raster_path, assert_value, include_nodata=False) + for csv_filename in ['rarity_c_regression.csv', + 'rarity_f_regression.csv']: + csv_path = os.path.join(args['workspace_dir'], csv_filename) + rarity_table = pandas.read_csv(csv_path) + assert 'LULC_code' in rarity_table.columns + self.assertAlmostEqual(rarity_table['rarity_value'].sum(), + 0.6667, 4) + def test_habitat_quality_regression_different_projections(self): """Habitat Quality: base regression test with simplified data.""" from natcap.invest import habitat_quality @@ -2105,3 +2114,5 @@ def test_habitat_quality_validate_missing_fut_column(self): header='column', header_name='fut_path') )] self.assertEqual(validate_result, expected) + +# @TODO: ¿add rarity CSV test? From dc633024790a56263a21aee8d2540a80d72b537d Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 16:22:53 -0700 Subject: [PATCH 35/60] Bumping the UG repo rev for RST syntax. An RST error in a footnote in Wave Energy was breaking the binary builds in the main InVEST repository. This is now addressed. RE:#1641 --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 66a657c904..ecd8eff096 100644 --- a/Makefile +++ b/Makefile @@ -10,7 +10,7 @@ GIT_TEST_DATA_REPO_REV := 324abde73e1d770ad75921466ecafd1ec6297752 GIT_UG_REPO := https://github.com/natcap/invest.users-guide GIT_UG_REPO_PATH := doc/users-guide -GIT_UG_REPO_REV := 0404bc5d4d43085cdc58f50f8fc29944b10cefb1 +GIT_UG_REPO_REV := f203ec069f9f03560c9a85b268e67ebb6b994953 ENV = "./env" ifeq ($(OS),Windows_NT) From e711300a78e597c8be48ef43b2606b3611b0ca5a Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 16:27:03 -0700 Subject: [PATCH 36/60] Adding a numpy>2 constraint for testing our builds. RE:#1641 --- constraints_tests.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/constraints_tests.txt b/constraints_tests.txt index 299273430e..9de9a0b5f2 100644 --- a/constraints_tests.txt +++ b/constraints_tests.txt @@ -13,3 +13,6 @@ GDAL<3.8.5 # root cause, let's cap the versions to those that work. # https://github.com/natcap/invest/issues/1622 #pyinstaller<6.10 + +# Test whether InVEST will build with numpy 2 +numpy>2 From f229373670a80a3b3ff59fa6cbbb77f6c7eaeb3d Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 16:31:10 -0700 Subject: [PATCH 37/60] Removing the GDAL upper bound for numpy2 compatibility. RE:#1641 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index f9e3bcc39a..0ab53af935 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,7 +10,7 @@ # scripts/convert-requirements-to-conda-yml.py as though it can only be found # on pip. -GDAL>=3.4.2,<3.8.5 +GDAL>=3.4.2 Pyro4==4.77 # pip-only pandas>=1.2.1 numpy>=1.11.0,!=1.16.0 From 9fdb381530a4edc44b8d2addea6c17cca7cc443d Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 16:35:31 -0700 Subject: [PATCH 38/60] Removing a no-longer-needed GDAL constraint. RE:#1641 --- constraints_tests.txt | 3 --- 1 file changed, 3 deletions(-) diff --git a/constraints_tests.txt b/constraints_tests.txt index 9de9a0b5f2..4f733bdab3 100644 --- a/constraints_tests.txt +++ b/constraints_tests.txt @@ -6,9 +6,6 @@ # occur with regular use of invest. https://github.com/OSGeo/gdal/issues/8497 GDAL!=3.6.*,!=3.7.* -# https://github.com/natcap/pygeoprocessing/issues/387 -GDAL<3.8.5 - # Pyinstaller 6.10 breaks our windows builds. 
Until we can figure out the # root cause, let's cap the versions to those that work. # https://github.com/natcap/invest/issues/1622 From b49dea2f7afb2ccf2b4448e34873f370c3154158 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 16:41:32 -0700 Subject: [PATCH 39/60] Removing numpy>2 constraint so that environments will resolve on python 3.8. RE:#1641 --- constraints_tests.txt | 3 --- 1 file changed, 3 deletions(-) diff --git a/constraints_tests.txt b/constraints_tests.txt index 4f733bdab3..b0056a12e1 100644 --- a/constraints_tests.txt +++ b/constraints_tests.txt @@ -10,6 +10,3 @@ GDAL!=3.6.*,!=3.7.* # root cause, let's cap the versions to those that work. # https://github.com/natcap/invest/issues/1622 #pyinstaller<6.10 - -# Test whether InVEST will build with numpy 2 -numpy>2 From 206c8f06cb82c0c8e0375792ce852c2efd78ebf8 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 16:48:06 -0700 Subject: [PATCH 40/60] Converting numpy.in1d to numpy.isin for numpy2 compatibility. RE:#1641 --- src/natcap/invest/scenario_gen_proximity.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/natcap/invest/scenario_gen_proximity.py b/src/natcap/invest/scenario_gen_proximity.py index bb62ce8fae..5f23f95269 100644 --- a/src/natcap/invest/scenario_gen_proximity.py +++ b/src/natcap/invest/scenario_gen_proximity.py @@ -490,7 +490,7 @@ def _convert_landscape( def _mask_base_op(lulc_array): """Create a mask of valid non-base pixels only.""" - base_mask = numpy.in1d( + base_mask = numpy.isin( lulc_array.flatten(), focal_landcover_codes).reshape( lulc_array.shape) if invert_mask: @@ -535,7 +535,7 @@ def _combine_masks(base_distance_array, non_base_distance_array): # turn inside and outside masks into a single mask def _mask_to_convertible_codes(distance_from_base_edge, lulc): """Mask out the distance transform to a set of lucodes.""" - convertible_mask = numpy.in1d( + convertible_mask = numpy.isin( lulc.flatten(), convertible_type_list).reshape(lulc.shape) return numpy.where( convertible_mask, distance_from_base_edge, From a43131a9acd7eccd7b150dbe88bd587c436cc451 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 17:04:29 -0700 Subject: [PATCH 41/60] Updating UFRM for numpy2 compatibility. RE:#1641 --- src/natcap/invest/urban_flood_risk_mitigation.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/natcap/invest/urban_flood_risk_mitigation.py b/src/natcap/invest/urban_flood_risk_mitigation.py index a6dbfa5690..63ec19298e 100644 --- a/src/natcap/invest/urban_flood_risk_mitigation.py +++ b/src/natcap/invest/urban_flood_risk_mitigation.py @@ -591,7 +591,7 @@ def _write_summary_vector( # This is the service_built equation. target_feature.SetField( - 'serv_blt', ( + 'serv_blt', float( damage_sum * runoff_ret_vol_stats[feature_id]['sum'])) target_feature.SetField( @@ -936,7 +936,8 @@ def validate(args, limit_to=None): nan_mask = cn_df.isna() if nan_mask.any(axis=None): nan_lucodes = nan_mask[nan_mask.any(axis=1)].index - lucode_list = list(nan_lucodes.values) + # Convert numpy dtype values to native python types + lucode_list = [i.item() for i in nan_lucodes.values] validation_warnings.append(( ['curve_number_table_path'], f'Missing curve numbers for lucode(s) {lucode_list}')) From 98904f3dd463520886e620341452f81a722e323f Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 17:15:31 -0700 Subject: [PATCH 42/60] Updating UCM tests for numerical outputs. 
The numpy2 update was producing slightly different numerical outputs, but also we weren't asserting that they were numerically close, just that they had the same number of digits after the decimal ... which wasn't working well because most of these values were large. RE:#1641 --- src/natcap/invest/urban_cooling_model.py | 6 ++-- tests/test_ucm.py | 38 ++++++++++-------------- 2 files changed, 18 insertions(+), 26 deletions(-) diff --git a/src/natcap/invest/urban_cooling_model.py b/src/natcap/invest/urban_cooling_model.py index eea80d8a12..ed3084016e 100644 --- a/src/natcap/invest/urban_cooling_model.py +++ b/src/natcap/invest/urban_cooling_model.py @@ -963,13 +963,13 @@ def calculate_uhi_result_vector( if cc_stats[feature_id]['count'] > 0: mean_cc = ( cc_stats[feature_id]['sum'] / cc_stats[feature_id]['count']) - feature.SetField('avg_cc', mean_cc) + feature.SetField('avg_cc', float(mean_cc)) mean_t_air = None if t_air_stats[feature_id]['count'] > 0: mean_t_air = ( t_air_stats[feature_id]['sum'] / t_air_stats[feature_id]['count']) - feature.SetField('avg_tmp_v', mean_t_air) + feature.SetField('avg_tmp_v', float(mean_t_air)) if mean_t_air: feature.SetField( @@ -979,7 +979,7 @@ def calculate_uhi_result_vector( wbgt = ( wbgt_stats[feature_id]['sum'] / wbgt_stats[feature_id]['count']) - feature.SetField('avg_wbgt_v', wbgt) + feature.SetField('avg_wbgt_v', float(wbgt)) if light_loss_stats and light_loss_stats[feature_id]['count'] > 0: light_loss = ( diff --git a/tests/test_ucm.py b/tests/test_ucm.py index 1696cd4e58..ea6ef583b7 100644 --- a/tests/test_ucm.py +++ b/tests/test_ucm.py @@ -1,12 +1,12 @@ """InVEST Urban Heat Island Mitigation model tests.""" -import unittest -import tempfile -import shutil import os +import shutil +import tempfile +import unittest import numpy -from osgeo import gdal import pandas +from osgeo import gdal REGRESSION_DATA = os.path.join( os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'ucm') @@ -79,11 +79,9 @@ def test_ucm_regression_factors(self): for key, expected_value in expected_results.items(): actual_value = float(results_feature.GetField(key)) # These accumulated values (esp. avd_eng_cn) are accumulated - # and may differ past about 4 decimal places. - self.assertAlmostEqual( - actual_value, expected_value, places=4, - msg='%s should be close to %f, actual: %f' % ( - key, expected_value, actual_value)) + # and may differ slightly from expected regression values. + numpy.testing.assert_allclose(actual_value, expected_value, + rtol=1e-4) finally: results_layer = None results_vector = None @@ -110,10 +108,7 @@ def test_ucm_regression_factors(self): # Expected energy savings is an accumulated value and may differ # past about 4 decimal places. - self.assertAlmostEqual( - energy_sav, expected_energy_sav, places=4, msg=( - '%f should be close to %f' % ( - energy_sav, expected_energy_sav))) + numpy.testing.assert_allclose(energy_sav, expected_energy_sav, rtol=1e-4) self.assertEqual(n_nonetype, 119) finally: buildings_layer = None @@ -151,10 +146,8 @@ def test_ucm_regression_factors(self): # These accumulated values are accumulated # and may differ past about 4 decimal places. 
- self.assertAlmostEqual( - energy_sav, expected_energy_sav, places=4, msg=( - '%f should be close to %f' % ( - energy_sav, expected_energy_sav))) + numpy.testing.assert_allclose(energy_sav, expected_energy_sav, + rtol=1e-4) self.assertEqual(n_nonetype, 119) finally: buildings_layer = None @@ -215,11 +208,9 @@ def test_ucm_regression_intensity(self): for key, expected_value in expected_results.items(): actual_value = float(results_feature.GetField(key)) # These accumulated values (esp. avd_eng_cn) are accumulated - # and may differ past about 4 decimal places. - self.assertAlmostEqual( - actual_value, expected_value, places=4, - msg='%s should be close to %f, actual: %f' % ( - key, expected_value, actual_value)) + # and may differ slightly. + numpy.testing.assert_allclose(actual_value, expected_value, + rtol=1e-4) finally: results_layer = None results_vector = None @@ -335,7 +326,8 @@ def test_missing_lulc_value_in_table(self): def test_bad_args(self): """UCM: test validation of bad arguments.""" - from natcap.invest import urban_cooling_model, validation + from natcap.invest import urban_cooling_model + from natcap.invest import validation args = { 'workspace_dir': self.workspace_dir, 'results_suffix': 'test_suffix', From 8f13be5764b60d1098e1b820ab9cde9dc841516d Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 17:18:43 -0700 Subject: [PATCH 43/60] Fixing an exception related to numpy types. This is part of the numpy2 migration. RE:#1641 --- src/natcap/invest/seasonal_water_yield/seasonal_water_yield.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/natcap/invest/seasonal_water_yield/seasonal_water_yield.py b/src/natcap/invest/seasonal_water_yield/seasonal_water_yield.py index 78ec0d12d6..728859a2d7 100644 --- a/src/natcap/invest/seasonal_water_yield/seasonal_water_yield.py +++ b/src/natcap/invest/seasonal_water_yield/seasonal_water_yield.py @@ -1238,7 +1238,7 @@ def cn_op(lulc_array, soil_group_array): # if lulc_array value not in lulc_to_soil[soil_group_id]['lulc_values'] # then numpy.digitize will not bin properly and cause an IndexError # during the reshaping call - lulc_unique = set(numpy.unique(lulc_array)) + lulc_unique = set(i.item() for i in numpy.unique(lulc_array)) if not lulc_unique.issubset(lucodes_set): # cast to list to conform with similar error messages in InVEST missing_lulc_values = sorted(lulc_unique.difference(lucodes_set)) From 41fcb647a3e2a775a0aec1db1d4348c8a4d46109 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 17:24:40 -0700 Subject: [PATCH 44/60] Converting numpy types to python types for numpy2. 
RE:#1641 --- src/natcap/invest/recreation/recmodel_client.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/natcap/invest/recreation/recmodel_client.py b/src/natcap/invest/recreation/recmodel_client.py index c695eef54a..5250eabbe9 100644 --- a/src/natcap/invest/recreation/recmodel_client.py +++ b/src/natcap/invest/recreation/recmodel_client.py @@ -1029,10 +1029,12 @@ def _raster_sum_mean( numpy.array(fid_raster_values['sum']) / numpy.array(fid_raster_values['count'])) predictor_results = dict( - zip(fid_raster_values['fid'], mean_results)) + zip(fid_raster_values['fid'], + (i.item() for i in mean_results))) else: predictor_results = dict( - zip(fid_raster_values['fid'], fid_raster_values['sum'])) + zip(fid_raster_values['fid'], + (i.item() for i in fid_raster_values['sum']))) with open(predictor_target_path, 'w') as jsonfile: json.dump(predictor_results, jsonfile) From b250662ce6a69b18f4dbe8d0a12eba965f54cce8 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 17:27:00 -0700 Subject: [PATCH 45/60] Converting types for numpy2 migration. RE:#1641 --- src/natcap/invest/pollination.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/natcap/invest/pollination.py b/src/natcap/invest/pollination.py index 60def5ec3e..e6b6895592 100644 --- a/src/natcap/invest/pollination.py +++ b/src/natcap/invest/pollination.py @@ -1036,28 +1036,28 @@ def execute(args): # this is YT from the user's guide (y_tot) farm_feature.SetField( _TOTAL_FARM_YIELD_FIELD_ID, - 1 - nu * ( + float(1 - nu * ( 1 - total_farm_results[fid]['sum'] / - float(total_farm_results[fid]['count']))) + float(total_farm_results[fid]['count'])))) # this is PYW ('pdep_y_w') farm_feature.SetField( _POLLINATOR_PROPORTION_FARM_YIELD_FIELD_ID, - (wild_pollinator_yield_aggregate[fid]['sum'] / + float(wild_pollinator_yield_aggregate[fid]['sum'] / float(wild_pollinator_yield_aggregate[fid]['count']))) # this is YW ('y_wild') farm_feature.SetField( _WILD_POLLINATOR_FARM_YIELD_FIELD_ID, - nu * (wild_pollinator_yield_aggregate[fid]['sum'] / - float(wild_pollinator_yield_aggregate[fid]['count']))) + float(nu * (wild_pollinator_yield_aggregate[fid]['sum'] / + float(wild_pollinator_yield_aggregate[fid]['count'])))) # this is PAT ('p_abund') farm_season = farm_feature.GetField(_FARM_SEASON_FIELD) farm_feature.SetField( _POLLINATOR_ABUNDANCE_FARM_FIELD_ID, - pollinator_abundance_results[farm_season][fid]['sum'] / - float(pollinator_abundance_results[farm_season][fid]['count'])) + float(pollinator_abundance_results[farm_season][fid]['sum'] / + float(pollinator_abundance_results[farm_season][fid]['count']))) target_farm_layer.SetFeature(farm_feature) target_farm_layer.SyncToDisk() From ab90c460cd05a18561db70f0d4c8b15d1ed3ebfe Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 17:28:55 -0700 Subject: [PATCH 46/60] Updating FCEE for numpy2 compatibility. 
RE:#1641 --- src/natcap/invest/forest_carbon_edge_effect.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/natcap/invest/forest_carbon_edge_effect.py b/src/natcap/invest/forest_carbon_edge_effect.py index a7569250b3..aa7a394ab8 100644 --- a/src/natcap/invest/forest_carbon_edge_effect.py +++ b/src/natcap/invest/forest_carbon_edge_effect.py @@ -583,13 +583,14 @@ def _aggregate_carbon_map( for poly_feat in target_aggregate_layer: poly_fid = poly_feat.GetFID() poly_feat.SetField( - 'c_sum', serviceshed_stats[poly_fid]['sum']) + 'c_sum', float(serviceshed_stats[poly_fid]['sum'])) # calculates mean pixel value per ha in for each feature in AOI poly_geom = poly_feat.GetGeometryRef() poly_area_ha = poly_geom.GetArea() / 1e4 # converts m^2 to hectare poly_geom = None poly_feat.SetField( - 'c_ha_mean', serviceshed_stats[poly_fid]['sum']/poly_area_ha) + 'c_ha_mean', + float(serviceshed_stats[poly_fid]['sum'] / poly_area_ha)) target_aggregate_layer.SetFeature(poly_feat) target_aggregate_layer.CommitTransaction() From 63fb8883da9ebf44520d78ae1c019f0de3208a3b Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 17:31:58 -0700 Subject: [PATCH 47/60] Using assert_allclose for better numerical comparisons. RE:#1641 --- tests/test_urban_nature_access.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/test_urban_nature_access.py b/tests/test_urban_nature_access.py index 4e5141f0b6..a3d3d7b43c 100644 --- a/tests/test_urban_nature_access.py +++ b/tests/test_urban_nature_access.py @@ -412,7 +412,7 @@ def test_split_urban_nature(self): ) for fieldname, expected_value in expected_values.items(): numpy.testing.assert_allclose( - admin_feature.GetField(fieldname), expected_value) + admin_feature.GetField(fieldname), expected_value, rtol=1e-6) # The sum of the under-and-oversupplied populations should be equal # to the total population count. @@ -603,8 +603,8 @@ def test_radii_by_pop_group(self): set(defn.GetName() for defn in summary_layer.schema), set(expected_field_values.keys())) for fieldname, expected_value in expected_field_values.items(): - self.assertAlmostEqual( - expected_value, summary_feature.GetField(fieldname)) + numpy.testing.assert_allclose( + expected_value, summary_feature.GetField(fieldname), rtol=1e-6) output_dir = os.path.join(args['workspace_dir'], 'output') self._assert_urban_nature(os.path.join( @@ -679,8 +679,8 @@ def test_radii_by_pop_group_exponential_kernal(self): set(defn.GetName() for defn in summary_layer.schema), set(expected_field_values.keys())) for fieldname, expected_value in expected_field_values.items(): - self.assertAlmostEqual( - expected_value, summary_feature.GetField(fieldname)) + numpy.testing.assert_allclose( + expected_value, summary_feature.GetField(fieldname), rtol=1e-6) output_dir = os.path.join(args['workspace_dir'], 'output') self._assert_urban_nature(os.path.join( From 292c3e2d0fa3a54bcd4e6be5b510125a6a6a1064 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Tue, 15 Oct 2024 18:45:08 -0700 Subject: [PATCH 48/60] Cleaning up misc. code from debugging. RE:#1641 --- scripts/invest-autovalidate.py | 2 -- src/natcap/invest/urban_nature_access.py | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/invest-autovalidate.py b/scripts/invest-autovalidate.py index 8e607e104e..d4a1a1a1de 100644 --- a/scripts/invest-autovalidate.py +++ b/scripts/invest-autovalidate.py @@ -82,8 +82,6 @@ def main(sampledatadir): model_warnings = [] # define here in case of uncaught exception. 
try: LOGGER.info('validating %s ', os.path.abspath(datastack_path)) - filepath = r"D:\a\invest\invest\data\invest-sample-data\Annual_Water_Yield\watershed_gura.shp" - LOGGER.info(f"File exists {filepath}: ", os.path.exists(filepath)) model_warnings = getattr( model_module, 'validate')(paramset.args) except AttributeError as err: diff --git a/src/natcap/invest/urban_nature_access.py b/src/natcap/invest/urban_nature_access.py index 4c8261cc5e..136575a956 100644 --- a/src/natcap/invest/urban_nature_access.py +++ b/src/natcap/invest/urban_nature_access.py @@ -2548,6 +2548,7 @@ def _warp_lulc(source_lulc_path, target_lulc_path, target_pixel_size, 'near', target_bb=target_bounding_box, target_projection_wkt=source_raster_info['projection_wkt']) + # if there is no defined nodata, set a default value if target_nodata is None: # Guarantee that our nodata cannot be represented by the datatype - # select a nodata value that's out of range. From 89d236b1f97542e418014b4dd163fea5e8c323ba Mon Sep 17 00:00:00 2001 From: James Douglass Date: Wed, 16 Oct 2024 09:10:16 -0700 Subject: [PATCH 49/60] Removing code left over from debugging. RE:#1641 --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index ecd8eff096..f225958acd 100644 --- a/Makefile +++ b/Makefile @@ -199,7 +199,7 @@ $(GIT_SAMPLE_DATA_REPO_PATH): | $(DATA_DIR) git -C $(GIT_SAMPLE_DATA_REPO_PATH) lfs install git -C $(GIT_SAMPLE_DATA_REPO_PATH) checkout $(GIT_SAMPLE_DATA_REPO_REV) git -C $(GIT_SAMPLE_DATA_REPO_PATH) lfs fetch - git -C $(GIT_SAMPLE_DATA_REPO_PATH) lfs checkout $(GIT_SAMPLE_DATA_REPO_REV) + git -C $(GIT_SAMPLE_DATA_REPO_PATH) lfs checkout $(GIT_TEST_DATA_REPO_PATH): | $(DATA_DIR) -git clone $(GIT_TEST_DATA_REPO) $(GIT_TEST_DATA_REPO_PATH) @@ -207,7 +207,7 @@ $(GIT_TEST_DATA_REPO_PATH): | $(DATA_DIR) git -C $(GIT_TEST_DATA_REPO_PATH) lfs install git -C $(GIT_TEST_DATA_REPO_PATH) checkout $(GIT_TEST_DATA_REPO_REV) git -C $(GIT_TEST_DATA_REPO_PATH) lfs fetch - git -C $(GIT_TEST_DATA_REPO_PATH) lfs checkout $(GIT_TEST_DATA_REPO_REV) + git -C $(GIT_TEST_DATA_REPO_PATH) lfs checkout fetch: $(GIT_UG_REPO_PATH) $(GIT_SAMPLE_DATA_REPO_PATH) $(GIT_TEST_DATA_REPO_PATH) From 75107e75a7dbdedfc9731197e5fe020777499755 Mon Sep 17 00:00:00 2001 From: James Douglass Date: Wed, 16 Oct 2024 09:11:27 -0700 Subject: [PATCH 50/60] Clarifying intent in pyproject.toml RE:#1641 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index aec28f1257..b16e94eead 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,7 +47,7 @@ invest = "natcap.invest.cli:main" requires = [ 'setuptools>=61', 'wheel', 'setuptools_scm>=8.0', 'cython>=3.0.0', 'babel', 'oldest-supported-numpy; python_version<="3.8"', - 'numpy>=2; python_version>"3.8"', # numpy 2 only available for 3.9+ + 'numpy>=2; python_version>="3.9"', # numpy 2 only available for 3.9+ ] build-backend = "setuptools.build_meta" From c163f48ae3a2d2c1178283ed7449133f72cf654b Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Wed, 16 Oct 2024 11:50:36 -0600 Subject: [PATCH 51/60] Update HQ model spec --- src/natcap/invest/habitat_quality.py | 76 +++++++++++++++++++++++++--- 1 file changed, 68 insertions(+), 8 deletions(-) diff --git a/src/natcap/invest/habitat_quality.py b/src/natcap/invest/habitat_quality.py index 40b0c05e6e..d2a244fad4 100644 --- a/src/natcap/invest/habitat_quality.py +++ b/src/natcap/invest/habitat_quality.py @@ -213,36 +213,36 @@ "output": { "type": "directory", "contents": { - 
"deg_sum_out_c.tif": { + "deg_sum_c.tif": { "about": ( "Relative level of habitat degradation on the current " "landscape."), "bands": {1: {"type": "ratio"}} }, - "deg_sum_out_f.tif": { + "deg_sum_f.tif": { "about": ( "Relative level of habitat degradation on the future " "landscape."), "bands": {1: {"type": "ratio"}}, "created_if": "lulc_fut_path" }, - "quality_out_c.tif": { + "quality_c.tif": { "about": ( "Relative level of habitat quality on the current " "landscape."), "bands": {1: {"type": "ratio"}} }, - "quality_out_f.tif": { + "quality_f.tif": { "about": ( "Relative level of habitat quality on the future " "landscape."), "bands": {1: {"type": "ratio"}}, "created_if": "lulc_fut_path" }, - "rarity_out_c.tif": { + "rarity_c.tif": { "about": ( "Relative habitat rarity on the current landscape " - "vis-a-vis the baseline map. The grid cell’s values " + "vis-a-vis the baseline map. The grid cell's values " "are defined between a range of 0 and 1 where 0.5 " "indicates no abundance change between the baseline " "and current or projected map. Values between 0 and 0.5 " @@ -258,10 +258,10 @@ "created_if": "lulc_bas_path", "bands": {1: {"type": "ratio"}} }, - "rarity_out_f.tif": { + "rarity_f.tif": { "about": ( "Relative habitat rarity on the future landscape " - "vis-a-vis the baseline map. The grid cell’s values " + "vis-a-vis the baseline map. The grid cell's values " "are defined between a range of 0 and 1 where 0.5 " "indicates no abundance change between the baseline " "and current or projected map. Values between 0 and " @@ -278,6 +278,66 @@ "created_if": "lulc_bas_path and lulc_fut_path", "bands": {1: {"type": "ratio"}} }, + "rarity_c.csv": { + "about": ("Table of rarity values by LULC code for the " + "current landscape"), + "index_col": "LULC_code", + "columns": { + "LULC_code": { + "type": "number", + "about": "LULC class" + }, + "rarity_value": { + "type": "number", + "about": ( + "Relative habitat rarity on the current landscape " + "vis-a-vis the baseline map. The rarity values " + "are defined between a range of 0 and 1 where 0.5 " + "indicates no abundance change between the baseline " + "and current or projected map. Values between 0 and 0.5 " + "indicate a habitat is more abundant and the closer " + "the value is to 0 the lesser the likelihood that the " + "preservation of that habitat type on the current or " + "future landscape is important to biodiversity conservation. " + "Values between 0.5 and 1 indicate a habitat is less " + "abundant and the closer the value is to 1 the greater " + "the likelihood that the preservation of that habitat " + "type on the current or future landscape is important " + "to biodiversity conservation."), + }, + }, + "created_if": "lulc_bas_path", + }, + "rarity_f.csv": { + "about": ("Table of rarity values by LULC code for the " + "future landscape"), + "index_col": "LULC_code", + "columns": { + "LULC_code": { + "type": "number", + "about": "LULC class" + }, + "rarity_value": { + "type": "number", + "about": ( + "Relative habitat rarity on the future landscape " + "vis-a-vis the baseline map. The rarity values " + "are defined between a range of 0 and 1 where 0.5 " + "indicates no abundance change between the baseline " + "and current or projected map. Values between 0 and 0.5 " + "indicate a habitat is more abundant and the closer " + "the value is to 0 the lesser the likelihood that the " + "preservation of that habitat type on the current or " + "future landscape is important to biodiversity conservation. 
" + "Values between 0.5 and 1 indicate a habitat is less " + "abundant and the closer the value is to 1 the greater " + "the likelihood that the preservation of that habitat " + "type on the current or future landscape is important " + "to biodiversity conservation."), + }, + }, + "created_if": "lulc_bas_path and lulc_fut_path", + }, } }, "intermediate": { From 30db861f617959afb509fd6c95e435cd64e6fe84 Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Wed, 16 Oct 2024 12:23:37 -0600 Subject: [PATCH 52/60] Update HQ model spec with units --- src/natcap/invest/habitat_quality.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/natcap/invest/habitat_quality.py b/src/natcap/invest/habitat_quality.py index d2a244fad4..7b9c66df97 100644 --- a/src/natcap/invest/habitat_quality.py +++ b/src/natcap/invest/habitat_quality.py @@ -289,6 +289,7 @@ }, "rarity_value": { "type": "number", + "units": u.none, "about": ( "Relative habitat rarity on the current landscape " "vis-a-vis the baseline map. The rarity values " @@ -319,6 +320,7 @@ }, "rarity_value": { "type": "number", + "units": u.none, "about": ( "Relative habitat rarity on the future landscape " "vis-a-vis the baseline map. The rarity values " From ca46378cdb2bdbe218b7fb79e1c4fd8a8318d570 Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Wed, 16 Oct 2024 12:32:04 -0600 Subject: [PATCH 53/60] Update HQ model spec with more units --- src/natcap/invest/habitat_quality.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/natcap/invest/habitat_quality.py b/src/natcap/invest/habitat_quality.py index 7b9c66df97..7435a96d51 100644 --- a/src/natcap/invest/habitat_quality.py +++ b/src/natcap/invest/habitat_quality.py @@ -285,7 +285,8 @@ "columns": { "LULC_code": { "type": "number", - "about": "LULC class" + "units": u.none, + "about": "LULC class", }, "rarity_value": { "type": "number", @@ -316,7 +317,8 @@ "columns": { "LULC_code": { "type": "number", - "about": "LULC class" + "units": u.none, + "about": "LULC class", }, "rarity_value": { "type": "number", From 926c1ae388b60586fdb98038668dae1224afbd8a Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Wed, 16 Oct 2024 16:15:39 -0600 Subject: [PATCH 54/60] Update ug revision to match what's on main --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 234e814bbf..f225958acd 100644 --- a/Makefile +++ b/Makefile @@ -10,7 +10,7 @@ GIT_TEST_DATA_REPO_REV := 324abde73e1d770ad75921466ecafd1ec6297752 GIT_UG_REPO := https://github.com/natcap/invest.users-guide GIT_UG_REPO_PATH := doc/users-guide -GIT_UG_REPO_REV := 0404bc5d4d43085cdc58f50f8fc29944b10cefb1 +GIT_UG_REPO_REV := f203ec069f9f03560c9a85b268e67ebb6b994953 ENV = "./env" ifeq ($(OS),Windows_NT) From e809edd4140f4c712ba7f9b2df97973bec2db60b Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Fri, 18 Oct 2024 09:14:14 -0600 Subject: [PATCH 55/60] Update column name, comments, docstring --- src/natcap/invest/habitat_quality.py | 16 ++++++++-------- tests/test_habitat_quality.py | 2 -- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/src/natcap/invest/habitat_quality.py b/src/natcap/invest/habitat_quality.py index 7435a96d51..a2b2f4a76d 100644 --- a/src/natcap/invest/habitat_quality.py +++ b/src/natcap/invest/habitat_quality.py @@ -280,10 +280,10 @@ }, "rarity_c.csv": { "about": ("Table of rarity values by LULC code for the " - "current landscape"), - "index_col": "LULC_code", + "current landscape."), + "index_col": "lulc_code", "columns": { - "LULC_code": { + "lulc_code": 
{ "type": "number", "units": u.none, "about": "LULC class", @@ -312,10 +312,10 @@ }, "rarity_f.csv": { "about": ("Table of rarity values by LULC code for the " - "future landscape"), - "index_col": "LULC_code", + "future landscape."), + "index_col": "lulc_code", "columns": { - "LULC_code": { + "lulc_code": { "type": "number", "units": u.none, "about": "LULC class", @@ -898,7 +898,7 @@ def total_degradation(*arrays): def _compute_rarity_operation( base_lulc_path_band, lulc_path_band, new_cover_path, rarity_raster_path, rarity_csv_path): - """Calculate habitat rarity. + """Calculate habitat rarity and generate raster and CSV output. Output rarity values will be an index from 0 - 1 where: pixel > 0.5 - more rare @@ -983,7 +983,7 @@ def _generate_rarity_csv(rarity_dict, target_csv_path): None """ lulc_codes = sorted(rarity_dict) - cols = ['LULC_code', 'rarity_value'] + cols = ['lulc_code', 'rarity_value'] with open(target_csv_path, 'w') as csvfile: csvfile.write(str(','.join(cols) + '\n')) for lulc_code in lulc_codes: diff --git a/tests/test_habitat_quality.py b/tests/test_habitat_quality.py index ae359f7645..e6312e6985 100644 --- a/tests/test_habitat_quality.py +++ b/tests/test_habitat_quality.py @@ -2114,5 +2114,3 @@ def test_habitat_quality_validate_missing_fut_column(self): header='column', header_name='fut_path') )] self.assertEqual(validate_result, expected) - -# @TODO: ¿add rarity CSV test? From 93eddac6819043bcd2ada2bf0f6b948b33ac1c8d Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Fri, 18 Oct 2024 09:29:55 -0600 Subject: [PATCH 56/60] Use csv.writer to generate CSV output --- src/natcap/invest/habitat_quality.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/natcap/invest/habitat_quality.py b/src/natcap/invest/habitat_quality.py index a2b2f4a76d..c167bca01f 100644 --- a/src/natcap/invest/habitat_quality.py +++ b/src/natcap/invest/habitat_quality.py @@ -1,6 +1,7 @@ # coding=UTF-8 """InVEST Habitat Quality model.""" import collections +import csv import logging import os @@ -983,12 +984,11 @@ def _generate_rarity_csv(rarity_dict, target_csv_path): None """ lulc_codes = sorted(rarity_dict) - cols = ['lulc_code', 'rarity_value'] - with open(target_csv_path, 'w') as csvfile: - csvfile.write(str(','.join(cols) + '\n')) + with open(target_csv_path, 'w', newline='') as csvfile: + writer = csv.writer(csvfile, delimiter=',') + writer.writerow(['lulc_code', 'rarity_value']) for lulc_code in lulc_codes: - row = [str(lulc_code), str(rarity_dict[lulc_code])] - csvfile.write(str(','.join(row) + '\n')) + writer.writerow([lulc_code, rarity_dict[lulc_code]]) def _raster_pixel_count(raster_path_band): From c56523b909be4454a74fb0e2a4ccaea29c75c184 Mon Sep 17 00:00:00 2001 From: Emily Davis Date: Fri, 18 Oct 2024 10:35:15 -0600 Subject: [PATCH 57/60] Update HQ rarity CSV test --- tests/test_habitat_quality.py | 34 ++++++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/tests/test_habitat_quality.py b/tests/test_habitat_quality.py index e6312e6985..2359cfeef1 100644 --- a/tests/test_habitat_quality.py +++ b/tests/test_habitat_quality.py @@ -1,11 +1,11 @@ """Module for Regression Testing the InVEST Habitat Quality model.""" +import csv import os import shutil import tempfile import unittest import numpy -import pandas import pygeoprocessing from osgeo import gdal from osgeo import ogr @@ -246,13 +246,32 @@ def test_habitat_quality_presence_absence_regression(self): # so we should exclude those new nodata pixel values. 
assert_array_sum(raster_path, assert_value, include_nodata=False) - for csv_filename in ['rarity_c_regression.csv', - 'rarity_f_regression.csv']: + # Based on the scenarios used to generate the rasters above, + # rarity values are calculated as follows: + # For LULC 1, rarity = 1.0 - (5000 / (10000 + 5000)) = 0.6667. + # For LULC 2 and 3, rarity = 0.0 because they are not in the baseline. + expected_csv_values = { + 'rarity_c_regression.csv': [ + (1, 0.6667, 4), + (2, 0.0, 0), + ], + 'rarity_f_regression.csv': [ + (1, 0.6667, 4), + (3, 0.0, 0), + ], + } + for csv_filename in expected_csv_values.keys(): csv_path = os.path.join(args['workspace_dir'], csv_filename) - rarity_table = pandas.read_csv(csv_path) - assert 'LULC_code' in rarity_table.columns - self.assertAlmostEqual(rarity_table['rarity_value'].sum(), - 0.6667, 4) + with open(csv_path, newline='') as csvfile: + reader = csv.DictReader(csvfile, delimiter=',') + self.assertEqual(reader.fieldnames, + ['lulc_code', 'rarity_value']) + for (exp_lulc, exp_rarity, + places_to_round) in expected_csv_values[csv_filename]: + row = next(reader) + self.assertEqual(int(row['lulc_code']), exp_lulc) + self.assertAlmostEqual(float(row['rarity_value']), + exp_rarity, places_to_round) def test_habitat_quality_regression_different_projections(self): """Habitat Quality: base regression test with simplified data.""" @@ -1056,7 +1075,6 @@ def test_habitat_quality_case_insensitivty(self): habitat_quality.execute(args) # Reasonable to just check quality out in this case - #assert_array_sum( assert_array_sum( os.path.join(args['workspace_dir'], 'quality_c.tif'), 5852.088) From e1698aba6e06bd2e8bc0c93351d5ff631c494206 Mon Sep 17 00:00:00 2001 From: davemfish Date: Tue, 22 Oct 2024 08:50:23 -0400 Subject: [PATCH 58/60] move a test to be near other related tests. 
#1657 --- tests/test_coastal_vulnerability.py | 84 ++++++++++++++--------------- 1 file changed, 42 insertions(+), 42 deletions(-) diff --git a/tests/test_coastal_vulnerability.py b/tests/test_coastal_vulnerability.py index 0b3be853a2..7cfcd9cb13 100644 --- a/tests/test_coastal_vulnerability.py +++ b/tests/test_coastal_vulnerability.py @@ -1358,6 +1358,48 @@ def test_aoi_invalid_geometry(self): aoi_path, landmass_path, model_resolution, target_vector_path, polygon_pickle, lines_pickle, lines_rtree) + def test_prepare_landmass_invalid_geometry(self): + """CV: test handling invalid geometries in landmass vector.""" + from natcap.invest import coastal_vulnerability + aoi_path = os.path.join(self.workspace_dir, 'aoi.geojson') + srs = osr.SpatialReference() + srs.ImportFromEPSG(26910) # UTM Zone 10N + wkt = srs.ExportToWkt() + + aoi_geometries = [Polygon([ + (-200, -200), (200, -200), (200, 200), (-200, 200), (-200, -200)])] + pygeoprocessing.shapely_geometry_to_vector( + aoi_geometries, aoi_path, wkt, 'GeoJSON') + + landmass_vector_path = os.path.join(self.workspace_dir, 'vector.gpkg') + n_features = make_vector_of_invalid_geoms(landmass_vector_path) + + target_polygon_pickle_path = os.path.join( + self.workspace_dir, 'polygon.pickle') + target_lines_pickle_path = os.path.join( + self.workspace_dir, 'lines.pickle') + target_rtree_path = os.path.join(self.workspace_dir, 'rtree.dat') + # Create rtree files to exercise the function's logic of removing + # pre-exisiting files + target_rtree_path_base = os.path.splitext(target_rtree_path)[0] + open(target_rtree_path, 'a').close() + open(target_rtree_path_base + '.idx', 'a').close() + + model_resolution = 100 + target_vector_path = os.path.join( + self.workspace_dir, 'temp-shore-pts.gpkg') + coastal_vulnerability.prepare_landmass_line_index_and_interpolate_shore_points( + aoi_path, landmass_vector_path, model_resolution, + target_vector_path, target_polygon_pickle_path, + target_lines_pickle_path, target_rtree_path) + + with open(target_polygon_pickle_path, 'rb') as polygon_file: + shapely_geom = pickle.load(polygon_file) + + # Expect 1 input geometry to be skipped, and the rest to be in + # shapely_geom_list. 
+ self.assertTrue(len(shapely_geom.geoms) == n_features - 1) + def test_no_wwiii_coverage(self): """CV: test exception when shore points are outside max wwiii dist.""" from natcap.invest import coastal_vulnerability @@ -1434,48 +1476,6 @@ def test_projected_wwiii_input(self): layer = None vector = None - def test_prepare_landmass_invalid_geometry(self): - """CV: test handling invalid geometries in landmass vector.""" - from natcap.invest import coastal_vulnerability - aoi_path = os.path.join(self.workspace_dir, 'aoi.geojson') - srs = osr.SpatialReference() - srs.ImportFromEPSG(26910) # UTM Zone 10N - wkt = srs.ExportToWkt() - - aoi_geometries = [Polygon([ - (-200, -200), (200, -200), (200, 200), (-200, 200), (-200, -200)])] - pygeoprocessing.shapely_geometry_to_vector( - aoi_geometries, aoi_path, wkt, 'GeoJSON') - - landmass_vector_path = os.path.join(self.workspace_dir, 'vector.gpkg') - n_features = make_vector_of_invalid_geoms(landmass_vector_path) - - target_polygon_pickle_path = os.path.join( - self.workspace_dir, 'polygon.pickle') - target_lines_pickle_path = os.path.join( - self.workspace_dir, 'lines.pickle') - target_rtree_path = os.path.join(self.workspace_dir, 'rtree.dat') - # Create rtree files to exercise the function's logic of removing - # pre-exisiting files - target_rtree_path_base = os.path.splitext(target_rtree_path)[0] - open(target_rtree_path, 'a').close() - open(target_rtree_path_base + '.idx', 'a').close() - - model_resolution = 100 - target_vector_path = os.path.join( - self.workspace_dir, 'temp-shore-pts.gpkg') - coastal_vulnerability.prepare_landmass_line_index_and_interpolate_shore_points( - aoi_path, landmass_vector_path, model_resolution, - target_vector_path, target_polygon_pickle_path, - target_lines_pickle_path, target_rtree_path) - - with open(target_polygon_pickle_path, 'rb') as polygon_file: - shapely_geom = pickle.load(polygon_file) - - # Expect 1 input geometry to be skipped, and the rest to be in - # shapely_geom_list. - self.assertTrue(len(shapely_geom.geoms) == n_features - 1) - def test_clip_project_already_projected_raster(self): """CV: test clip_and_project_raster on an already projected raster.""" from natcap.invest import coastal_vulnerability From 2b956f9f689a79f51a793b28b79eefbdd032b1d1 Mon Sep 17 00:00:00 2001 From: davemfish Date: Tue, 22 Oct 2024 09:29:45 -0400 Subject: [PATCH 59/60] fixed a shapely 2.0 incompatibility; modified a test to cover it. #1657 --- HISTORY.rst | 18 +++++++++++------- src/natcap/invest/coastal_vulnerability.py | 22 ++++++++-------------- tests/test_coastal_vulnerability.py | 9 +++++++-- 3 files changed, 26 insertions(+), 23 deletions(-) diff --git a/HISTORY.rst b/HISTORY.rst index a8bcf05148..71214591b0 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -44,13 +44,6 @@ Unreleased Changes ``pygeoprocessing.get_raster_info`` and ``pygeoprocessing.get_vector_info``. https://github.com/natcap/invest/issues/1645 -* Forest Carbon Edge Effects - * Updating vector reprojection to allow partial reprojection. Related to - https://github.com/natcap/invest/issues/1645 -* Urban Nature Access - * The model now works as expected when the user provides an LULC raster - that does not have a nodata value defined. - https://github.com/natcap/invest/issues/1293 * Workbench * Several small updates to the model input form UI to improve usability and visual consistency (https://github.com/natcap/invest/issues/912). @@ -64,6 +57,13 @@ Unreleased Changes (https://github.com/natcap/invest/issues/1609). 
* Improved error handling when a datastack cannot be saved with relative paths across drives (https://github.com/natcap/invest/issues/1608). +* Coastal Vulnerability + * Fixed a regression where an AOI with multiple features could raise a + TypeError after intersecting with the landmass polygon. + https://github.com/natcap/invest/issues/1657 +* Forest Carbon Edge Effects + * Updating vector reprojection to allow partial reprojection. Related to + https://github.com/natcap/invest/issues/1645 * Habitat Quality * Access raster is now generated from the reprojected access vector (https://github.com/natcap/invest/issues/1615). @@ -72,6 +72,10 @@ Unreleased Changes * Urban Flood Risk * Fields present on the input AOI vector are now retained in the output. (https://github.com/natcap/invest/issues/1600) +* Urban Nature Access + * The model now works as expected when the user provides an LULC raster + that does not have a nodata value defined. + https://github.com/natcap/invest/issues/1293 3.14.2 (2024-05-29) ------------------- diff --git a/src/natcap/invest/coastal_vulnerability.py b/src/natcap/invest/coastal_vulnerability.py index ff30d3a24e..a1637451a6 100644 --- a/src/natcap/invest/coastal_vulnerability.py +++ b/src/natcap/invest/coastal_vulnerability.py @@ -21,6 +21,7 @@ from osgeo import gdal from osgeo import ogr from osgeo import osr +from shapely.geometry import LineString, MultiLineString from shapely.geometry.base import BaseMultipartGeometry from shapely.strtree import STRtree @@ -1104,7 +1105,6 @@ def prepare_landmass_line_index_and_interpolate_shore_points( # Get shapely geometries from landmass landmass_polygon_shapely_list = _ogr_to_geometry_list(landmass_vector_path) landmass_shapely = shapely.ops.unary_union(landmass_polygon_shapely_list) - landmass_polygon_shapely_list = None # store polygon geom for point-in-poly check later in ray-casting @@ -1170,19 +1170,13 @@ def prepare_landmass_line_index_and_interpolate_shore_points( if aoi_shapely_prepped.intersects(landmass_line): intersected_shapely_geom = aoi_shapely.intersection( landmass_line) - if intersected_shapely_geom.geom_type == 'LineString': - lines_in_aoi_list.append(intersected_shapely_geom) - elif intersected_shapely_geom.geom_type == 'MultiLineString': - shapely_geom_explode = [ - shapely.geometry.LineString(x) - for x in intersected_shapely_geom] - - lines_in_aoi_list.extend(shapely_geom_explode) - else: - # intersection could generate a point geom - # or if somehow the intersection is empty, - # type will be GeometryCollection. - continue + # intersection could generate a point geom, + # or if somehow the intersection is empty, + # type will be GeometryCollection. + if isinstance(intersected_shapely_geom, + (LineString, MultiLineString)): + lines_in_aoi_list.extend( + _list_geometry(intersected_shapely_geom)) # if none of the lines were disjoint before this linemerge, # unioned_line will now be a LineString. 
diff --git a/tests/test_coastal_vulnerability.py b/tests/test_coastal_vulnerability.py index 7cfcd9cb13..506d9b478f 100644 --- a/tests/test_coastal_vulnerability.py +++ b/tests/test_coastal_vulnerability.py @@ -1184,17 +1184,22 @@ def test_zero_shorepoints_created(self): def test_aoi_multiple_features(self): """CV: test shore point creation in AOI with multiple features.""" from natcap.invest import coastal_vulnerability - workspace_dir = self.workspace_dir + # workspace_dir = self.workspace_dir + workspace_dir = 'scratch/cv_test' aoi_path = os.path.join(workspace_dir, 'aoi.geojson') srs = osr.SpatialReference() srs.ImportFromEPSG(26910) # UTM Zone 10N wkt = srs.ExportToWkt() + # These two disjoint AOI polygons intersect the same landmass line + # segment. This tests an edge case where a MultiLineString + # geometry is created when landmass lines are clipped by the AOI. poly_a = Polygon([ (-200, -200), (-100, -200), (-100, -100), (-200, -100), (-200, -200)]) poly_b = Polygon([ - (100, 100), (200, 100), (200, 200), (100, 200), (100, 100)]) + (100, -200), (200, -200), (200, -100), (100, -100), + (100, -200)]) pygeoprocessing.shapely_geometry_to_vector( [poly_a, poly_b], aoi_path, wkt, 'GeoJSON') From b5a681b4e2f8d2ac788a72df332066f187d1376f Mon Sep 17 00:00:00 2001 From: davemfish Date: Tue, 22 Oct 2024 09:30:21 -0400 Subject: [PATCH 60/60] reverting a change for debugging. #1657 --- tests/test_coastal_vulnerability.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/test_coastal_vulnerability.py b/tests/test_coastal_vulnerability.py index 506d9b478f..a6e6f60887 100644 --- a/tests/test_coastal_vulnerability.py +++ b/tests/test_coastal_vulnerability.py @@ -1184,8 +1184,7 @@ def test_zero_shorepoints_created(self): def test_aoi_multiple_features(self): """CV: test shore point creation in AOI with multiple features.""" from natcap.invest import coastal_vulnerability - # workspace_dir = self.workspace_dir - workspace_dir = 'scratch/cv_test' + workspace_dir = self.workspace_dir aoi_path = os.path.join(workspace_dir, 'aoi.geojson') srs = osr.SpatialReference()