Skip to content

Commit

Permalink
Multiprocessing implementation.
Browse files Browse the repository at this point in the history
  • Loading branch information
romainsacchi committed Jul 29, 2023
1 parent 7a361bc commit fd0716e
Show file tree
Hide file tree
Showing 18 changed files with 946 additions and 3,118 deletions.
Binary file modified dev/profile.prof
Binary file not shown.
14 changes: 12 additions & 2 deletions dev/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,18 @@

scenarios = [
{"model": "remind", "pathway": "SSP1-Base", "year": 2005},
{"model": "image", "pathway": "SSP2-Base", "year": 2100},
{"model": "remind", "pathway": "SSP2-Base", "year": 2010},
{"model": "remind", "pathway": "SSP5-Base", "year": 2015},
{"model": "remind", "pathway": "SSP5-Base", "year": 2020},
{"model": "remind", "pathway": "SSP5-Base", "year": 2025},
{"model": "remind", "pathway": "SSP5-Base", "year": 2030},
{"model": "remind", "pathway": "SSP5-Base", "year": 2040},
{"model": "remind", "pathway": "SSP5-Base", "year": 2050},
{"model": "remind", "pathway": "SSP5-Base", "year": 2060},
{"model": "remind", "pathway": "SSP5-Base", "year": 2070},
{"model": "remind", "pathway": "SSP5-Base", "year": 2080},
{"model": "remind", "pathway": "SSP5-Base", "year": 2090},
{"model": "remind", "pathway": "SSP5-Base", "year": 2100},
]

ndb = NewDatabase(
Expand All @@ -18,4 +28,4 @@
)

ndb.update_all()
ndb.write_datapackage(name="my_dp")
ndb.write_superstructure_db_to_brightway(name="my_dp")
2,524 changes: 89 additions & 2,435 deletions dev/test_premise.ipynb

Large diffs are not rendered by default.

49 changes: 30 additions & 19 deletions premise/cement.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,20 +16,31 @@

from .transformation import BaseTransformation, Dict, IAMDataCollection, List, np, ws
from .utils import DATA_DIR
from .logger import create_logger

LOG_CONFIG = DATA_DIR / "utils" / "logging" / "logconfig.yaml"
# directory for log files
DIR_LOG_REPORT = Path.cwd() / "export" / "logs"
# if DIR_LOG_REPORT folder does not exist
# we create it
if not Path(DIR_LOG_REPORT).exists():
Path(DIR_LOG_REPORT).mkdir(parents=True, exist_ok=True)
logger = create_logger("cement")

with open(LOG_CONFIG, "r") as f:
config = yaml.safe_load(f.read())
logging.config.dictConfig(config)

logger = logging.getLogger("cement")
def _update_cement(scenario, version, system_model, modified_datasets):
    """Apply the cement-sector transformation to one scenario.

    Instantiates a ``Cement`` transformation from the scenario's database and
    IAM data, runs it when the IAM data provides cement markets, and hands back
    the (possibly updated) scenario together with the modified-datasets record.

    :param scenario: scenario dict holding ``database``, ``model``,
        ``pathway``, ``iam data`` and ``year`` entries.
    :param version: ecoinvent database version.
    :param system_model: ecoinvent system model (e.g. cut-off, consequential).
    :param modified_datasets: record of datasets altered so far.
    :return: tuple ``(scenario, modified_datasets)``.
    """
    transformation = Cement(
        database=scenario["database"],
        model=scenario["model"],
        pathway=scenario["pathway"],
        iam_data=scenario["iam data"],
        year=scenario["year"],
        version=version,
        system_model=system_model,
        modified_datasets=modified_datasets,
    )

    # Nothing to do if the IAM data carries no cement markets.
    if scenario["iam data"].cement_markets is None:
        print("No cement markets found in IAM data. Skipping.")
        return scenario, modified_datasets

    transformation.add_datasets_to_database()
    scenario["database"] = transformation.database
    return scenario, transformation.modified_datasets


class Cement(BaseTransformation):
Expand Down Expand Up @@ -453,9 +464,9 @@ def add_datasets_to_database(self) -> None:
:return: Does not return anything. Modifies in place.
"""

print("Start integration of cement data...")
#print("Start integration of cement data...")

print("Create new clinker production datasets and delete old datasets")
#print("Create new clinker production datasets and delete old datasets")

clinker_prod_datasets = list(self.build_clinker_production_datasets().values())
self.database.extend(clinker_prod_datasets)
Expand All @@ -475,7 +486,7 @@ def add_datasets_to_database(self) -> None:
)
)

print("Create new clinker market datasets and delete old datasets")
#print("Create new clinker market datasets and delete old datasets")
clinker_market_datasets = list(
self.fetch_proxies(
name="market for clinker",
Expand All @@ -501,7 +512,7 @@ def add_datasets_to_database(self) -> None:
)
)

print("Create new cement market datasets")
#print("Create new cement market datasets")

# cement markets
markets = ws.get_many(
Expand Down Expand Up @@ -542,10 +553,10 @@ def add_datasets_to_database(self) -> None:

self.database.extend(new_datasets)

print(
"Create new cement production datasets and "
"adjust electricity consumption"
)
#print(
# "Create new cement production datasets and "
# "adjust electricity consumption"
#)
# cement production
production = ws.get_many(
self.database,
Expand Down
10 changes: 0 additions & 10 deletions premise/data/fuels/fuel_markets.yml
Original file line number Diff line number Diff line change
@@ -1,8 +1,3 @@
petrol, unleaded:
name: market for petrol, unleaded
reference product: petrol, unleaded
unit: kilogram
lhv: 42.6
petrol, low-sulfur:
name: market for petrol, low-sulfur
reference product: petrol, low-sulfur
Expand All @@ -13,11 +8,6 @@ diesel, low-sulfur:
reference product: diesel, low-sulfur
unit: kilogram
lhv: 43
diesel:
name: market for diesel
reference product: diesel
unit: kilogram
lhv: 43
natural gas:
name: market for natural gas, high pressure
reference product: natural gas, high pressure
Expand Down
2 changes: 2 additions & 0 deletions premise/data_collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -756,6 +756,8 @@ def __fetch_market_data(

# back-fill nans
market_data = market_data.bfill(dim="year")
# fill NaNs with zeros
market_data = market_data.fillna(0)

return market_data

Expand Down
37 changes: 24 additions & 13 deletions premise/direct_air_capture.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,20 +10,10 @@
import yaml

from .utils import DATA_DIR
from .logger import create_logger

LOG_CONFIG = DATA_DIR / "utils" / "logging" / "logconfig.yaml"
# directory for log files
DIR_LOG_REPORT = Path.cwd() / "export" / "logs"
# if DIR_LOG_REPORT folder does not exist
# we create it
if not Path(DIR_LOG_REPORT).exists():
Path(DIR_LOG_REPORT).mkdir(parents=True, exist_ok=True)
logger = create_logger("dac")

with open(LOG_CONFIG, "r") as f:
config = yaml.safe_load(f.read())
logging.config.dictConfig(config)

logger = logging.getLogger("dac")

import numpy as np

Expand All @@ -47,6 +37,27 @@ def fetch_mapping(filepath: str) -> dict:
return mapping


def _update_dac(scenario, version, system_model, modified_datasets):
    """Apply the direct-air-capture transformation to one scenario.

    Instantiates a ``DirectAirCapture`` transformation from the scenario's
    database and IAM data, generates the DAC activities when the IAM data
    provides DAC markets, and hands back the (possibly updated) scenario
    together with the modified-datasets record.

    :param scenario: scenario dict holding ``database``, ``model``,
        ``pathway``, ``iam data`` and ``year`` entries.
    :param version: ecoinvent database version.
    :param system_model: ecoinvent system model (e.g. cut-off, consequential).
    :param modified_datasets: record of datasets altered so far.
    :return: tuple ``(scenario, modified_datasets)``.
    """
    transformation = DirectAirCapture(
        database=scenario["database"],
        iam_data=scenario["iam data"],
        model=scenario["model"],
        pathway=scenario["pathway"],
        year=scenario["year"],
        version=version,
        system_model=system_model,
        modified_datasets=modified_datasets,
    )

    # Nothing to do if the IAM data carries no DAC markets.
    if scenario["iam data"].dac_markets is None:
        print("No DAC markets found in IAM data. Skipping.")
        return scenario, modified_datasets

    transformation.generate_dac_activities()
    scenario["database"] = transformation.database
    return scenario, transformation.modified_datasets

class DirectAirCapture(BaseTransformation):
"""
Class that modifies DAC and DACCS inventories and markets
Expand Down Expand Up @@ -94,7 +105,7 @@ def generate_dac_activities(self) -> None:
modifies the original datasets to include the heat source, and adds the modified datasets to the database.
"""
print("Generate region-specific direct air capture processes.")
#print("Generate region-specific direct air capture processes.")

# get original dataset
for ds_list in self.carbon_storage.values():
Expand Down
Loading

0 comments on commit fd0716e

Please sign in to comment.