
Inherit much of geojson parser from GMT #8

Merged: 12 commits, Feb 4, 2025
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -29,7 +29,7 @@ repos:
types_or: [yaml, markdown, css, scss]
# https://docs.astral.sh/ruff/integrations/#pre-commit
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.6.9
rev: v0.9.4
hooks:
# Run the linter
- id: ruff
892 changes: 666 additions & 226 deletions poetry.lock

Large diffs are not rendered by default.

20 changes: 9 additions & 11 deletions pyproject.toml
@@ -10,23 +10,21 @@ packages = [{ include = "urbanopt_des", from = "." }]
python = ">=3.10,<3.13"
geopandas = "^1.0.1"
# release mode
modelica-builder = "^0.6.0"
# geojson-modelica-translator = "^0.9.1"
# pre-release mode, use github
# modelica-builder = { git = "https://github.com/urbanopt/modelica-builder.git", branch = "develop" }
geojson-modelica-translator = { git = "https://github.com/urbanopt/geojson-modelica-translator.git", branch = "develop" }
# dev mode
# modelica-builder = { path = "../modelica-builder", develop = true }
buildingspy = "^5.1.0"
# geojson-modelica-translator = { path = "../geojson-modelica-translator", develop = true }

[tool.poetry.group.dev.dependencies]
pre-commit = "^3.7.0"
autopep8 = "~2.0"
coveralls = "~3.3"
mypy = "~1.6"
pytest = "~7.4"
pytest-cov = "~4.1"
pre-commit = "~=4.0"
coveralls = "~=4.0"
mypy = "~1.14"
pytest = "~=8.3"
pytest-cov = "~6.0"

[build-system]
requires = ["poetry-core"]
requires = ["poetry-core>=2.0.1"]
build-backend = "poetry.core.masonry.api"

# TODO: add in other Ruff configs from CBL or GMT
Binary file modified tests/data/DistrictEnergySystem.mat.zip
Binary file not shown.
4 changes: 2 additions & 2 deletions tests/test_geojson.py
@@ -1,7 +1,7 @@
import unittest
from pathlib import Path

from urbanopt_des.urbanopt_geojson import URBANoptGeoJSON
from urbanopt_des.urbanopt_geojson import DESGeoJSON
Collaborator (Author):
I renamed the uo-des geojson parsing class so it doesn't conflict with the one from the GMT. Should it be different? Happy to change the name to something else if that makes more sense.

Member:
I think we should move all the methods out of DESGeoJSON... looks like the only one remaining is create_aggregated_representation. Can we move that over to GMT's GeoJSON?
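
A minimal sketch of that direction, assuming GMT exposes its parser as `UrbanOptGeoJson` under `geojson_modelica_translator.geojson.urbanopt_geojson` (the import path, class name, and method signature are assumptions, not confirmed by this diff):

```python
# Hypothetical sketch: DESGeoJSON inherits all parsing from GMT's class
# and keeps only the one DES-specific method (assumed import path).
from geojson_modelica_translator.geojson.urbanopt_geojson import UrbanOptGeoJson


class DESGeoJSON(UrbanOptGeoJson):
    """DES wrapper that reuses GMT's geojson parsing wholesale."""

    def create_aggregated_representation(self) -> dict:
        """Placeholder; signature illustrative. The real logic would live
        here, or move into GMT's GeoJSON as the comment above proposes."""
        raise NotImplementedError
```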



class GeoJsonTest(unittest.TestCase):
@@ -12,7 +12,7 @@ def setUp(self):
def test_load_geojson(self):
"""Simple test to make sure we can load the geojson file"""
filename = self.data_dir / "nrel_campus.json"
geojson = URBANoptGeoJSON(filename)
geojson = DESGeoJSON(filename)

assert "Outdoor Test Facility" in geojson.get_building_names()
assert "Research Support Facility" in geojson.get_building_names()
13 changes: 9 additions & 4 deletions tests/test_urbanopt_des.py
@@ -40,10 +40,9 @@ def test_resample_and_convert_to_df(self):
(self.output_dir / f"power_{interval}min.csv").unlink()

data = ModelicaResults(mat_filename, self.output_dir)
data.resample_and_convert_to_df()
# This test file for some reason is missing several hours. Eventually
# use a new data file
self.assertEqual(data.min_60.shape[0], 8751)
data.resample_and_convert_to_df(["all_buildings"])
# Should have hourly data for a full year (8760 intervals plus the initial timestep)
self.assertEqual(data.min_60.shape[0], 8761)

# save the dataframes
data.save_dataframes()
@@ -54,3 +53,9 @@ def test_resample_and_convert_to_df(self):

# check the sum of Total Boilers; expected ~1.3766e9, asserted within 1e8 of 1.4e9
self.assertAlmostEqual(data.min_60["Total Boilers"].sum(), 1400000000, delta=1e8)

# verify that the ETS Pump Electricity is reported in this model since
# the 4G systems should have ETS pumps now (which are in the building)
self.assertTrue("ETS Pump Electricity Total" in data.min_60.columns)
print(data.min_60["ETS Pump Electricity Total"].sum())
self.assertGreater(data.min_60["ETS Pump Electricity Total"].sum(), 0)
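
For context, the flow this test exercises looks roughly like the following (paths are placeholders; the `["all_buildings"]` id mirrors the test above):

```python
from pathlib import Path

from urbanopt_des.modelica_results import ModelicaResults

# Placeholder paths; the test uses tests/data/DistrictEnergySystem.mat(.zip).
mat_filename = Path("tests/data/DistrictEnergySystem.mat")
output_dir = Path("tests/output")

data = ModelicaResults(mat_filename, output_dir)
# Pass the building ids so per-building ETS columns are extracted.
data.resample_and_convert_to_df(["all_buildings"])

# Hourly results span a full year: 8760 intervals plus the initial timestep.
assert data.min_60.shape[0] == 8761

# Persist the 5/15/60-minute dataframes, as the test does.
data.save_dataframes()
```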
49 changes: 31 additions & 18 deletions urbanopt_des/modelica_results.py
@@ -1,7 +1,6 @@
import json
from datetime import datetime, timedelta
from pathlib import Path
from typing import Union

import numpy as np
import pandas as pd
@@ -217,18 +216,18 @@ def retrieve_variable_data(self, variable_name: str, len_of_time: int, default_v

def resample_and_convert_to_df(
self,
building_ids: Union[list[str], None] = None,
other_vars: Union[list[str], None] = None,
building_ids: list[str] | None = None,
other_vars: list[str] | None = None,
year_of_data: int = 2017,
) -> None:
"""The Modelica data (self.modelica_data) are stored in a Reader object and the timesteps are non ideal for comparison across models. The method handles
a very specific set of variables which are extracted from the Reader object. After the data are stored in a DataFrame with the correct timesteps and units,
then the data will be resampled to 5min, 15min, and 60min.

Args:
building_ids (Union[list[str], None], optional): Name of the buildings to process out of the Modelica data. Defaults to None.
other_vars (Union[list[str], None], optional): Other variables to extract and store in the dataframe. Defaults to None.
year_of_data (int, optional): Year of the data, should match the URBANopt/OpenStudio/EnergyPlus value and correct starting day of week. Defaults to 2017.
building_ids (list[str] | None): Name of the buildings to process out of the Modelica data. Defaults to None.
other_vars (list[str] | None): Other variables to extract and store in the dataframe. Defaults to None.
year_of_data (int): Year of the data, should match the URBANopt/OpenStudio/EnergyPlus value and correct starting day of week. Defaults to 2017.

Raises:
Exception: errors
@@ -319,7 +318,6 @@ def resample_and_convert_to_df(
boiler_data: dict[str, list[float]] = {}
# 1. get the variables of all the boilers
boiler_vars = self.modelica_data.varNames(r"heaPla.*boiHotWat.boi.\d..QFue_flow")
print(boiler_vars)
# 2. get the data for all the boilers or default to 1 boiler set to 0
if len(boiler_vars) > 0:
for var_id, boiler_var in enumerate(boiler_vars):
@@ -332,7 +330,6 @@

# Other heating plant data
heating_plant_pumps: dict[str, list[float]] = {}

# 1. get the variables of all the heating hot water pumps, e.g., heaPla*.pumHW.P[1]
heating_plant_pumps_vars = self.modelica_data.varNames(r"heaPla.*pumHW.P.\d.")
# 2. get the data for all the pumps or default to 1 pump set to 0
@@ -351,30 +348,46 @@

agg_columns: dict[str, list[str]] = {
"ETS Heat Pump Electricity Total": [],
"ETS Pump CHW Electricity Total": [],
"ETS Pump HHW Electricity Total": [],
"ETS Pump Electricity Total": [],
"ETS Thermal Cooling Total": [],
"ETS Thermal Heating Total": [],
}
for n_b in range(1, n_buildings + 1):
# get the building name
# get the building name as this is what is in the Modelica results
building_id = building_ids[n_b - 1]
# Note that these P.*.u variables do not have units defined in the vars, but they are Watts
ets_pump_data = self.retrieve_variable_data(f"PPumETS.u[{n_b}]", len(time1))

# ETS heat pump power
ets_hp_data = self.retrieve_variable_data(f"PHeaPump.u[{n_b}]", len(time1))

# Thermal Energy to buildings
# ETS pump data - disFloCoo is on the building_id, not the building number.
ets_pump_data = self.retrieve_variable_data(f"PPumETS.u[{n_b}]", len(time1))  # this is the ambient / 5G pump
ets_pump_chw_data = self.retrieve_variable_data(f"TimeSerLoa_{building_id}.disFloCoo.PPum", len(time1))
ets_pump_hhw_data = self.retrieve_variable_data(f"TimeSerLoa_{building_id}.disFloHea.PPum", len(time1))

# Thermal energy to buildings
ets_q_cooling = self.retrieve_variable_data(f"bui[{n_b}].QCoo_flow", len(time1))
ets_q_heating = self.retrieve_variable_data(f"bui[{n_b}].QHea_flow", len(time1))

agg_columns["ETS Pump Electricity Total"].append(f"ETS Pump Electricity Building {building_id}")
agg_columns["ETS Heat Pump Electricity Total"].append(f"ETS Heat Pump Electricity Building {building_id}")
agg_columns["ETS Thermal Cooling Total"].append(f"ETS Thermal Cooling Building {building_id}")
agg_columns["ETS Thermal Heating Total"].append(f"ETS Thermal Heating Building {building_id}")
building_data[f"ETS Pump Electricity Building {building_id}"] = ets_pump_data
building_data[f"ETS Pump CHW Electricity Building {building_id}"] = ets_pump_chw_data
building_data[f"ETS Pump HHW Electricity Building {building_id}"] = ets_pump_hhw_data
building_data[f"ETS Heat Pump Electricity Building {building_id}"] = ets_hp_data
building_data[f"ETS Thermal Cooling Building {building_id}"] = ets_q_cooling
building_data[f"ETS Thermal Heating Building {building_id}"] = ets_q_heating

# Add variables to the aggregations - these keys must also be defined in agg_columns above.
# ETS Pump has CHW and HHW subtotals plus a total; the total includes the ambient, CHW, and HHW pumps.
agg_columns["ETS Heat Pump Electricity Total"].append(f"ETS Heat Pump Electricity Building {building_id}")
agg_columns["ETS Pump CHW Electricity Total"].append(f"ETS Pump CHW Electricity Building {building_id}")
agg_columns["ETS Pump HHW Electricity Total"].append(f"ETS Pump CHW Electricity Building {building_id}")
agg_columns["ETS Pump Electricity Total"].append(f"ETS Pump Electricity Building {building_id}")
agg_columns["ETS Pump Electricity Total"].append(f"ETS Pump CHW Electricity Building {building_id}")
agg_columns["ETS Pump Electricity Total"].append(f"ETS Pump HHW Electricity Building {building_id}")
agg_columns["ETS Thermal Cooling Total"].append(f"ETS Thermal Cooling Building {building_id}")
agg_columns["ETS Thermal Heating Total"].append(f"ETS Thermal Heating Building {building_id}")

# Add in chiller aggregations
agg_columns["Chillers Total"] = []
for n_c in range(1, len(chiller_data.keys()) + 1):
@@ -480,7 +493,7 @@

def combine_with_openstudio_results(
self,
building_ids: Union[list[str], None],
building_ids: list[str] | None,
openstudio_df: pd.DataFrame,
openstudio_df_15: pd.DataFrame,
) -> None:
Expand All @@ -489,7 +502,7 @@ def combine_with_openstudio_results(
HVAC related.

Args:
building_ids (Union[list[str], None]): Name of the buildings
building_ids (list[str] | None): Name of the buildings
openstudio_df (pd.DataFrame): dataframe of URBANopt/OpenStudio hourly results
openstudio_df_15 (pd.DataFrame): dataframe of URBANopt/OpenStudio 15min results
Returns:
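
The `agg_columns` pattern added above maps each "Total" column to the per-building columns that feed it. A standalone sketch of how those totals are then computed (column names illustrative, not code from this PR):

```python
import pandas as pd

# Per-building columns, as built in resample_and_convert_to_df (names illustrative).
building_data = pd.DataFrame(
    {
        "ETS Pump Electricity Building B1": [0.2, 0.3],  # ambient / 5G pump
        "ETS Pump CHW Electricity Building B1": [1.0, 2.0],
        "ETS Pump HHW Electricity Building B1": [0.5, 0.5],
    }
)

# The total aggregates the ambient, CHW, and HHW pump columns.
agg_columns = {
    "ETS Pump Electricity Total": [
        "ETS Pump Electricity Building B1",
        "ETS Pump CHW Electricity Building B1",
        "ETS Pump HHW Electricity Building B1",
    ],
}

# Each "Total" is the row-wise sum of its member columns.
for total, members in agg_columns.items():
    building_data[total] = building_data[members].sum(axis=1)

print(building_data["ETS Pump Electricity Total"].tolist())  # [1.7, 2.8]
```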
17 changes: 7 additions & 10 deletions urbanopt_des/urbanopt_analysis.py
@@ -4,13 +4,12 @@
import json
import math
from pathlib import Path
from typing import Tuple, Union

import pandas as pd

from .emissions import HourlyEmissionsData
from .modelica_results import ModelicaResults
from .urbanopt_geojson import URBANoptGeoJSON
from .urbanopt_geojson import DESGeoJSON
from .urbanopt_results import URBANoptResults


@@ -35,7 +34,7 @@ def __init__(self, geojson_file: Path, analysis_dir: Path, year_of_data: int = 2
"""
self.geojson_file = geojson_file
if geojson_file.exists():
self.geojson = URBANoptGeoJSON(geojson_file)
self.geojson = DESGeoJSON(geojson_file)
else:
raise Exception(f"GeoJSON file does not exist: {geojson_file}")

@@ -276,8 +275,6 @@ def resample_actual_data(self) -> None:
for building_id in self.geojson.get_building_ids():
meters = self.geojson.get_meters_for_building(building_id)
for meter in meters:
# print(f"Processing meter {meter} for building {building_id}")

meter_readings = self.geojson.get_meter_readings_for_building(building_id, meter)
# add the meter_type to all the json objects
[meter_reading.update({"meter_type": meter, "building_id": building_id}) for meter_reading in meter_readings]
@@ -342,8 +339,8 @@

def resample_and_convert_modelica_results(
self,
building_ids: Union[list[str], None] = None,
other_vars: Union[list[str], None] = None,
building_ids: list[str] | None = None,
other_vars: list[str] | None = None,
) -> None:
"""Run the resample and convert method for each of the analyses in the modelica object

@@ -756,7 +753,7 @@ def update_geojson_from_seed_data(self, **kwargs) -> dict:
}

new_dict = None
# load the GeoJSON file as a dictionary, NOT an URBANoptGeoJSON object.
# load the GeoJSON file as a dictionary, NOT a DESGeoJSON object.
with open(self.geojson_file) as f:
geojson = json.load(f)
# insert project dict and move to after the type object
@@ -1168,7 +1165,7 @@ def create_summary_results(self) -> None:
return True

@classmethod
def get_list_of_valid_result_folders(cls, root_analysis_path: Path) -> Tuple[dict, dict]:
def get_list_of_valid_result_folders(cls, root_analysis_path: Path) -> tuple[dict, dict]:
"""Parse through the root_analysis_path and return a dict of valid
result folders that can be loaded and processed. Also return dict of
folders that have simulation errors or empty results
@@ -1177,7 +1174,7 @@ def get_list_of_valid_result_folders(cls, root_analysis_path: Path) -> Tuple[dic
root_analysis_path (Path): Analysis folder to analyze.

Returns:
Tuple[list, list]: Tuple of lists, first is a dict of valid results, second list is bad or empty results
tuple[dict, dict]: the first dict contains valid results; the second contains folders with bad or empty results
"""
results = {}
bad_or_empty_results = {}
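
Hypothetical usage of this classmethod, assuming the enclosing class is exported as `URBANoptAnalysis` (the class name is not visible in this diff, and the path is a placeholder):

```python
from pathlib import Path

# Assumed class name; only the module path urbanopt_des.urbanopt_analysis is shown above.
from urbanopt_des.urbanopt_analysis import URBANoptAnalysis

valid, bad_or_empty = URBANoptAnalysis.get_list_of_valid_result_folders(
    Path("path/to/analysis_root")
)
print(f"{len(valid)} valid result folders; {len(bad_or_empty)} bad or empty")
```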