From cab579d4893a2a7827df00907ebf40a25b31bd44 Mon Sep 17 00:00:00 2001 From: Alessandro Kreslin Date: Thu, 16 Jan 2025 14:36:41 -0500 Subject: [PATCH] feat: add service date range to schema + expose on endpoints (#884) * added service date range --- .../feeds/impl/models/gtfs_dataset_impl.py | 2 + .../feeds/impl/models/latest_dataset_impl.py | 2 + .../models/search_feed_item_result_impl.py | 2 + api/tests/test_data/extra_test_data.json | 16 +- api/tests/test_data/test_datasets.json | 20 +- .../unittest/models/test_basic_feed_impl.py | 4 +- .../unittest/models/test_gtfs_dataset_impl.py | 6 +- .../unittest/models/test_gtfs_feed_impl.py | 6 +- .../models/test_latest_dataset_impl.py | 6 +- .../test_search_feed_item_result_impl.py | 4 + docs/DatabaseCatalogAPI.yaml | 20 ++ liquibase/changelog.xml | 2 + liquibase/changes/feat_880.sql | 2 + liquibase/changes/feat_880_2.sql | 175 ++++++++++++++++++ web-app/src/app/services/feeds/types.ts | 24 +++ 15 files changed, 278 insertions(+), 13 deletions(-) create mode 100644 liquibase/changes/feat_880.sql create mode 100644 liquibase/changes/feat_880_2.sql diff --git a/api/src/feeds/impl/models/gtfs_dataset_impl.py b/api/src/feeds/impl/models/gtfs_dataset_impl.py index 219e97cd2..37644707d 100644 --- a/api/src/feeds/impl/models/gtfs_dataset_impl.py +++ b/api/src/feeds/impl/models/gtfs_dataset_impl.py @@ -49,4 +49,6 @@ def from_orm(cls, gtfs_dataset: Gtfsdataset | None) -> GtfsDataset | None: hash=gtfs_dataset.hash, bounding_box=BoundingBoxImpl.from_orm(gtfs_dataset.bounding_box), validation_report=cls.from_orm_latest_validation_report(gtfs_dataset.validation_reports), + service_date_range_start=gtfs_dataset.service_date_range_start, + service_date_range_end=gtfs_dataset.service_date_range_end, ) diff --git a/api/src/feeds/impl/models/latest_dataset_impl.py b/api/src/feeds/impl/models/latest_dataset_impl.py index 419f04264..82f7809ce 100644 --- a/api/src/feeds/impl/models/latest_dataset_impl.py +++ 
b/api/src/feeds/impl/models/latest_dataset_impl.py @@ -52,6 +52,8 @@ def from_orm(cls, dataset: Gtfsdataset | None) -> LatestDataset | None: hosted_url=dataset.hosted_url, bounding_box=BoundingBoxImpl.from_orm(dataset.bounding_box), downloaded_at=dataset.downloaded_at, + service_date_range_start=dataset.service_date_range_start, + service_date_range_end=dataset.service_date_range_end, hash=dataset.hash, validation_report=validation_report, ) diff --git a/api/src/feeds/impl/models/search_feed_item_result_impl.py b/api/src/feeds/impl/models/search_feed_item_result_impl.py index ab3a98700..274f4d87d 100644 --- a/api/src/feeds/impl/models/search_feed_item_result_impl.py +++ b/api/src/feeds/impl/models/search_feed_item_result_impl.py @@ -44,6 +44,8 @@ def from_orm_gtfs(cls, feed_search_row): hosted_url=feed_search_row.latest_dataset_hosted_url, downloaded_at=feed_search_row.latest_dataset_downloaded_at, hash=feed_search_row.latest_dataset_hash, + service_date_range_start=feed_search_row.latest_dataset_service_date_range_start, + service_date_range_end=feed_search_row.latest_dataset_service_date_range_end, ) if feed_search_row.latest_dataset_id else None, diff --git a/api/tests/test_data/extra_test_data.json b/api/tests/test_data/extra_test_data.json index ff3e65385..921979d85 100644 --- a/api/tests/test_data/extra_test_data.json +++ b/api/tests/test_data/extra_test_data.json @@ -7,7 +7,9 @@ "hosted_url": "https://example.com/dataset-1", "hash": "hash", "downloaded_at": "2024-01-31T00:00:00+00:00", - "feed_stable_id": "mdb-1" + "feed_stable_id": "mdb-1", + "service_date_range_start": "2024-01-01", + "service_date_range_end":"2025-01-01" }, { "id": "dataset-2", @@ -16,7 +18,9 @@ "hosted_url": "https://example.com/dataset-2", "hash": "hash", "downloaded_at": "2024-02-01T00:00:00+00:00", - "feed_stable_id": "mdb-1" + "feed_stable_id": "mdb-1", + "service_date_range_start": "2024-01-01", + "service_date_range_end":"2025-01-01" }, { "id": "dataset-3", @@ -25,7 +29,9 @@ 
"hosted_url": "https://example.com/dataset-3", "hash": "hash", "downloaded_at": "2024-02-02T00:00:00+00:00", - "feed_stable_id": "mdb-10" + "feed_stable_id": "mdb-10", + "service_date_range_start": "2024-01-01", + "service_date_range_end":"2025-01-01" }, { "id": "dataset-4", @@ -34,7 +40,9 @@ "hosted_url": "https://example.com/dataset-4", "hash": "hash", "downloaded_at": "2024-02-03T00:00:00+00:00", - "feed_stable_id": "mdb-10" + "feed_stable_id": "mdb-10", + "service_date_range_start": "2024-01-01", + "service_date_range_end":"2025-01-01" } ], "validation_reports": [ diff --git a/api/tests/test_data/test_datasets.json b/api/tests/test_data/test_datasets.json index 003c6fe5c..9deef005d 100644 --- a/api/tests/test_data/test_datasets.json +++ b/api/tests/test_data/test_datasets.json @@ -8,7 +8,9 @@ "downloaded_at": "2024-02-01T00:00:00Z", "hash": "hash-1", "latest": true, - "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))" + "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))", + "service_date_range_start": "2024-01-01", + "service_date_range_end":"2025-01-01" }, { "id": "dataset-2", @@ -17,7 +19,9 @@ "downloaded_at": "2024-01-01T00:00:00Z", "hash": "hash-2", "latest": false, - "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))" + "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))", + "service_date_range_start": "2024-01-01", + "service_date_range_end":"2025-01-01" }, { "id": "dataset-3", @@ -26,7 +30,9 @@ "downloaded_at": "2024-01-01T00:00:00Z", "hash": "hash-3", "latest": true, - "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))" + "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))", + "service_date_range_start": "2024-01-01", + "service_date_range_end":"2025-01-01" }, { "id": "dataset-4", @@ 
-35,7 +41,9 @@ "downloaded_at": "2024-02-01T00:00:00Z", "hash": "hash-4", "latest": false, - "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))" + "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))", + "service_date_range_start": "2024-01-01", + "service_date_range_end":"2025-01-01" }, { "id": "dataset-5", @@ -44,7 +52,9 @@ "downloaded_at": "2024-01-01T00:00:00Z", "hash": "hash-5", "latest": true, - "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))" + "bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))", + "service_date_range_start": "2024-01-01", + "service_date_range_end":"2025-01-01" } ], "validation_reports": [ diff --git a/api/tests/unittest/models/test_basic_feed_impl.py b/api/tests/unittest/models/test_basic_feed_impl.py index 95881a866..8f23c0539 100644 --- a/api/tests/unittest/models/test_basic_feed_impl.py +++ b/api/tests/unittest/models/test_basic_feed_impl.py @@ -1,6 +1,6 @@ import copy import unittest -from datetime import datetime +from datetime import datetime, date from database_gen.sqlacodegen_models import ( Feed, @@ -64,6 +64,8 @@ downloaded_at="downloaded_at", hash="hash", bounding_box="bounding_box", + service_date_range_start=date(2024, 1, 1), + service_date_range_end=date(2025, 1, 1), validation_reports=[ Validationreport( id="id", diff --git a/api/tests/unittest/models/test_gtfs_dataset_impl.py b/api/tests/unittest/models/test_gtfs_dataset_impl.py index fbd20aee0..d640e89b7 100644 --- a/api/tests/unittest/models/test_gtfs_dataset_impl.py +++ b/api/tests/unittest/models/test_gtfs_dataset_impl.py @@ -1,5 +1,5 @@ import unittest -from datetime import datetime +from datetime import datetime, date from geoalchemy2 import WKTElement @@ -42,6 +42,8 @@ def test_from_orm(self): Validationreport(validator_version="0.2.0"), Validationreport(validator_version="1.1.1"), ], 
+ service_date_range_start=date(2024, 1, 1), + service_date_range_end=date(2025, 1, 1), ) result = GtfsDatasetImpl.from_orm(orm) assert result.id == "stable_id" @@ -56,5 +58,7 @@ def test_from_orm(self): assert result.bounding_box.minimum_longitude == 3.0 assert result.bounding_box.maximum_longitude == 4.0 assert result.validation_report.validator_version == "1.1.1" + assert result.service_date_range_start == date(2024, 1, 1) + assert result.service_date_range_end == date(2025, 1, 1) assert GtfsDatasetImpl.from_orm(None) is None diff --git a/api/tests/unittest/models/test_gtfs_feed_impl.py b/api/tests/unittest/models/test_gtfs_feed_impl.py index 7c902eb59..8667da58f 100644 --- a/api/tests/unittest/models/test_gtfs_feed_impl.py +++ b/api/tests/unittest/models/test_gtfs_feed_impl.py @@ -1,6 +1,6 @@ import copy import unittest -from datetime import datetime +from datetime import datetime, date from geoalchemy2 import WKTElement @@ -87,6 +87,8 @@ def create_test_notice(notice_code: str, total_notices: int, severity: str): note="note", downloaded_at=datetime(year=2022, month=12, day=31, hour=13, minute=45, second=56), hash="hash", + service_date_range_start=date(2024, 1, 1), + service_date_range_end=date(2025, 1, 1), bounding_box=WKTElement(POLYGON, srid=4326), latest=True, validation_reports=[ @@ -169,6 +171,8 @@ def create_test_notice(notice_code: str, total_notices: int, severity: str): unique_warning_count=4, unique_info_count=2, ), + service_date_range_start="2024-01-01", + service_date_range_end="2025-01-01", ), ) diff --git a/api/tests/unittest/models/test_latest_dataset_impl.py b/api/tests/unittest/models/test_latest_dataset_impl.py index 9a31b8843..c0f1fdd22 100644 --- a/api/tests/unittest/models/test_latest_dataset_impl.py +++ b/api/tests/unittest/models/test_latest_dataset_impl.py @@ -1,5 +1,5 @@ import unittest -from datetime import datetime +from datetime import datetime, date from geoalchemy2 import WKTElement @@ -23,6 +23,8 @@ def test_from_orm(self): 
downloaded_at=now, hash="hash", bounding_box=WKTElement(POLYGON, srid=4326), + service_date_range_start=date(2024, 1, 1), + service_date_range_end=date(2025, 1, 1), validation_reports=[ Validationreport(validator_version="1.0.0"), Validationreport( @@ -50,6 +52,8 @@ def test_from_orm(self): minimum_longitude=3.0, maximum_longitude=4.0, ), + service_date_range_start=date(2024, 1, 1), + service_date_range_end=date(2025, 1, 1), validation_report={ "validator_version": "1.2.0", "total_error": 3, diff --git a/api/tests/unittest/models/test_search_feed_item_result_impl.py b/api/tests/unittest/models/test_search_feed_item_result_impl.py index db699ddae..a5c199c6f 100644 --- a/api/tests/unittest/models/test_search_feed_item_result_impl.py +++ b/api/tests/unittest/models/test_search_feed_item_result_impl.py @@ -40,6 +40,8 @@ def __init__(self, **kwargs): latest_dataset_downloaded_at=downloaded_at, latest_dataset_bounding_box=None, latest_dataset_hash="latest_dataset_hash", + latest_dataset_service_date_range_start="2030-10-10", + latest_dataset_service_date_range_end="2031-10-10", external_ids=[], redirect_ids=[], feed_reference_ids=[], @@ -80,6 +82,8 @@ def test_from_orm_gtfs(self): hosted_url=item.latest_dataset_hosted_url, downloaded_at=item.latest_dataset_downloaded_at, hash=item.latest_dataset_hash, + service_date_range_start=item.latest_dataset_service_date_range_start, + service_date_range_end=item.latest_dataset_service_date_range_end, ), ) assert result == expected diff --git a/docs/DatabaseCatalogAPI.yaml b/docs/DatabaseCatalogAPI.yaml index 5c96f72b8..c170fe107 100644 --- a/docs/DatabaseCatalogAPI.yaml +++ b/docs/DatabaseCatalogAPI.yaml @@ -579,6 +579,16 @@ components: description: A hash of the dataset. type: string example: ad3805c4941cd37881ff40c342e831b5f5224f3d8a9a2ec3ac197d3652c78e42 + service_date_range_start: + description: The start date of the service date range for the dataset. 
+ type: string + example: 2023-07-10 + format: date + service_date_range_end: + description: The end date of the service date range for the dataset. + type: string + example: 2023-07-10 + format: date validation_report: type: object properties: @@ -767,6 +777,16 @@ components: $ref: "#/components/schemas/BoundingBox" validation_report: $ref: "#/components/schemas/ValidationReport" + service_date_range_start: + description: The start date of the service date range for the dataset. + type: string + example: 2023-07-10 + format: date + service_date_range_end: + description: The end date of the service date range for the dataset. + type: string + example: 2023-07-10 + format: date BoundingBox: description: Bounding box of the dataset when it was first added to the catalog. diff --git a/liquibase/changelog.xml b/liquibase/changelog.xml index d8242d739..89dfe9d0d 100644 --- a/liquibase/changelog.xml +++ b/liquibase/changelog.xml @@ -34,4 +34,6 @@ + + \ No newline at end of file diff --git a/liquibase/changes/feat_880.sql b/liquibase/changes/feat_880.sql new file mode 100644 index 000000000..bef6f9b86 --- /dev/null +++ b/liquibase/changes/feat_880.sql @@ -0,0 +1,2 @@ +ALTER TABLE gtfsdataset ADD COLUMN service_date_range_start DATE DEFAULT NULL; +ALTER TABLE gtfsdataset ADD COLUMN service_date_range_end DATE DEFAULT NULL; diff --git a/liquibase/changes/feat_880_2.sql b/liquibase/changes/feat_880_2.sql new file mode 100644 index 000000000..b9290b768 --- /dev/null +++ b/liquibase/changes/feat_880_2.sql @@ -0,0 +1,175 @@ +-- Dropping the materialized view if it exists as we cannot update it +DROP MATERIALIZED VIEW IF EXISTS FeedSearch; + +CREATE MATERIALIZED VIEW FeedSearch AS +SELECT + -- feed + Feed.stable_id AS feed_stable_id, + Feed.id AS feed_id, + Feed.data_type, + Feed.status, + Feed.feed_name, + Feed.note, + Feed.feed_contact_email, + -- source + Feed.producer_url, + Feed.authentication_info_url, + Feed.authentication_type, + Feed.api_key_parameter_name, + 
Feed.license_url, + Feed.provider, + Feed.operational_status, + -- official status + Latest_official_status.is_official AS official, + -- latest_dataset + Latest_dataset.id AS latest_dataset_id, + Latest_dataset.hosted_url AS latest_dataset_hosted_url, + Latest_dataset.downloaded_at AS latest_dataset_downloaded_at, + Latest_dataset.bounding_box AS latest_dataset_bounding_box, + Latest_dataset.hash AS latest_dataset_hash, + Latest_dataset.service_date_range_start AS latest_dataset_service_date_range_start, + Latest_dataset.service_date_range_end AS latest_dataset_service_date_range_end, + -- external_ids + ExternalIdJoin.external_ids, + -- redirect_ids + RedirectingIdJoin.redirect_ids, + -- feed gtfs_rt references + FeedReferenceJoin.feed_reference_ids, + -- feed gtfs_rt entities + EntityTypeFeedJoin.entities, + -- locations + FeedLocationJoin.locations, + -- translations + FeedCountryTranslationJoin.translations AS country_translations, + FeedSubdivisionNameTranslationJoin.translations AS subdivision_name_translations, + FeedMunicipalityTranslationJoin.translations AS municipality_translations, + -- full-text searchable document + setweight(to_tsvector('english', coalesce(unaccent(Feed.feed_name), '')), 'C') || + setweight(to_tsvector('english', coalesce(unaccent(Feed.provider), '')), 'C') || + setweight(to_tsvector('english', coalesce(unaccent(( + SELECT string_agg( + coalesce(location->>'country_code', '') || ' ' || + coalesce(location->>'country', '') || ' ' || + coalesce(location->>'subdivision_name', '') || ' ' || + coalesce(location->>'municipality', ''), + ' ' + ) + FROM json_array_elements(FeedLocationJoin.locations) AS location + )), '')), 'A') || + setweight(to_tsvector('english', coalesce(unaccent(( + SELECT string_agg( + coalesce(translation->>'value', ''), + ' ' + ) + FROM json_array_elements(FeedCountryTranslationJoin.translations) AS translation + )), '')), 'A') || + setweight(to_tsvector('english', coalesce(unaccent(( + SELECT string_agg( + 
coalesce(translation->>'value', ''), + ' ' + ) + FROM json_array_elements(FeedSubdivisionNameTranslationJoin.translations) AS translation + )), '')), 'A') || + setweight(to_tsvector('english', coalesce(unaccent(( + SELECT string_agg( + coalesce(translation->>'value', ''), + ' ' + ) + FROM json_array_elements(FeedMunicipalityTranslationJoin.translations) AS translation + )), '')), 'A') AS document +FROM Feed +LEFT JOIN ( + SELECT * + FROM gtfsdataset + WHERE latest = true +) AS Latest_dataset ON Latest_dataset.feed_id = Feed.id AND Feed.data_type = 'gtfs' +LEFT JOIN ( + SELECT + feed_id, + json_agg(json_build_object('external_id', associated_id, 'source', source)) AS external_ids + FROM externalid + GROUP BY feed_id +) AS ExternalIdJoin ON ExternalIdJoin.feed_id = Feed.id +LEFT JOIN ( + SELECT + gtfs_rt_feed_id, + array_agg(FeedReferenceJoinInnerQuery.stable_id) AS feed_reference_ids + FROM FeedReference + LEFT JOIN Feed AS FeedReferenceJoinInnerQuery ON FeedReferenceJoinInnerQuery.id = FeedReference.gtfs_feed_id + GROUP BY gtfs_rt_feed_id +) AS FeedReferenceJoin ON FeedReferenceJoin.gtfs_rt_feed_id = Feed.id AND Feed.data_type = 'gtfs_rt' +LEFT JOIN ( + SELECT + target_id, + json_agg(json_build_object('target_id', target_id, 'comment', redirect_comment)) AS redirect_ids + FROM RedirectingId + GROUP BY target_id +) AS RedirectingIdJoin ON RedirectingIdJoin.target_id = Feed.id +LEFT JOIN ( + SELECT + LocationFeed.feed_id, + json_agg(json_build_object('country', country, 'country_code', country_code, 'subdivision_name', + subdivision_name, 'municipality', municipality)) AS locations + FROM Location + LEFT JOIN LocationFeed ON LocationFeed.location_id = Location.id + GROUP BY LocationFeed.feed_id +) AS FeedLocationJoin ON FeedLocationJoin.feed_id = Feed.id +LEFT JOIN ( + SELECT DISTINCT ON (feed_id) * + FROM officialstatushistory + ORDER BY feed_id, timestamp DESC +) AS Latest_official_status ON Latest_official_status.feed_id = Feed.id +LEFT JOIN ( + SELECT + 
LocationFeed.feed_id, + json_agg(json_build_object('value', Translation.value, 'key', Translation.key)) AS translations + FROM Location + LEFT JOIN Translation ON Location.country = Translation.key + LEFT JOIN LocationFeed ON LocationFeed.location_id = Location.id + WHERE Translation.language_code = 'en' + AND Translation.type = 'country' + AND Location.country IS NOT NULL + GROUP BY LocationFeed.feed_id +) AS FeedCountryTranslationJoin ON FeedCountryTranslationJoin.feed_id = Feed.id +LEFT JOIN ( + SELECT + LocationFeed.feed_id, + json_agg(json_build_object('value', Translation.value, 'key', Translation.key)) AS translations + FROM Location + LEFT JOIN Translation ON Location.subdivision_name = Translation.key + LEFT JOIN LocationFeed ON LocationFeed.location_id = Location.id + WHERE Translation.language_code = 'en' + AND Translation.type = 'subdivision_name' + AND Location.subdivision_name IS NOT NULL + GROUP BY LocationFeed.feed_id +) AS FeedSubdivisionNameTranslationJoin ON FeedSubdivisionNameTranslationJoin.feed_id = Feed.id +LEFT JOIN ( + SELECT + LocationFeed.feed_id, + json_agg(json_build_object('value', Translation.value, 'key', Translation.key)) AS translations + FROM Location + LEFT JOIN Translation ON Location.municipality = Translation.key + LEFT JOIN LocationFeed ON LocationFeed.location_id = Location.id + WHERE Translation.language_code = 'en' + AND Translation.type = 'municipality' + AND Location.municipality IS NOT NULL + GROUP BY LocationFeed.feed_id +) AS FeedMunicipalityTranslationJoin ON FeedMunicipalityTranslationJoin.feed_id = Feed.id +LEFT JOIN ( + SELECT + feed_id, + array_agg(entity_name) AS entities + FROM EntityTypeFeed + GROUP BY feed_id +) AS EntityTypeFeedJoin ON EntityTypeFeedJoin.feed_id = Feed.id AND Feed.data_type = 'gtfs_rt' +; + + +-- This index allows concurrent refresh on the materialized view avoiding table locks +CREATE UNIQUE INDEX idx_unique_feed_id ON FeedSearch(feed_id); + +-- Indices for feedsearch view optimization 
+CREATE INDEX feedsearch_document_idx ON FeedSearch USING GIN(document); +CREATE INDEX feedsearch_feed_stable_id ON FeedSearch(feed_stable_id); +CREATE INDEX feedsearch_data_type ON FeedSearch(data_type); +CREATE INDEX feedsearch_status ON FeedSearch(status); diff --git a/web-app/src/app/services/feeds/types.ts b/web-app/src/app/services/feeds/types.ts index 84e2ac029..35051f730 100644 --- a/web-app/src/app/services/feeds/types.ts +++ b/web-app/src/app/services/feeds/types.ts @@ -276,6 +276,18 @@ export interface components { * @example ad3805c4941cd37881ff40c342e831b5f5224f3d8a9a2ec3ac197d3652c78e42 */ hash?: string; + /** + * Format: date + * @description The start date of the service date range for the dataset. + * @example "2023-07-10T00:00:00.000Z" + */ + service_date_range_start?: string; + /** + * Format: date + * @description The end date of the service date range for the dataset. + * @example "2023-07-10T00:00:00.000Z" + */ + service_date_range_end?: string; validation_report?: { /** @example 10 */ total_error?: number; @@ -401,6 +413,18 @@ export interface components { hash?: string; bounding_box?: components['schemas']['BoundingBox']; validation_report?: components['schemas']['ValidationReport']; + /** + * Format: date + * @description The start date of the service date range for the dataset. + * @example "2023-07-10T00:00:00.000Z" + */ + service_date_range_start?: string; + /** + * Format: date + * @description The end date of the service date range for the dataset. + * @example "2023-07-10T00:00:00.000Z" + */ + service_date_range_end?: string; }; /** @description Bounding box of the dataset when it was first added to the catalog. */ BoundingBox: {