Skip to content

Commit

Permalink
Migrate from datapackage to frictionless
Browse files Browse the repository at this point in the history
Re #491
  • Loading branch information
soininen committed Feb 13, 2025
1 parent 0886a0f commit 3da6a3d
Show file tree
Hide file tree
Showing 3 changed files with 21 additions and 24 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ requires-python = ">=3.9"
dependencies = [
"SQLAlchemy >=1.4, <1.5",
"alembic >=1.7",
"datapackage >=1.15.2",
"frictionless >=5.18",
"python-dateutil >=2.8.1",
"numpy >=1.20.2",
"scipy >=1.7.1",
Expand Down
24 changes: 13 additions & 11 deletions spinedb_api/spine_io/importers/datapackage_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,9 @@
######################################################################################################################

""" Contains DataPackageConnector class. """
from itertools import chain
from itertools import chain, islice
import threading
from datapackage import Package
import tabulator.exceptions
import frictionless
from ...exception import ConnectorError
from .reader import SourceConnection

Expand Down Expand Up @@ -63,7 +62,7 @@ def connect_to_source(self, source, **extras):
**extras: ignored
"""
if source:
self._datapackage = Package(source)
self._datapackage = frictionless.Package(source)
self._filename = source

def disconnect(self):
Expand Down Expand Up @@ -95,11 +94,12 @@ def get_data_iterator(self, table, options, max_rows=-1):
"""
if not self._datapackage:
return iter([]), []
max_rows = max_rows if max_rows >= 0 else None

def iterator(r):
def iterator(i):
try:
yield from (item for row, item in enumerate(r.iter(cast=False)) if row != max_rows)
except tabulator.exceptions.TabulatorException as error:
yield from i
except frictionless.exception.FrictionlessException as error:
raise ConnectorError(str(error)) from error

has_header = options.get("has_header", True)
Expand All @@ -108,9 +108,11 @@ def iterator(r):
if resource.name is None:
resource.infer()
if table == resource.name:
if has_header:
header = resource.schema.field_names
return iterator(resource), header
return chain([resource.headers], iterator(resource)), None
with resource:
i = iterator(islice((row.to_list(json=True) for row in resource.row_stream), max_rows))
if has_header:
header = resource.header
return i, header
return chain([resource.header.labels], i), None
# table not found
return iter([]), []
19 changes: 7 additions & 12 deletions tests/spine_io/importers/test_datapackage_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,18 +9,13 @@
# Public License for more details. You should have received a copy of the GNU Lesser General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
######################################################################################################################

"""
Contains unit tests for DataPackageConnector.
"""
from contextlib import contextmanager
import csv
from pathlib import Path
import pickle
from tempfile import TemporaryDirectory
import unittest
from datapackage import Package
from frictionless import Package, Resource
from spinedb_api.exception import ConnectorError
from spinedb_api.spine_io.importers.datapackage_reader import DataPackageConnector

Expand Down Expand Up @@ -67,10 +62,10 @@ def test_wrong_datapackage_encoding_raises_connector_error(self):
with open(csv_file_path, "wb") as csv_file:
for row in data:
csv_file.write(row)
package = Package(base_path=temp_dir)
package.add_resource({"path": str(csv_file_path.relative_to(temp_dir))})
package = Package(basepath=temp_dir)
package.add_resource(Resource(path=str(csv_file_path.relative_to(temp_dir))))
package_path = Path(temp_dir, "datapackage.json")
package.save(package_path)
package.to_json(package_path)
reader = DataPackageConnector(None)
reader.connect_to_source(str(package_path))
data_iterator, header = reader.get_data_iterator("test_data", {"has_header": False})
Expand All @@ -85,10 +80,10 @@ def test_datapackage(rows):
with open(csv_file_path, "w", newline="") as csv_file:
csv_writer = csv.writer(csv_file)
csv_writer.writerows(rows)
package = Package(base_path=temp_dir)
package.add_resource({"path": str(csv_file_path.relative_to(temp_dir))})
package = Package(basepath=temp_dir)
package.add_resource(Resource(path=str(csv_file_path.relative_to(temp_dir))))
package_path = Path(temp_dir, "datapackage.json")
package.save(package_path)
package.to_json(package_path)
yield package_path


Expand Down

0 comments on commit 3da6a3d

Please sign in to comment.