diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index 14f8f0a..05fed8e 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -40,13 +40,15 @@ jobs: - name: Install poetry dependencies run: poetry install if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' - - name: Execute black - run: poetry run black . --check --exclude tests -t py37 - - name: Execute mypy - run: poetry run mypy doltcli + - name: Format + run: | + poetry run make fmt + - name: Lint + run: | + poetry run make lint - name: Execute pytest run: | - poetry run pytest . --cov=doltcli --cov-report=term --cov-report xml + poetry run make test - uses: codecov/codecov-action@v1 if: ${{ matrix.python-version }} == '3.9' with: diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..2e56353 --- /dev/null +++ b/Makefile @@ -0,0 +1,18 @@ +line_length = 95 +package = doltcli + +.PHONY: fmt +fmt: ## Format code with black and isort + black . --check -t py37 --line-length=${line_length} || ( black . -t py37 --line-length=${line_length} && false ) + isort . 
+ +.PHONY: lint +lint: ## Run linters + mypy ${package} + flake8 ${package} \ + --max-line-length=${line_length} \ + --ignore=F401,E501 + +.PHONY: test +test: ## Run tests + pytest tests --cov=${package} --cov-report=term --cov-report xml diff --git a/doltcli/__init__.py b/doltcli/__init__.py index 3a62005..513e6be 100644 --- a/doltcli/__init__.py +++ b/doltcli/__init__.py @@ -4,21 +4,13 @@ Dolt, DoltException, DoltHubContext, - _execute, KeyPair, Remote, Status, Table, + _execute, ) -from .types import ( - BranchT, - CommitT, - DoltT, - KeyPairT, - RemoteT, - StatusT, - TableT, -) +from .types import BranchT, CommitT, DoltT, KeyPairT, RemoteT, StatusT, TableT from .utils import ( CREATE, FORCE_CREATE, diff --git a/doltcli/branch_mixin.py b/doltcli/branch_mixin.py deleted file mode 100644 index a2e30b0..0000000 --- a/doltcli/branch_mixin.py +++ /dev/null @@ -1,15 +0,0 @@ -class BranchMixin: - def create_branch(self): - pass - - def delete_branch(self): - pass - - def move_branch(self): - pass - - def copy_branch(self): - pass - - def get_branches(self): - pass diff --git a/doltcli/config_mixin.py b/doltcli/config_mixin.py deleted file mode 100644 index a329114..0000000 --- a/doltcli/config_mixin.py +++ /dev/null @@ -1,12 +0,0 @@ -class ConfigMixin: - def list_configs(self): - pass - - def unset_configs(self): - pass - - def add_configs(self): - pass - - def get_config(self): - pass diff --git a/doltcli/creds_mixin.py b/doltcli/creds_mixin.py deleted file mode 100644 index 449b90e..0000000 --- a/doltcli/creds_mixin.py +++ /dev/null @@ -1,18 +0,0 @@ -class CredsMixin: - def new_creds(self): - pass - - def remove_creds(self): - pass - - def list_creds(self): - pass - - def check_creds(self): - pass - - def use_creds(self): - pass - - def import_creds(self): - pass diff --git a/doltcli/dolt.py b/doltcli/dolt.py index e6bfb14..281be5c 100644 --- a/doltcli/dolt.py +++ b/doltcli/dolt.py @@ -1,30 +1,15 @@ import csv +import datetime import json import logging import os 
+import shutil import tempfile from collections import OrderedDict -import datetime -import shutil from subprocess import PIPE, Popen -from typing import List, Dict, Tuple, Union, Optional, Callable, Any - -logger = logging.getLogger(__name__) - -SQL_OUTPUT_PARSERS = { - "csv": lambda fh: list(csv.DictReader(fh)), - "json": lambda fh: json.load(fh), -} +from typing import Any, Callable, Dict, List, Optional, Tuple, Union -from .types import ( - BranchT, - CommitT, - DoltT, - KeyPairT, - RemoteT, - StatusT, - TableT, -) +from .types import BranchT, CommitT, DoltT, KeyPairT, RemoteT, StatusT, TableT from .utils import ( read_columns, read_columns_sql, @@ -36,6 +21,14 @@ write_rows, ) +logger = logging.getLogger(__name__) + + +SQL_OUTPUT_PARSERS = { + "csv": lambda fh: list(csv.DictReader(fh)), + "json": lambda fh: json.load(fh), +} + class DoltException(Exception): @@ -144,7 +137,7 @@ def get_log_table_query( commit: Optional[str] = None, head: Optional[str] = None, ): - base = f""" + base = """ select dc.`commit_hash` as commit_hash, dca.`parent_hash` as parent_hash, @@ -161,7 +154,7 @@ def get_log_table_query( if commit is not None: base += f"\nWHERE dc.`commit_hash`='{commit}'" - base += f"\nORDER BY `date` DESC" + base += "\nORDER BY `date` DESC" if number is not None: base += f"\nLIMIT {number}" @@ -243,12 +236,8 @@ def __enter__(self): if self.db_path is None: raise ValueError("Cannot clone remote data without db_path set") if self.tables_to_read: - logger.info( - f"Running read-tables, creating a fresh copy of {self.db_path}" - ) - dolt = Dolt.read_tables( - self.db_path, "master", tables=self.tables_to_read - ) + logger.info(f"Running read-tables, creating a fresh copy of {self.db_path}") + dolt = Dolt.read_tables(self.db_path, "master", tables=self.tables_to_read) else: logger.info(f"Running clone, cloning remote {self.db_path}") dolt = Dolt.clone(self.db_path, self.path) @@ -280,11 +269,11 @@ def repo_name(self): @property def head(self): head_hash = 
"HASHOF('HEAD')" - head_commit = self.sql(f"select {head_hash} as hash", result_format="csv")[ - 0 - ].get("hash", None) + head_commit = self.sql(f"select {head_hash} as hash", result_format="csv")[0].get( + "hash", None + ) if not head_commit: - raise ValueError(f"Head not found") + raise ValueError("Head not found") return head_commit @property @@ -293,16 +282,16 @@ def working(self): f"select @@{self.repo_name}_working as working", result_format="csv" )[0].get("working", None) if not working: - raise ValueError(f"Working head not found") + raise ValueError("Working head not found") return working @property def active_branch(self): - active_branch = self.sql(f"select active_branch() as a", result_format="csv")[ - 0 - ].get("a", None) + active_branch = self.sql("select active_branch() as a", result_format="csv")[0].get( + "a", None + ) if not active_branch: - raise ValueError(f"Active branch not found") + raise ValueError("Active branch not found") return active_branch def execute( @@ -355,7 +344,7 @@ def init(repo_dir: Optional[str] = None, error: bool = False) -> "Dolt": try: _execute(["init"], cwd=repo_dir) - except DoltException as e: + except DoltException: if not error: return Dolt(repo_dir) return Dolt(repo_dir) @@ -406,7 +395,7 @@ def add(self, tables: Union[str, List[str]], **kwargs) -> Status: def reset( self, - tables: Union[str, List[str]], + tables: Union[str, List[str]] = [], hard: bool = False, soft: bool = False, **kwargs, @@ -419,19 +408,29 @@ def reset( :param soft: :return: """ + if not isinstance(tables, (str, list)): + raise ValueError(f"tables should be: Union[str, List[str]]; found {type(tables)}") + to_reset = to_list(tables) args = ["reset"] if hard and soft: - raise ValueError("Cannot reset hard and soft") + raise ValueError("Specify one of: hard=True, soft=True") + + if (hard or soft) and to_reset: + raise ValueError("Specify either hard/soft flag, or tables to reset") if hard: args.append("--hard") - if soft: + elif soft: + 
args.append("--soft") + elif not tables: args.append("--soft") + else: + args += to_reset - self.execute(args + to_reset, **kwargs) + self.execute(args, **kwargs) def commit( self, @@ -492,9 +491,7 @@ def merge( merge_conflict_pos = 2 if len(output) == 3 and "Fast-forward" in output[1]: - logger.info( - f"Completed fast-forward merge of {branch} into {current_branch.name}" - ) + logger.info(f"Completed fast-forward merge of {branch} into {current_branch.name}") return if len(output) == 5 and output[merge_conflict_pos].startswith("CONFLICT"): @@ -514,9 +511,7 @@ def merge( logger.info(message) status = self.status() - for table in list(status.added_tables.keys()) + list( - status.modified_tables.keys() - ): + for table in list(status.added_tables.keys()) + list(status.modified_tables.keys()): self.add(table) self.commit(message) @@ -558,7 +553,7 @@ def sql( if execute: if any([query, save, message, list_saved, batch, multi_db_dir]): - raise ValueError(f"Incompatible arguments provided") + raise ValueError("Incompatible arguments provided") args.extend(["--execute", str(execute)]) if multi_db_dir: @@ -575,9 +570,7 @@ def sql( # do something with result format if result_parser is not None: if query is None: - raise ValueError( - "Must provide a query in order to specify a result format" - ) + raise ValueError("Must provide a query in order to specify a result format") args.extend(["--query", query]) try: @@ -594,9 +587,7 @@ def sql( shutil.rmtree(d, ignore_errors=True, onerror=None) elif result_file is not None: if query is None: - raise ValueError( - "Must provide a query in order to specify a result format" - ) + raise ValueError("Must provide a query in order to specify a result format") args.extend(["--query", query]) args.extend(["--result-format", "csv"]) @@ -604,9 +595,7 @@ def sql( return output_file elif result_format in ["csv", "json"]: if query is None: - raise ValueError( - "Must provide a query in order to specify a result format" - ) + raise 
ValueError("Must provide a query in order to specify a result format") args.extend(["--query", query]) try: @@ -635,9 +624,7 @@ def log(self, number: Optional[int] = None, commit: Optional[str] = None) -> Dic """ res = read_rows_sql( self, - sql=Commit.get_log_table_query( - number=number, commit=commit, head=self.head - ), + sql=Commit.get_log_table_query(number=number, commit=commit, head=self.head), ) commits = Commit.parse_dolt_log_table(res) return commits @@ -798,7 +785,7 @@ def _get_branches(self) -> Tuple[Branch, List[Branch]]: dicts = read_rows_sql(self, sql="select * from dolt_branches") branches = [Branch(**d) for d in dicts] ab_dicts = read_rows_sql( - self, f"select * from dolt_branches where name = (select active_branch())" + self, "select * from dolt_branches where name = (select active_branch())" ) if len(ab_dicts) != 1: @@ -831,9 +818,7 @@ def checkout( :return: """ if tables and branch: - raise ValueError( - "No tables may be provided when creating a branch with checkout" - ) + raise ValueError("No tables may be provided when creating a branch with checkout") args = ["checkout"] if branch: @@ -995,9 +980,7 @@ def clone( return Dolt(new_dir) @classmethod - def _new_dir_helper( - cls, new_dir: Optional[str] = None, remote_url: Optional[str] = None - ): + def _new_dir_helper(cls, new_dir: Optional[str] = None, remote_url: Optional[str] = None): if not (new_dir or remote_url): raise ValueError("Provide either new_dir or remote_url") elif remote_url and not new_dir: @@ -1099,9 +1082,7 @@ def creds_ls(self) -> List[KeyPair]: return creds - def creds_check( - self, endpoint: Optional[str] = None, creds: Optional[str] = None - ) -> bool: + def creds_check(self, endpoint: Optional[str] = None, creds: Optional[str] = None) -> bool: """ Check that credentials authenticate with the specified endpoint, return True if authorized, False otherwise. 
:param endpoint: the endpoint to check @@ -1255,7 +1236,7 @@ def _config_helper( output = _execute(args, cwd).split("\n") result = {} - for line in [l for l in output if l and "=" in l]: + for line in [x for x in output if x is not None and "=" in x]: split = line.split(" = ") config_name, config_val = split[0], split[1] result[config_name] = config_val @@ -1295,9 +1276,7 @@ def ls(self, system: bool = False, all: bool = False, **kwargs) -> List[TableT]: if not line: pass split = line.lstrip().split() - tables.append( - Table(name=split[0], root=split[1], row_cnt=int(split[2])) - ) + tables.append(Table(name=split[0], root=split[1], row_cnt=int(split[2]))) if system_pos: for line in output[system_pos:]: diff --git a/doltcli/schema_mixin.py b/doltcli/schema_mixin.py deleted file mode 100644 index 9dd30de..0000000 --- a/doltcli/schema_mixin.py +++ /dev/null @@ -1,9 +0,0 @@ -class SchemaMixin: - def show_schema(self): - pass - - def import_schema(self): - pass - - def export_schema(self): - pass diff --git a/doltcli/schemas.py b/doltcli/schemas.py deleted file mode 100644 index e69de29..0000000 diff --git a/doltcli/table_mixin.py b/doltcli/table_mixin.py deleted file mode 100644 index 0333a62..0000000 --- a/doltcli/table_mixin.py +++ /dev/null @@ -1,15 +0,0 @@ -class TableMixin: - def import_table(self): - pass - - def export_table(self): - pass - - def copy_table(self): - pass - - def move_table(self): - pass - - def remove_table(self): - pass diff --git a/doltcli/tag_mixin.py b/doltcli/tag_mixin.py deleted file mode 100644 index 37e4002..0000000 --- a/doltcli/tag_mixin.py +++ /dev/null @@ -1,9 +0,0 @@ -class TagMixin: - def list_tag(self): - pass - - def create_tag(self): - pass - - def delete_tag(self): - pass diff --git a/doltcli/types.py b/doltcli/types.py index ce9712b..9d157ae 100644 --- a/doltcli/types.py +++ b/doltcli/types.py @@ -1,8 +1,7 @@ -from dataclasses import asdict, dataclass import datetime import json -from typing import Dict, List, Optional, 
Union, Callable, Any -import io +from dataclasses import asdict, dataclass +from typing import Any, Callable, Dict, List, Optional, Union class Encoder(json.JSONEncoder): @@ -123,7 +122,10 @@ def commit( ... def merge( - self, branch: str, message: Optional[str] = ..., squash: bool = False + self, + branch: str, + message: Optional[str] = ..., + squash: bool = False, ) -> None: ... diff --git a/doltcli/utils.py b/doltcli/utils.py index f17d415..d971d32 100644 --- a/doltcli/utils.py +++ b/doltcli/utils.py @@ -1,18 +1,16 @@ -from collections import defaultdict -from contextlib import contextmanager import csv import datetime -import logging import io +import logging import os import tempfile - -from typing import Any, Callable, Dict, Iterable, List, Set, Union, Optional - -logger = logging.getLogger() +from collections import defaultdict +from contextlib import contextmanager +from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Union from .types import DoltT +logger = logging.getLogger() DOLT_PATH = "dolt" @@ -22,9 +20,7 @@ def set_dolt_path(path: str): DOLT_PATH = path -def read_columns( - dolt: DoltT, table: str, as_of: Optional[str] = None -) -> Dict[str, list]: +def read_columns(dolt: DoltT, table: str, as_of: Optional[str] = None) -> Dict[str, list]: return read_columns_sql(dolt, get_read_table_asof_query(table, as_of)) @@ -207,10 +203,7 @@ def _import_helper( dolt.execute(args + [fname]) if commit: - msg = ( - commit_message - or f"Committing write to table {table} in {import_mode} mode" - ) + msg = commit_message or f"Committing write to table {table} in {import_mode} mode" dolt.add(table) dolt.commit(msg, date=commit_date) finally: @@ -270,7 +263,7 @@ def detach_head(db, commit): if len(commit_branches) > 0: tmp_branch = commit_branches[0] if active_branch.hash != tmp_branch["hash"]: - swtiched = True + switched = True db.checkout(tmp_branch["name"]) else: tmp_branch = f"detached_HEAD_at_{commit[:5]}" diff --git a/poetry.lock b/poetry.lock 
index 80edfc8..8e13bcc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,122 +1,162 @@ [[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +name = "appdirs" optional = false python-versions = "*" +version = "1.4.4" [[package]] -name = "atomicwrites" -version = "1.4.0" -description = "Atomic file writes." category = "dev" +description = "Atomic file writes." +name = "atomicwrites" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.4.0" [[package]] -name = "attrs" -version = "20.3.0" -description = "Classes Without Boilerplate" category = "dev" +description = "Classes Without Boilerplate" +name = "attrs" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "21.2.0" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"] -docs = ["furo", "sphinx", "zope.interface"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] +dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] 
[[package]] -name = "black" -version = "20.8b1" -description = "The uncompromising code formatter." category = "dev" +description = "The uncompromising code formatter." +name = "black" optional = false python-versions = ">=3.6" +version = "20.8b1" [package.dependencies] appdirs = "*" click = ">=7.1.2" -dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} mypy-extensions = ">=0.4.3" pathspec = ">=0.6,<1" regex = ">=2020.1.8" toml = ">=0.10.1" typed-ast = ">=1.4.0" typing-extensions = ">=3.7.4" +dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} [package.extras] colorama = ["colorama (>=0.4.3)"] d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] -name = "click" -version = "7.1.2" -description = "Composable command line interface toolkit" category = "dev" +description = "Composable command line interface toolkit" +name = "click" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" +version = "8.0.1" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] -name = "colorama" -version = "0.4.4" -description = "Cross-platform colored terminal text." category = "dev" +description = "Cross-platform colored terminal text." 
+name = "colorama" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "0.4.4" [[package]] -name = "coverage" -version = "5.5" -description = "Code coverage measurement for Python" category = "dev" +description = "Code coverage measurement for Python" +name = "coverage" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +version = "5.5" [package.extras] toml = ["toml"] [[package]] +category = "main" +description = "A backport of the dataclasses module for Python 3.6" name = "dataclasses" +optional = false +python-versions = ">=3.6, <3.7" version = "0.8" -description = "A backport of the dataclasses module for Python 3.6" + +[[package]] category = "dev" +description = "the modular source code checker: pep8 pyflakes and co" +name = "flake8" optional = false -python-versions = ">=3.6, <3.7" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +version = "3.9.2" + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.7.0,<2.8.0" +pyflakes = ">=2.3.0,<2.4.0" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] -name = "importlib-metadata" -version = "3.7.3" -description = "Read metadata from Python packages" category = "dev" +description = "Read metadata from Python packages" +name = "importlib-metadata" optional = false python-versions = ">=3.6" +version = "4.5.0" [package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +testing = ["pytest (>=4.6)", 
"pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] +category = "dev" +description = "iniconfig: brain-dead simple config-ini parsing" name = "iniconfig" +optional = false +python-versions = "*" version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" + +[[package]] +category = "dev" +description = "A Python utility / library to sort Python imports." +name = "isort" +optional = false +python-versions = ">=3.6.1,<4.0" +version = "5.9.1" + +[package.extras] +colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements_deprecated_finder = ["pipreqs", "pip-api"] + +[[package]] category = "dev" +description = "McCabe checker, plugin for flake8" +name = "mccabe" optional = false python-versions = "*" +version = "0.6.1" [[package]] -name = "mypy" -version = "0.800" -description = "Optional static typing for Python" category = "dev" +description = "Optional static typing for Python" +name = "mypy" optional = false python-versions = ">=3.5" +version = "0.800" [package.dependencies] mypy-extensions = ">=0.4.3,<0.5.0" @@ -127,39 +167,39 @@ typing-extensions = ">=3.7.4" dmypy = ["psutil (>=4.0)"] [[package]] -name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." category = "dev" +description = "Experimental type system extensions for programs checked with the mypy typechecker." 
+name = "mypy-extensions" optional = false python-versions = "*" +version = "0.4.3" [[package]] -name = "packaging" -version = "20.9" -description = "Core utilities for Python packages" category = "dev" +description = "Core utilities for Python packages" +name = "packaging" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "20.9" [package.dependencies] pyparsing = ">=2.0.2" [[package]] -name = "pathspec" -version = "0.8.1" -description = "Utility library for gitignore style pattern matching of file paths." category = "dev" +description = "Utility library for gitignore style pattern matching of file paths." +name = "pathspec" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "0.8.1" [[package]] -name = "pluggy" -version = "0.13.1" -description = "plugin and hook calling mechanisms for python" category = "dev" +description = "plugin and hook calling mechanisms for python" +name = "pluggy" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.13.1" [package.dependencies] importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} @@ -168,106 +208,123 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] [[package]] +category = "dev" +description = "library with cross-python path, ini-parsing, io, code, log facilities" name = "py" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" version = "1.10.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" + +[[package]] category = "dev" +description = "Python style guide checker" +name = "pycodestyle" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.7.0" [[package]] -name = "pyparsing" -version = "2.4.7" -description = "Python parsing module" category = "dev" +description = "passive checker of Python programs" +name 
= "pyflakes" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.3.1" + +[[package]] +category = "dev" +description = "Python parsing module" +name = "pyparsing" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "2.4.7" [[package]] -name = "pytest" -version = "6.2.2" -description = "pytest: simple powerful testing with Python" category = "dev" +description = "pytest: simple powerful testing with Python" +name = "pytest" optional = false python-versions = ">=3.6" +version = "6.2.4" [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=19.2.0" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<1.0.0a1" py = ">=1.8.2" toml = "*" +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +colorama = {version = "*", markers = "sys_platform == \"win32\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] -name = "pytest-cov" -version = "2.11.1" -description = "Pytest plugin for measuring coverage." category = "dev" +description = "Pytest plugin for measuring coverage." +name = "pytest-cov" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "2.12.1" [package.dependencies] coverage = ">=5.2.1" pytest = ">=4.6" +toml = "*" [package.extras] -testing = ["fields", "hunter", "process-tests (==2.0.2)", "six", "pytest-xdist", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] [[package]] -name = "regex" -version = "2021.3.17" -description = "Alternative regular expression module, to replace re." 
category = "dev" +description = "Alternative regular expression module, to replace re." +name = "regex" optional = false python-versions = "*" +version = "2021.4.4" [[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" category = "dev" +description = "Python Library for Tom's Obvious, Minimal Language" +name = "toml" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "0.10.2" [[package]] -name = "typed-ast" -version = "1.4.2" -description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" +description = "a fork of Python 2 and 3 ast modules with type comment support" +name = "typed-ast" optional = false python-versions = "*" +version = "1.4.3" [[package]] -name = "typing-extensions" -version = "3.7.4.3" -description = "Backported and Experimental Type Hints for Python 3.5+" category = "dev" +description = "Backported and Experimental Type Hints for Python 3.5+" +name = "typing-extensions" optional = false python-versions = "*" +version = "3.10.0.0" [[package]] -name = "zipp" -version = "3.4.1" -description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" +description = "Backport of pathlib-compatible object wrapper for zip files" +name = "zipp" optional = false python-versions = ">=3.6" +version = "3.4.1" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [metadata] +content-hash = "f396c42428c0c50297105a149fc7b32ce2f96f4187c07061dec1d7fe6a852ed0" lock-version = "1.1" -python-versions = "^3.6" -content-hash = "69bd5d5a73c6a65e339a0c999def8af2b5be98bf7c2c74192bc26830e4ea2ac8" +python-versions = ">=3.6.1,<4.0" [metadata.files] appdirs = [ @@ -279,15 +336,15 @@ atomicwrites = [ {file = 
"atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ - {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, - {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, + {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, + {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, ] black = [ {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, ] click = [ - {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, - {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, + {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, + {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, @@ -351,14 +408,26 @@ dataclasses = [ {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, ] +flake8 = [ + {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, + {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, +] importlib-metadata = [ - {file = 
"importlib_metadata-3.7.3-py3-none-any.whl", hash = "sha256:b74159469b464a99cb8cc3e21973e4d96e05d3024d337313fedb618a6e86e6f4"}, - {file = "importlib_metadata-3.7.3.tar.gz", hash = "sha256:742add720a20d0467df2f444ae41704000f50e1234f46174b51f9c6031a1bd71"}, + {file = "importlib_metadata-4.5.0-py3-none-any.whl", hash = "sha256:833b26fb89d5de469b24a390e9df088d4e52e4ba33b01dc5e0e4f41b81a16c00"}, + {file = "importlib_metadata-4.5.0.tar.gz", hash = "sha256:b142cc1dd1342f31ff04bb7d022492b09920cb64fed867cd3ea6f80fe3ebd139"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] +isort = [ + {file = "isort-5.9.1-py3-none-any.whl", hash = "sha256:8e2c107091cfec7286bc0f68a547d0ba4c094d460b732075b6fba674f1035c0c"}, + {file = "isort-5.9.1.tar.gz", hash = "sha256:83510593e07e433b77bd5bff0f6f607dbafa06d1a89022616f02d8b699cfcd56"}, +] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] mypy = [ {file = "mypy-0.800-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:e1c84c65ff6d69fb42958ece5b1255394714e0aac4df5ffe151bc4fe19c7600a"}, {file = "mypy-0.800-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:947126195bfe4709c360e89b40114c6746ae248f04d379dca6f6ab677aa07641"}, @@ -403,101 +472,109 @@ py = [ {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, ] +pycodestyle = [ + {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = 
"sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, + {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, +] +pyflakes = [ + {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, + {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, +] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, ] pytest = [ - {file = "pytest-6.2.2-py3-none-any.whl", hash = "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839"}, - {file = "pytest-6.2.2.tar.gz", hash = "sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9"}, + {file = "pytest-6.2.4-py3-none-any.whl", hash = "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890"}, + {file = "pytest-6.2.4.tar.gz", hash = "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b"}, ] pytest-cov = [ - {file = "pytest-cov-2.11.1.tar.gz", hash = "sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7"}, - {file = "pytest_cov-2.11.1-py2.py3-none-any.whl", hash = "sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da"}, + {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, + {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, ] regex = [ - {file = "regex-2021.3.17-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b97ec5d299c10d96617cc851b2e0f81ba5d9d6248413cd374ef7f3a8871ee4a6"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:cb4ee827857a5ad9b8ae34d3c8cc51151cb4a3fe082c12ec20ec73e63cc7c6f0"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:633497504e2a485a70a3268d4fc403fe3063a50a50eed1039083e9471ad0101c"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:a59a2ee329b3de764b21495d78c92ab00b4ea79acef0f7ae8c1067f773570afa"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f85d6f41e34f6a2d1607e312820971872944f1661a73d33e1e82d35ea3305e14"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:4651f839dbde0816798e698626af6a2469eee6d9964824bb5386091255a1694f"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:39c44532d0e4f1639a89e52355b949573e1e2c5116106a395642cbbae0ff9bcd"}, - {file = "regex-2021.3.17-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3d9a7e215e02bd7646a91fb8bcba30bc55fd42a719d6b35cf80e5bae31d9134e"}, - {file = "regex-2021.3.17-cp36-cp36m-win32.whl", hash = "sha256:159fac1a4731409c830d32913f13f68346d6b8e39650ed5d704a9ce2f9ef9cb3"}, - {file = "regex-2021.3.17-cp36-cp36m-win_amd64.whl", hash = "sha256:13f50969028e81765ed2a1c5fcfdc246c245cf8d47986d5172e82ab1a0c42ee5"}, - {file = "regex-2021.3.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b9d8d286c53fe0cbc6d20bf3d583cabcd1499d89034524e3b94c93a5ab85ca90"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:201e2619a77b21a7780580ab7b5ce43835e242d3e20fef50f66a8df0542e437f"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d47d359545b0ccad29d572ecd52c9da945de7cd6cf9c0cfcb0269f76d3555689"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:ea2f41445852c660ba7c3ebf7d70b3779b20d9ca8ba54485a17740db49f46932"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:486a5f8e11e1f5bbfcad87f7c7745eb14796642323e7e1829a331f87a713daaa"}, - {file = 
"regex-2021.3.17-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:18e25e0afe1cf0f62781a150c1454b2113785401ba285c745acf10c8ca8917df"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:a2ee026f4156789df8644d23ef423e6194fad0bc53575534101bb1de5d67e8ce"}, - {file = "regex-2021.3.17-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:4c0788010a93ace8a174d73e7c6c9d3e6e3b7ad99a453c8ee8c975ddd9965643"}, - {file = "regex-2021.3.17-cp37-cp37m-win32.whl", hash = "sha256:575a832e09d237ae5fedb825a7a5bc6a116090dd57d6417d4f3b75121c73e3be"}, - {file = "regex-2021.3.17-cp37-cp37m-win_amd64.whl", hash = "sha256:8e65e3e4c6feadf6770e2ad89ad3deb524bcb03d8dc679f381d0568c024e0deb"}, - {file = "regex-2021.3.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a0df9a0ad2aad49ea3c7f65edd2ffb3d5c59589b85992a6006354f6fb109bb18"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b98bc9db003f1079caf07b610377ed1ac2e2c11acc2bea4892e28cc5b509d8d5"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:808404898e9a765e4058bf3d7607d0629000e0a14a6782ccbb089296b76fa8fe"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:5770a51180d85ea468234bc7987f5597803a4c3d7463e7323322fe4a1b181578"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:976a54d44fd043d958a69b18705a910a8376196c6b6ee5f2596ffc11bff4420d"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:63f3ca8451e5ff7133ffbec9eda641aeab2001be1a01878990f6c87e3c44b9d5"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bcd945175c29a672f13fce13a11893556cd440e37c1b643d6eeab1988c8b209c"}, - {file = "regex-2021.3.17-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:3d9356add82cff75413bec360c1eca3e58db4a9f5dafa1f19650958a81e3249d"}, - {file = "regex-2021.3.17-cp38-cp38-win32.whl", hash = "sha256:f5d0c921c99297354cecc5a416ee4280bd3f20fd81b9fb671ca6be71499c3fdf"}, - 
{file = "regex-2021.3.17-cp38-cp38-win_amd64.whl", hash = "sha256:14de88eda0976020528efc92d0a1f8830e2fb0de2ae6005a6fc4e062553031fa"}, - {file = "regex-2021.3.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4c2e364491406b7888c2ad4428245fc56c327e34a5dfe58fd40df272b3c3dab3"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux1_i686.whl", hash = "sha256:8bd4f91f3fb1c9b1380d6894bd5b4a519409135bec14c0c80151e58394a4e88a"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:882f53afe31ef0425b405a3f601c0009b44206ea7f55ee1c606aad3cc213a52c"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:07ef35301b4484bce843831e7039a84e19d8d33b3f8b2f9aab86c376813d0139"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:360a01b5fa2ad35b3113ae0c07fb544ad180603fa3b1f074f52d98c1096fa15e"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:709f65bb2fa9825f09892617d01246002097f8f9b6dde8d1bb4083cf554701ba"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:c66221e947d7207457f8b6f42b12f613b09efa9669f65a587a2a71f6a0e4d106"}, - {file = "regex-2021.3.17-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:c782da0e45aff131f0bed6e66fbcfa589ff2862fc719b83a88640daa01a5aff7"}, - {file = "regex-2021.3.17-cp39-cp39-win32.whl", hash = "sha256:dc9963aacb7da5177e40874585d7407c0f93fb9d7518ec58b86e562f633f36cd"}, - {file = "regex-2021.3.17-cp39-cp39-win_amd64.whl", hash = "sha256:a0d04128e005142260de3733591ddf476e4902c0c23c1af237d9acf3c96e1b38"}, - {file = "regex-2021.3.17.tar.gz", hash = "sha256:4b8a1fb724904139149a43e172850f35aa6ea97fb0545244dc0b805e0154ed68"}, + {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, + {file = 
"regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, + {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, + {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, + {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, + {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, + {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, + {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, + {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, + {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = 
"sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, + {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, + {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, + {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, + {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, ] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] typed-ast = [ - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487"}, - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412"}, - {file = "typed_ast-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400"}, - {file = "typed_ast-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606"}, - {file = "typed_ast-1.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a"}, - {file = "typed_ast-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151"}, - {file = "typed_ast-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3"}, - {file = "typed_ast-1.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41"}, - {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f"}, - {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581"}, - {file = "typed_ast-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37"}, - {file = "typed_ast-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd"}, - {file = "typed_ast-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496"}, - {file = "typed_ast-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787"}, - {file = "typed_ast-1.4.2-cp38-cp38-win32.whl", hash = "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2"}, - {file = "typed_ast-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937"}, - {file = "typed_ast-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1"}, - {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6"}, - {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166"}, - {file = "typed_ast-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d"}, - {file = "typed_ast-1.4.2-cp39-cp39-win32.whl", hash = "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b"}, - {file = "typed_ast-1.4.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440"}, - {file = "typed_ast-1.4.2.tar.gz", hash = "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, + {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, + {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, + {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, + {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, + {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, + {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, + {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, + {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, + {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, ] typing-extensions = [ - {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, - {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"}, - {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, + {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, + {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, + {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, ] zipp = [ {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, diff --git a/pyproject.toml b/pyproject.toml index c854f61..9a380fb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ description = "Slim Python interface for Dolt's CLI API." 
authors = ["Max Hoffman ", "Oscar Batori "] [tool.poetry.dependencies] -python = "^3.6" +python = ">=3.6.1,<4.0" dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} [tool.poetry.dev-dependencies] @@ -13,6 +13,11 @@ pytest = "^6.2.2" black = "^20.8b1" mypy = "0.800" pytest-cov = "^2.11.1" +isort = "^5.9.1" +flake8 = "^3.9.2" + +[tool.isort] +profile = "black" [build-system] requires = ["poetry-core>=1.0.0a5"] diff --git a/tests/conftest.py b/tests/conftest.py index 39caed2..9758fe5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,5 @@ -import datetime import csv +import datetime import os import shutil from typing import Tuple @@ -8,16 +8,26 @@ from doltcli import Dolt -TEST_TABLE = 'characters' +TEST_TABLE = "characters" TEST_DATA_INITIAL = [ - {'name': 'Anna', 'adjective': 'tragic', 'id': 1, 'date_of_death': datetime.datetime(1877, 1, 1)}, - {'name': 'Vronksy', 'adjective': 'honorable', 'id': 2, 'date_of_death': None}, - {'name': 'Oblonksy', 'adjective': 'buffoon', 'id': 3, 'date_of_death': None}, + { + "name": "Anna", + "adjective": "tragic", + "id": 1, + "date_of_death": datetime.datetime(1877, 1, 1), + }, + {"name": "Vronksy", "adjective": "honorable", "id": 2, "date_of_death": None}, + {"name": "Oblonksy", "adjective": "buffoon", "id": 3, "date_of_death": None}, ] TEST_DATA_UPDATE = [ - {'name': 'Vronksy', 'adjective': 'honorable', 'id': 2, 'date_of_death': datetime.datetime(1879, 1, 1)}, - {'name': 'Levin', 'adjective': 'tiresome', 'id': 4, 'date_of_death': None}, + { + "name": "Vronksy", + "adjective": "honorable", + "id": 2, + "date_of_death": datetime.datetime(1879, 1, 1), + }, + {"name": "Levin", "adjective": "tiresome", "id": 4, "date_of_death": None}, ] TEST_DATA_FINAL = [TEST_DATA_INITIAL[0], TEST_DATA_INITIAL[2]] + TEST_DATA_UPDATE @@ -25,9 +35,9 @@ def get_repo_path_tmp_path(path: str, subpath: str = None) -> Tuple[str, str]: if subpath: - return os.path.join(path, subpath), os.path.join(path, subpath, '.dolt') + 
return os.path.join(path, subpath), os.path.join(path, subpath, ".dolt") else: - return path, os.path.join(path, '.dolt') + return path, os.path.join(path, ".dolt") @pytest.fixture() @@ -38,7 +48,8 @@ def with_test_data_initial_file(tmp_path): @pytest.fixture() def with_test_table(init_empty_test_repo): dolt = init_empty_test_repo - dolt.sql(query=f''' + dolt.sql( + query=f""" CREATE TABLE `{TEST_TABLE}` ( `name` VARCHAR(32), `adjective` VARCHAR(32), @@ -46,9 +57,10 @@ def with_test_table(init_empty_test_repo): `date_of_death` DATETIME, PRIMARY KEY (`id`) ); - ''') + """ + ) dolt.add(TEST_TABLE) - dolt.commit('Created test table') + dolt.commit("Created test table") return dolt @@ -73,17 +85,17 @@ def doltdb(): @pytest.fixture() def with_test_data_initial_file(tmp_path): - return _test_data_to_file(tmp_path, 'initial', TEST_DATA_INITIAL) + return _test_data_to_file(tmp_path, "initial", TEST_DATA_INITIAL) @pytest.fixture() def with_test_data_final_file(tmp_path): - return _test_data_to_file(tmp_path, 'final', TEST_DATA_FINAL) + return _test_data_to_file(tmp_path, "final", TEST_DATA_FINAL) def _test_data_to_file(file_path, file_name, test_data): path = os.path.join(file_path, file_name) - with open(path, 'w') as fh: + with open(path, "w") as fh: csv_writer = csv.DictWriter(fh, fieldnames=test_data[0].keys()) csv_writer.writeheader() csv_writer.writerows(test_data) @@ -98,7 +110,7 @@ def init_empty_test_repo(tmpdir) -> Dolt: @pytest.fixture def init_other_empty_test_repo(tmpdir) -> Dolt: - return _init_helper(tmpdir, 'other') + return _init_helper(tmpdir, "other") def _init_helper(path: str, ext: str = None): diff --git a/tests/helpers.py b/tests/helpers.py index b95c4e4..c059261 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -18,17 +18,19 @@ def read_csv_to_dict(file): def compare_rows_helper(expected: List[dict], actual: List[dict]): - assert len(expected) == len(actual), f'Unequal row counts: {len(expected)} != {len(actual)}' + assert len(expected) == len( 
+ actual + ), f"Unequal row counts: {len(expected)} != {len(actual)}" errors = [] for l, r in zip(expected, actual): l_cols, r_cols = set(l.keys()), set(r.keys()) - assert l_cols == r_cols, f'Unequal sets of columns: {l_cols} != {r_cols}' + assert l_cols == r_cols, f"Unequal sets of columns: {l_cols} != {r_cols}" for col in l_cols: l_val, r_val = l[col], r[col] - if col.startswith('date'): + if col.startswith("date"): l_val, r_val = l_val[:10], r_val[:10] - if l_val != r_val and not (l_val is None and r_val == ''): - errors.append(f'{col}: {l_val} != {r_val}') + if l_val != r_val and not (l_val is None and r_val == ""): + errors.append(f"{col}: {l_val} != {r_val}") - error_str = '\n'.join(errors) - assert not errors, f'Failed with the following unequal columns:\n{error_str}' + error_str = "\n".join(errors) + assert not errors, f"Failed with the following unequal columns:\n{error_str}" diff --git a/tests/test_dolt.py b/tests/test_dolt.py index 3d20ee8..e863382 100644 --- a/tests/test_dolt.py +++ b/tests/test_dolt.py @@ -3,35 +3,31 @@ import shutil import tempfile import uuid - -from tests.helpers import compare_rows_helper, read_csv_to_dict -from typing import Tuple, List +from typing import List, Tuple import pytest + from doltcli import ( - detach_head, + CREATE, + UPDATE, Dolt, DoltException, _execute, - CREATE, - UPDATE, + detach_head, read_rows, - write_rows, set_dolt_path, + write_rows, ) +from tests.helpers import compare_rows_helper, read_csv_to_dict - -BASE_TEST_ROWS = [ - {'name': 'Rafael', 'id': '1'}, - {'name': 'Novak', 'id': '2'} -] +BASE_TEST_ROWS = [{"name": "Rafael", "id": "1"}, {"name": "Novak", "id": "2"}] def get_repo_path_tmp_path(path: str, subpath: str = None) -> Tuple[str, str]: if subpath: - return os.path.join(path, subpath), os.path.join(path, subpath, '.dolt') + return os.path.join(path, subpath), os.path.join(path, subpath, ".dolt") else: - return path, os.path.join(path, '.dolt') + return path, os.path.join(path, ".dolt") @pytest.fixture 
@@ -49,19 +45,21 @@ def create_test_data(tmp_path) -> str: @pytest.fixture def create_test_table(init_empty_test_repo, create_test_data) -> Tuple[Dolt, str]: repo, test_data_path = init_empty_test_repo, create_test_data - repo.sql(query=''' + repo.sql( + query=""" CREATE TABLE `test_players` ( `name` LONGTEXT NOT NULL COMMENT 'tag:0', `id` BIGINT NOT NULL COMMENT 'tag:1', PRIMARY KEY (`id`) ); - ''') + """ + ) data = BASE_TEST_ROWS - write_rows(repo, 'test_players', data, UPDATE, commit=False) - yield repo, 'test_players' + write_rows(repo, "test_players", data, UPDATE, commit=False) + yield repo, "test_players" - if 'test_players' in [table.name for table in repo.ls()]: - _execute(['table', 'rm', 'test_players'], repo.repo_dir) + if "test_players" in [table.name for table in repo.ls()]: + _execute(["table", "rm", "test_players"], repo.repo_dir) def test_init(tmp_path): @@ -82,7 +80,7 @@ def test_commit(create_test_table): repo, test_table = create_test_table repo.add(test_table) before_commit_count = len(repo.log()) - repo.commit('Julianna, the very serious intellectual') + repo.commit("Julianna, the very serious intellectual") assert repo.status().is_clean and len(repo.log()) == before_commit_count + 1 @@ -103,26 +101,26 @@ def test_active_branch(create_test_table): def test_merge_fast_forward(create_test_table): repo, test_table = create_test_table - message_one = 'Base branch' - message_two = 'Other branch' - message_merge = 'merge' + message_one = "Base branch" + message_two = "Other branch" + message_merge = "merge" # commit the current working set to master repo.add(test_table) repo.commit(message_one) # create another branch from the working set - repo.branch('other') + repo.branch("other") # create a non-trivial commit against `other` - repo.checkout('other') + repo.checkout("other") repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Juan Martin", 5)') repo.add(test_table) repo.commit(message_two) # merge - repo.checkout('master') - 
repo.merge('other', message_merge) + repo.checkout("master") + repo.merge("other", message_merge) commits = list(repo.log().values()) fast_forward_commit = commits[0] @@ -135,16 +133,16 @@ def test_merge_fast_forward(create_test_table): def test_merge_conflict(create_test_table): repo, test_table = create_test_table - message_one = 'Base branch' - message_two = 'Base branch new data' - message_three = 'Other branch' - message_merge = 'merge' + message_one = "Base branch" + message_two = "Base branch new data" + message_three = "Other branch" + message_merge = "merge" # commit the current working set to master repo.add(test_table) repo.commit(message_one) # create another branch from the working set - repo.branch('other') + repo.branch("other") # create a non-trivial commit against `master` repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Stan", 4)') @@ -152,14 +150,14 @@ def test_merge_conflict(create_test_table): repo.commit(message_two) # create a non-trivial commit against `other` - repo.checkout('other') + repo.checkout("other") repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Marin", 4)') repo.add(test_table) repo.commit(message_three) # merge - repo.checkout('master') - repo.merge('other', message_merge) + repo.checkout("master") + repo.merge("other", message_merge) commits = list(repo.log().values()) head_of_master = commits[0] @@ -169,10 +167,10 @@ def test_merge_conflict(create_test_table): def test_dolt_log(create_test_table): repo, test_table = create_test_table - message_one = 'Julianna, the very serious intellectual' - message_two = 'Added Stan the Man' + message_one = "Julianna, the very serious intellectual" + message_two = "Added Stan the Man" repo.add(test_table) - repo.commit('Julianna, the very serious intellectual') + repo.commit("Julianna, the very serious intellectual") repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Stan", 4)') repo.add(test_table) repo.commit(message_two) @@ -185,10 +183,10 @@ def 
test_dolt_log(create_test_table): def test_dolt_log_scope(create_test_table): repo, test_table = create_test_table - message_one = 'Julianna, the very serious intellectual' - message_two = 'Added Stan the Man' + message_one = "Julianna, the very serious intellectual" + message_two = "Added Stan the Man" repo.add(test_table) - repo.commit('Julianna, the very serious intellectual') + repo.commit("Julianna, the very serious intellectual") repo.checkout("tmp_br", checkout_branch=True) repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Stan", 4)') repo.add(test_table) @@ -202,10 +200,10 @@ def test_dolt_log_scope(create_test_table): def test_dolt_log_number(create_test_table): repo, test_table = create_test_table - message_one = 'Julianna, the very serious intellectual' - message_two = 'Added Stan the Man' + message_one = "Julianna, the very serious intellectual" + message_two = "Added Stan the Man" repo.add(test_table) - repo.commit('Julianna, the very serious intellectual') + repo.commit("Julianna, the very serious intellectual") repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Stan", 4)') repo.add(test_table) repo.commit(message_two) @@ -224,10 +222,10 @@ def test_dolt_single_commit_log(create_test_table): def test_dolt_log_commit(create_test_table): repo, test_table = create_test_table - message_one = 'Julianna, the very serious intellectual' - message_two = 'Added Stan the Man' + message_one = "Julianna, the very serious intellectual" + message_two = "Added Stan the Man" repo.add(test_table) - repo.commit('Julianna, the very serious intellectual') + repo.commit("Julianna, the very serious intellectual") repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Stan", 4)') repo.add(test_table) repo.commit(message_two) @@ -242,16 +240,16 @@ def test_dolt_log_commit(create_test_table): def test_dolt_log_merge_commit(create_test_table): repo, test_table = create_test_table - message_one = 'Base branch' - message_two = 'Base branch new data' - 
message_three = 'Other branch' - message_merge = 'merge' + message_one = "Base branch" + message_two = "Base branch new data" + message_three = "Other branch" + message_merge = "merge" # commit the current working set to master repo.add(test_table) repo.commit(message_one) # create another branch from the working set - repo.branch('other') + repo.branch("other") # create a non-trivial commit against `master` repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Stan", 4)') @@ -259,14 +257,14 @@ def test_dolt_log_merge_commit(create_test_table): repo.commit(message_two) # create a non-trivial commit against `other` - repo.checkout('other') + repo.checkout("other") repo.sql('INSERT INTO `test_players` (`name`, `id`) VALUES ("Juan Martin", 5)') repo.add(test_table) repo.commit(message_three) # merge - repo.checkout('master') - repo.merge('other', message_merge) + repo.checkout("master") + repo.merge("other", message_merge) commits = list(repo.log().values()) merge_commit = commits[0] @@ -279,7 +277,7 @@ def test_dolt_log_merge_commit(create_test_table): def test_get_dirty_tables(create_test_table): repo, test_table = create_test_table - message = 'Committing test data' + message = "Committing test data" # Some test data initial = [dict(id=1, name="Bianca", role="Champion")] @@ -293,12 +291,12 @@ def _insert_row_helper(repo, table, row): repo.commit(message) # existing, modified, staged - modified_staged = 'modified_staged' + modified_staged = "modified_staged" write_rows(repo, modified_staged, initial, commit=False) repo.add(modified_staged) # existing, modified, unstaged - modified_unstaged = 'modified_unstaged' + modified_unstaged = "modified_unstaged" write_rows(repo, modified_unstaged, initial, commit=False) repo.add(modified_unstaged) @@ -310,18 +308,22 @@ def _insert_row_helper(repo, table, row): write_rows(repo, modified_unstaged, appended_row, UPDATE, commit=False) # created, staged - created_staged = 'created_staged' - write_rows(repo, created_staged, 
initial, import_mode=CREATE, primary_key=['id'], commit=False) + created_staged = "created_staged" + write_rows( + repo, created_staged, initial, import_mode=CREATE, primary_key=["id"], commit=False + ) repo.add(created_staged) # created, unstaged - created_unstaged = 'created_unstaged' - write_rows(repo, created_unstaged, initial, import_mode=CREATE, primary_key=['id'], commit=False) + created_unstaged = "created_unstaged" + write_rows( + repo, created_unstaged, initial, import_mode=CREATE, primary_key=["id"], commit=False + ) status = repo.status() - expected_new_tables = {'created_staged': True, 'created_unstaged': False} - expected_changes = {'modified_staged': True, 'modified_unstaged': False} + expected_new_tables = {"created_staged": True, "created_unstaged": False} + expected_changes = {"modified_staged": True, "modified_unstaged": False} assert status.added_tables == expected_new_tables assert status.modified_tables == expected_changes @@ -336,39 +338,42 @@ def test_checkout_with_tables(create_test_table): def test_branch(create_test_table): repo, _ = create_test_table active_branch, branches = repo.branch() - assert [active_branch.name] == [branch.name for branch in branches] == ['master'] + assert [active_branch.name] == [branch.name for branch in branches] == ["master"] - repo.checkout('dosac', checkout_branch=True) - repo.checkout('master') + repo.checkout("dosac", checkout_branch=True) + repo.checkout("master") next_active_branch, next_branches = repo.branch() - assert set(branch.name for branch in next_branches) == {'master', 'dosac'} and next_active_branch.name == 'master' + assert ( + set(branch.name for branch in next_branches) == {"master", "dosac"} + and next_active_branch.name == "master" + ) - repo.checkout('dosac') + repo.checkout("dosac") different_active_branch, _ = repo.branch() - assert different_active_branch.name == 'dosac' + assert different_active_branch.name == "dosac" # we want to make sure that we can delte a branch atomically def 
test_branch_delete(create_test_table): repo, _ = create_test_table - _verify_branches(repo, ['master']) + _verify_branches(repo, ["master"]) - repo.checkout('dosac', checkout_branch=True) - repo.checkout('master') - _verify_branches(repo, ['master', 'dosac']) + repo.checkout("dosac", checkout_branch=True) + repo.checkout("master") + _verify_branches(repo, ["master", "dosac"]) - repo.branch('dosac', delete=True) - _verify_branches(repo, ['master']) + repo.branch("dosac", delete=True) + _verify_branches(repo, ["master"]) def test_branch_move(create_test_table): repo, _ = create_test_table - _verify_branches(repo, ['master']) + _verify_branches(repo, ["master"]) - repo.branch('master', move=True, new_branch='dosac') - _verify_branches(repo, ['dosac']) + repo.branch("master", move=True, new_branch="dosac") + _verify_branches(repo, ["dosac"]) def _verify_branches(repo: Dolt, branch_list: List[str]): @@ -378,10 +383,10 @@ def _verify_branches(repo: Dolt, branch_list: List[str]): def test_remote_list(create_test_table): repo, _ = create_test_table - repo.remote(add=True, name='origin', url='blah-blah') - assert repo.remote()[0].name == 'origin' - repo.remote(add=True, name='another-origin', url='blah-blah') - assert set([remote.name for remote in repo.remote()]) == {'origin', 'another-origin'} + repo.remote(add=True, name="origin", url="blah-blah") + assert repo.remote()[0].name == "origin" + repo.remote(add=True, name="another-origin", url="blah-blah") + assert set([remote.name for remote in repo.remote()]) == {"origin", "another-origin"} def test_checkout_non_existent_branch(doltdb): @@ -401,32 +406,38 @@ def test_ls_empty(init_empty_test_repo): def test_sql(create_test_table): repo, test_table = create_test_table - sql = ''' + sql = """ INSERT INTO {table} (name, id) VALUES ('Roger', 3) - '''.format(table=test_table) + """.format( + table=test_table + ) repo.sql(query=sql) test_data = read_rows(repo, test_table) - assert 'Roger' in [x["name"] for x in test_data] + 
assert "Roger" in [x["name"] for x in test_data] def test_sql_json(create_test_table): repo, test_table = create_test_table - result = repo.sql(query='SELECT * FROM `{table}`'.format(table=test_table), result_format='json')['rows'] + result = repo.sql( + query="SELECT * FROM `{table}`".format(table=test_table), result_format="json" + )["rows"] _verify_against_base_rows(result) def test_sql_csv(create_test_table): repo, test_table = create_test_table - result = repo.sql(query='SELECT * FROM `{table}`'.format(table=test_table), result_format='csv') + result = repo.sql( + query="SELECT * FROM `{table}`".format(table=test_table), result_format="csv" + ) _verify_against_base_rows(result) def _verify_against_base_rows(result: List[dict]): assert len(result) == len(BASE_TEST_ROWS) - result_sorted = sorted(result, key=lambda el: el['id']) + result_sorted = sorted(result, key=lambda el: el["id"]) for left, right in zip(BASE_TEST_ROWS, result_sorted): assert set(left.keys()) == set(right.keys()) for k in left.keys(): @@ -436,20 +447,20 @@ def _verify_against_base_rows(result: List[dict]): assert str(left[k]) == str(right[k]) -TEST_IMPORT_FILE_DATA = ''' +TEST_IMPORT_FILE_DATA = """ name,id roger,1 rafa,2 -'''.lstrip() +""".lstrip() def test_schema_import_create(init_empty_test_repo, tmp_path): repo = init_empty_test_repo - table = 'test_table' - test_file = tmp_path / 'test_data.csv' - with open(test_file, 'w') as f: + table = "test_table" + test_file = tmp_path / "test_data.csv" + with open(test_file, "w") as f: f.writelines(TEST_IMPORT_FILE_DATA) - repo.schema_import(table=table, create=True, pks=['id'], filename=test_file) + repo.schema_import(table=table, create=True, pks=["id"], filename=test_file) assert repo.status().added_tables == {table: False} @@ -457,29 +468,34 @@ def test_schema_import_create(init_empty_test_repo, tmp_path): def test_config_global(init_empty_test_repo): _ = init_empty_test_repo current_global_config = Dolt.config_global(list=True) - 
test_username, test_email = 'test_user', 'test_email' - Dolt.config_global(add=True, name='user.name', value=test_username) - Dolt.config_global(add=True, name='user.email', value=test_email) + test_username, test_email = "test_user", "test_email" + Dolt.config_global(add=True, name="user.name", value=test_username) + Dolt.config_global(add=True, name="user.email", value=test_email) updated_config = Dolt.config_global(list=True) - assert updated_config['user.name'] == test_username and updated_config['user.email'] == test_email - Dolt.config_global(add=True, name='user.name', value=current_global_config['user.name']) - Dolt.config_global(add=True, name='user.email', value=current_global_config['user.email']) + assert ( + updated_config["user.name"] == test_username + and updated_config["user.email"] == test_email + ) + Dolt.config_global(add=True, name="user.name", value=current_global_config["user.name"]) + Dolt.config_global(add=True, name="user.email", value=current_global_config["user.email"]) reset_config = Dolt.config_global(list=True) - assert reset_config['user.name'] == current_global_config['user.name'] - assert reset_config['user.email'] == current_global_config['user.email'] + assert reset_config["user.name"] == current_global_config["user.name"] + assert reset_config["user.email"] == current_global_config["user.email"] def test_config_local(init_empty_test_repo): repo = init_empty_test_repo current_global_config = Dolt.config_global(list=True) - test_username, test_email = 'test_user', 'test_email' - repo.config_local(add=True, name='user.name', value=test_username) - repo.config_local(add=True, name='user.email', value=test_email) + test_username, test_email = "test_user", "test_email" + repo.config_local(add=True, name="user.name", value=test_username) + repo.config_local(add=True, name="user.email", value=test_email) local_config = repo.config_local(list=True) global_config = Dolt.config_global(list=True) - assert local_config['user.name'] == 
test_username and local_config['user.email'] == test_email - assert global_config['user.name'] == current_global_config['user.name'] - assert global_config['user.email'] == current_global_config['user.email'] + assert ( + local_config["user.name"] == test_username and local_config["user.email"] == test_email + ) + assert global_config["user.name"] == current_global_config["user.name"] + assert global_config["user.email"] == current_global_config["user.email"] def test_detached_head_cm(doltdb): @@ -515,29 +531,37 @@ def test_clone_new_dir(tmp_path): def test_dolt_sql_csv(init_empty_test_repo): dolt = init_empty_test_repo - write_rows(dolt, 'test_table', BASE_TEST_ROWS, commit=True) - result = dolt.sql("SELECT `name` as name, `id` as id FROM test_table ", result_format='csv') + write_rows(dolt, "test_table", BASE_TEST_ROWS, commit=True) + result = dolt.sql( + "SELECT `name` as name, `id` as id FROM test_table ", result_format="csv" + ) assert BASE_TEST_ROWS == result def test_dolt_sql_json(init_empty_test_repo): dolt = init_empty_test_repo - write_rows(dolt, 'test_table', BASE_TEST_ROWS, commit=True) - result = dolt.sql("SELECT `name` as name, `id` as id FROM test_table ", result_format='json') + write_rows(dolt, "test_table", BASE_TEST_ROWS, commit=True) + result = dolt.sql( + "SELECT `name` as name, `id` as id FROM test_table ", result_format="json" + ) # JSON return value preserves some type information, we cast back to a string - for row in result['rows']: - row['id'] = str(row['id']) - compare_rows_helper(BASE_TEST_ROWS, result['rows']) + for row in result["rows"]: + row["id"] = str(row["id"]) + compare_rows_helper(BASE_TEST_ROWS, result["rows"]) + def test_dolt_sql_file(init_empty_test_repo): dolt = init_empty_test_repo with tempfile.NamedTemporaryFile() as f: - write_rows(dolt, 'test_table', BASE_TEST_ROWS, commit=True) - result = dolt.sql("SELECT `name` as name, `id` as id FROM test_table ", result_file=f.name) + write_rows(dolt, "test_table", BASE_TEST_ROWS, 
commit=True) + result = dolt.sql( + "SELECT `name` as name, `id` as id FROM test_table ", result_file=f.name + ) res = read_csv_to_dict(f.name) compare_rows_helper(BASE_TEST_ROWS, res) + def test_dolt_sql_errors(doltdb): db = Dolt(doltdb) @@ -550,11 +574,13 @@ def test_dolt_sql_errors(doltdb): with pytest.raises(ValueError): db.sql(result_format="csv", query=None) + def test_no_init_error(init_empty_test_repo): dolt = init_empty_test_repo dolt.init(dolt.repo_dir, error=False) + def test_set_dolt_path_error(doltdb): db = Dolt(doltdb) set_dolt_path("dolt") @@ -564,12 +590,35 @@ def test_set_dolt_path_error(doltdb): with pytest.raises(FileNotFoundError): set_dolt_path("notdolt") from doltcli.utils import DOLT_PATH + assert DOLT_PATH == "notdolt" db.sql(test_cmd, result_format="csv") finally: set_dolt_path("dolt") + def test_no_checkout_error(init_empty_test_repo): dolt = init_empty_test_repo dolt.checkout(branch="master", error=False) + + +def test_reset(doltdb): + db = Dolt(doltdb) + db.reset() + db.reset(hard=True) + db.reset(soft=True) + db.reset(tables="t1") + db.reset(tables=["t1"]) + + +def test_reset_errors(doltdb): + db = Dolt(doltdb) + with pytest.raises(ValueError): + db.reset(hard=True, soft=True) + with pytest.raises(ValueError): + db.reset(tables="t1", hard=True) + with pytest.raises(ValueError): + db.reset(tables="t1", soft=True) + with pytest.raises(ValueError): + db.reset(tables={"t1": True}) diff --git a/tests/test_read.py b/tests/test_read.py index 3a3013b..33f0806 100644 --- a/tests/test_read.py +++ b/tests/test_read.py @@ -1,26 +1,26 @@ from typing import List + import pytest -from tests.helpers import compare_rows_helper + from doltcli import ( - Dolt, - write_rows, CREATE, UPDATE, - read_rows, - read_columns, + Dolt, columns_to_rows, + read_columns, + read_rows, + write_rows, ) +from tests.helpers import compare_rows_helper -TEST_TABLE = 'characters' +TEST_TABLE = "characters" TEST_DATA_INITIAL = [ - {'name': 'Anna', 'adjective': 'tragic', 'id': 
'1', 'date_of_death': '1877-01-01'}, - {'name': 'Vronksy', 'adjective': 'honorable', 'id': '2', 'date_of_death': ''}, - {'name': 'Oblonksy', 'adjective': 'buffoon', 'id': '3', 'date_of_death': ''}, + {"name": "Anna", "adjective": "tragic", "id": "1", "date_of_death": "1877-01-01"}, + {"name": "Vronksy", "adjective": "honorable", "id": "2", "date_of_death": ""}, + {"name": "Oblonksy", "adjective": "buffoon", "id": "3", "date_of_death": ""}, ] -TEST_DATA_UPDATE = [ - {'name': 'Levin', 'adjective': 'tiresome', 'id': '4', 'date_of_death': ''} -] +TEST_DATA_UPDATE = [{"name": "Levin", "adjective": "tiresome", "id": "4", "date_of_death": ""}] TEST_DATA_COMBINED = TEST_DATA_INITIAL + TEST_DATA_UPDATE @@ -37,7 +37,7 @@ def update_test_data(dolt: Dolt): def _write_helper(dolt: Dolt, data: List[dict], update_type: str): - write_rows(dolt, TEST_TABLE, data, update_type, ['id'], commit=True) + write_rows(dolt, TEST_TABLE, data, update_type, ["id"], commit=True) commit_hash, _ = dolt.log().popitem(last=False) return dolt, commit_hash diff --git a/tests/test_types.py b/tests/test_types.py index bf7796b..7b47775 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -3,6 +3,8 @@ from doltcli import Branch dt = datetime.datetime.strptime("2018-06-29", "%Y-%m-%d") + + def test_datetime_serialize(): cmp = dict( name="test", @@ -14,6 +16,9 @@ def test_datetime_serialize(): ) br = Branch(**cmp) assert br.dict() == cmp - assert br.json() == """ + assert ( + br.json() + == """ {"name": "test", "hash": "23", "latest_committer": null, "latest_committer_email": null, "latest_commit_date": "2018-06-29 00:00:00", "latest_commit_message": null} """.strip() + ) diff --git a/tests/test_write.py b/tests/test_write.py index 8f8f1fa..af1bc9c 100644 --- a/tests/test_write.py +++ b/tests/test_write.py @@ -1,40 +1,35 @@ import pytest -from tests.helpers import compare_rows_helper -from doltcli import ( - write_rows, - write_columns, - CREATE, - read_rows, -) +from doltcli import CREATE, 
read_rows, write_columns, write_rows +from tests.helpers import compare_rows_helper # Note that we use string values here as serializing via CSV does preserve type information in any meaningful way TEST_ROWS = [ - {'name': 'Anna', 'adjective': 'tragic', 'id': '1', 'date_of_death': '1877-01-01'}, - {'name': 'Vronksy', 'adjective': 'honorable', 'id': '2', 'date_of_death': ''}, - {'name': 'Oblonksy', 'adjective': 'buffoon', 'id': '3', 'date_of_death': ''}, + {"name": "Anna", "adjective": "tragic", "id": "1", "date_of_death": "1877-01-01"}, + {"name": "Vronksy", "adjective": "honorable", "id": "2", "date_of_death": ""}, + {"name": "Oblonksy", "adjective": "buffoon", "id": "3", "date_of_death": ""}, ] TEST_COLUMNS = { - 'name': ['Anna', 'Vronksy', 'Oblonksy'], - 'adjective': ['tragic', 'honorable', 'buffoon'], - 'id': ['1', '2', '3'], - 'date_of_birth': ['1840-01-01', '1840-01-01', '1840-01-01'], - 'date_of_death': ['1877-01-01', '', ''] + "name": ["Anna", "Vronksy", "Oblonksy"], + "adjective": ["tragic", "honorable", "buffoon"], + "id": ["1", "2", "3"], + "date_of_birth": ["1840-01-01", "1840-01-01", "1840-01-01"], + "date_of_death": ["1877-01-01", "", ""], } def test_write_rows(init_empty_test_repo): dolt = init_empty_test_repo - write_rows(dolt, 'characters', TEST_ROWS, CREATE, ['id']) - actual = read_rows(dolt, 'characters') + write_rows(dolt, "characters", TEST_ROWS, CREATE, ["id"]) + actual = read_rows(dolt, "characters") compare_rows_helper(TEST_ROWS, actual) def test_write_columns(init_empty_test_repo): dolt = init_empty_test_repo - write_columns(dolt, 'characters', TEST_COLUMNS, CREATE, ['id']) - actual = read_rows(dolt, 'characters') + write_columns(dolt, "characters", TEST_COLUMNS, CREATE, ["id"]) + actual = read_rows(dolt, "characters") expected = [{} for _ in range(len(list(TEST_COLUMNS.values())[0]))] for col_name in TEST_COLUMNS.keys(): for j, val in enumerate(TEST_COLUMNS[col_name]): @@ -43,15 +38,10 @@ def test_write_columns(init_empty_test_repo): 
compare_rows_helper(expected, actual) -DICT_OF_LISTS_UNEVEN_LENGTHS = { - 'name': ['Roger', 'Rafael', 'Novak'], - 'rank': [1, 2] -} +DICT_OF_LISTS_UNEVEN_LENGTHS = {"name": ["Roger", "Rafael", "Novak"], "rank": [1, 2]} def test_write_columns_uneven(init_empty_test_repo): repo = init_empty_test_repo with pytest.raises(ValueError): - write_columns(repo, 'players', DICT_OF_LISTS_UNEVEN_LENGTHS, CREATE, ['name']) - - + write_columns(repo, "players", DICT_OF_LISTS_UNEVEN_LENGTHS, CREATE, ["name"])