From b603ad4963b10858fa06442462905ab729471da9 Mon Sep 17 00:00:00 2001 From: himkt Date: Sat, 13 Jan 2024 20:37:48 +0900 Subject: [PATCH 1/2] feat: src-layout --- pyproject.toml | 1 + {konoha => src/konoha}/__init__.py | 0 {konoha => src/konoha}/api/server.py | 0 {konoha => src/konoha}/api/v1/batch_tokenization.py | 0 {konoha => src/konoha}/api/v1/tokenization.py | 0 {konoha => src/konoha}/data/__init__.py | 0 {konoha => src/konoha}/data/resource.py | 0 {konoha => src/konoha}/data/token.py | 0 {konoha => src/konoha}/sentence_tokenizer.py | 0 {konoha => src/konoha}/word_tokenizer.py | 0 {konoha => src/konoha}/word_tokenizers/__init__.py | 0 {konoha => src/konoha}/word_tokenizers/character_tokenizer.py | 0 {konoha => src/konoha}/word_tokenizers/janome_tokenizer.py | 0 {konoha => src/konoha}/word_tokenizers/kytea_tokenizer.py | 0 {konoha => src/konoha}/word_tokenizers/mecab_tokenizer.py | 0 {konoha => src/konoha}/word_tokenizers/nagisa_tokenizer.py | 0 .../konoha}/word_tokenizers/sentencepiece_tokenizer.py | 0 {konoha => src/konoha}/word_tokenizers/sudachi_tokenizer.py | 0 {konoha => src/konoha}/word_tokenizers/tokenizer.py | 0 {konoha => src/konoha}/word_tokenizers/whitespace_tokenizer.py | 0 20 files changed, 1 insertion(+) rename {konoha => src/konoha}/__init__.py (100%) rename {konoha => src/konoha}/api/server.py (100%) rename {konoha => src/konoha}/api/v1/batch_tokenization.py (100%) rename {konoha => src/konoha}/api/v1/tokenization.py (100%) rename {konoha => src/konoha}/data/__init__.py (100%) rename {konoha => src/konoha}/data/resource.py (100%) rename {konoha => src/konoha}/data/token.py (100%) rename {konoha => src/konoha}/sentence_tokenizer.py (100%) rename {konoha => src/konoha}/word_tokenizer.py (100%) rename {konoha => src/konoha}/word_tokenizers/__init__.py (100%) rename {konoha => src/konoha}/word_tokenizers/character_tokenizer.py (100%) rename {konoha => src/konoha}/word_tokenizers/janome_tokenizer.py (100%) rename {konoha => 
src/konoha}/word_tokenizers/kytea_tokenizer.py (100%) rename {konoha => src/konoha}/word_tokenizers/mecab_tokenizer.py (100%) rename {konoha => src/konoha}/word_tokenizers/nagisa_tokenizer.py (100%) rename {konoha => src/konoha}/word_tokenizers/sentencepiece_tokenizer.py (100%) rename {konoha => src/konoha}/word_tokenizers/sudachi_tokenizer.py (100%) rename {konoha => src/konoha}/word_tokenizers/tokenizer.py (100%) rename {konoha => src/konoha}/word_tokenizers/whitespace_tokenizer.py (100%) diff --git a/pyproject.toml b/pyproject.toml index 58da92d..d384127 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,6 +7,7 @@ version = "5.5.2" description = "A tiny sentence/word tokenizer for Japanese text written in Python" authors = ["himkt "] readme = "README.md" +packages = [{ include = "konoha", from = "src" }] license = "MIT" [tool.poetry.dependencies] diff --git a/konoha/__init__.py b/src/konoha/__init__.py similarity index 100% rename from konoha/__init__.py rename to src/konoha/__init__.py diff --git a/konoha/api/server.py b/src/konoha/api/server.py similarity index 100% rename from konoha/api/server.py rename to src/konoha/api/server.py diff --git a/konoha/api/v1/batch_tokenization.py b/src/konoha/api/v1/batch_tokenization.py similarity index 100% rename from konoha/api/v1/batch_tokenization.py rename to src/konoha/api/v1/batch_tokenization.py diff --git a/konoha/api/v1/tokenization.py b/src/konoha/api/v1/tokenization.py similarity index 100% rename from konoha/api/v1/tokenization.py rename to src/konoha/api/v1/tokenization.py diff --git a/konoha/data/__init__.py b/src/konoha/data/__init__.py similarity index 100% rename from konoha/data/__init__.py rename to src/konoha/data/__init__.py diff --git a/konoha/data/resource.py b/src/konoha/data/resource.py similarity index 100% rename from konoha/data/resource.py rename to src/konoha/data/resource.py diff --git a/konoha/data/token.py b/src/konoha/data/token.py similarity index 100% rename from konoha/data/token.py
rename to src/konoha/data/token.py diff --git a/konoha/sentence_tokenizer.py b/src/konoha/sentence_tokenizer.py similarity index 100% rename from konoha/sentence_tokenizer.py rename to src/konoha/sentence_tokenizer.py diff --git a/konoha/word_tokenizer.py b/src/konoha/word_tokenizer.py similarity index 100% rename from konoha/word_tokenizer.py rename to src/konoha/word_tokenizer.py diff --git a/konoha/word_tokenizers/__init__.py b/src/konoha/word_tokenizers/__init__.py similarity index 100% rename from konoha/word_tokenizers/__init__.py rename to src/konoha/word_tokenizers/__init__.py diff --git a/konoha/word_tokenizers/character_tokenizer.py b/src/konoha/word_tokenizers/character_tokenizer.py similarity index 100% rename from konoha/word_tokenizers/character_tokenizer.py rename to src/konoha/word_tokenizers/character_tokenizer.py diff --git a/konoha/word_tokenizers/janome_tokenizer.py b/src/konoha/word_tokenizers/janome_tokenizer.py similarity index 100% rename from konoha/word_tokenizers/janome_tokenizer.py rename to src/konoha/word_tokenizers/janome_tokenizer.py diff --git a/konoha/word_tokenizers/kytea_tokenizer.py b/src/konoha/word_tokenizers/kytea_tokenizer.py similarity index 100% rename from konoha/word_tokenizers/kytea_tokenizer.py rename to src/konoha/word_tokenizers/kytea_tokenizer.py diff --git a/konoha/word_tokenizers/mecab_tokenizer.py b/src/konoha/word_tokenizers/mecab_tokenizer.py similarity index 100% rename from konoha/word_tokenizers/mecab_tokenizer.py rename to src/konoha/word_tokenizers/mecab_tokenizer.py diff --git a/konoha/word_tokenizers/nagisa_tokenizer.py b/src/konoha/word_tokenizers/nagisa_tokenizer.py similarity index 100% rename from konoha/word_tokenizers/nagisa_tokenizer.py rename to src/konoha/word_tokenizers/nagisa_tokenizer.py diff --git a/konoha/word_tokenizers/sentencepiece_tokenizer.py b/src/konoha/word_tokenizers/sentencepiece_tokenizer.py similarity index 100% rename from konoha/word_tokenizers/sentencepiece_tokenizer.py 
rename to src/konoha/word_tokenizers/sentencepiece_tokenizer.py diff --git a/konoha/word_tokenizers/sudachi_tokenizer.py b/src/konoha/word_tokenizers/sudachi_tokenizer.py similarity index 100% rename from konoha/word_tokenizers/sudachi_tokenizer.py rename to src/konoha/word_tokenizers/sudachi_tokenizer.py diff --git a/konoha/word_tokenizers/tokenizer.py b/src/konoha/word_tokenizers/tokenizer.py similarity index 100% rename from konoha/word_tokenizers/tokenizer.py rename to src/konoha/word_tokenizers/tokenizer.py diff --git a/konoha/word_tokenizers/whitespace_tokenizer.py b/src/konoha/word_tokenizers/whitespace_tokenizer.py similarity index 100% rename from konoha/word_tokenizers/whitespace_tokenizer.py rename to src/konoha/word_tokenizers/whitespace_tokenizer.py From c8dedf075da997880186cf5d5dda1bf6fa55bb1d Mon Sep 17 00:00:00 2001 From: himkt Date: Sat, 13 Jan 2024 20:44:09 +0900 Subject: [PATCH 2/2] fix: ruff check target --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 183bd0c..0f2d7c3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -36,8 +36,8 @@ jobs: - run: pip install ruff pytest httpx - run: | - ruff check konoha - ruff format --diff konoha + ruff check src + ruff format --diff src - run: pytest env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}