diff --git a/core/core/settings.py b/core/core/settings.py index 23171d6..8e76b0c 100644 --- a/core/core/settings.py +++ b/core/core/settings.py @@ -37,6 +37,7 @@ 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', + 'rest_framework', 'i18nilize', ] diff --git a/core/db.sqlite3 b/core/db.sqlite3 index 60c13c5..193156f 100644 Binary files a/core/db.sqlite3 and b/core/db.sqlite3 differ diff --git a/core/i18nilize/tests.py b/core/i18nilize/tests.py index f0caedb..a3b1abf 100644 --- a/core/i18nilize/tests.py +++ b/core/i18nilize/tests.py @@ -1010,4 +1010,43 @@ def test_bulk_translations(self): # validate get requests response = self.client.get(reverse('translation'), query_params=query_params_get[i], headers=headers) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - self.assertEqual(response.data['error'], 'Translation not found for given language and word!') \ No newline at end of file + self.assertEqual(response.data['error'], 'Translation not found for given language and word!') + +class PullTranslations(APITestCase): + + def setUp(self): + token = Token.objects.create() + self.TEST_TOKEN = str(token.value) + + def test_pulling_multiple_assigned_translations(self): + headers = { + 'Token': self.TEST_TOKEN + } + translations_data = { + 'translations': [ + { + 'language': 'spanish', + 'hello': 'hola', + 'bye': 'chau', + }, + { + 'language': 'french', + 'hello': 'bonjour', + } + ] + } + expected_response = { + 'spanish': { + 'hello': 'hola', + 'bye': 'chau', + }, + 'french': { + 'hello': 'bonjour', + } + } + + self.client.post(reverse('process-translations'), data=translations_data, headers=headers, format='json') + + response = self.client.get(reverse('pull-translations'), headers=headers, format='json') + response_data = response.json() + self.assertEqual(response_data, expected_response) diff --git a/core/i18nilize/urls.py b/core/i18nilize/urls.py index 5048bee..83859e8 100644 --- a/core/i18nilize/urls.py +++ 
b/core/i18nilize/urls.py @@ -1,10 +1,12 @@ from django.urls import path -from . import views -from .views import TokenView, TranslationView +from .views import TokenView, TranslationView, ProcessTranslationsView, PullTranslations, TestTokenView urlpatterns = [ path('token/', TokenView.as_view(), name='create-token'), path('token/<str:value>/', TokenView.as_view(), name='read-token'), + path('test/', TestTokenView.as_view(), name='test-token'), path('translation', TranslationView.as_view(), name='translation'), - path('translations', views.ProcessTranslationsView.as_view(), name='process-translations') + path('translations', ProcessTranslationsView.as_view(), name='process-translations'), + path('translations/pull/', PullTranslations.as_view(), name='pull-translations'), + path('translations/push/', TranslationView.as_view(), name='push-translations'), ] diff --git a/core/i18nilize/views.py b/core/i18nilize/views.py index 8adc567..35f489f 100644 --- a/core/i18nilize/views.py +++ b/core/i18nilize/views.py @@ -43,6 +43,21 @@ def get(self, request, value=None): except Token.DoesNotExist: return Response({'error': 'Token not found.'}, status=status.HTTP_404_NOT_FOUND) +class TestTokenView(APIView): + """ + Endpoint to delete all translations tied to a token for testing. 
+ """ + @require_valid_token + def delete(self, request): + token = request.token + try: + translations = Translation.objects.filter(token=token) + for t in translations: + t.delete() + except Exception as e: + print(e) + return Response({'error': 'Could not delete all translations for given token.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + return Response({'message': 'Deleted all translations tied to given token.'}, status=status.HTTP_200_OK) class ProcessTranslationsView(APIView): """ @@ -304,4 +319,32 @@ def delete(self, request): # Throw a bad request if the translation doesn't exist except Translation.DoesNotExist: - return Response({"error": "translation doesn't exist!"}, status=status.HTTP_404_NOT_FOUND) \ No newline at end of file + return Response({"error": "translation doesn't exist!"}, status=status.HTTP_404_NOT_FOUND) + +class PullTranslations(APIView): + """ + Pulls all translations for a given token. + """ + @require_valid_token + def get(self, request): + token = request.token + + try: + translations = Translation.objects.filter(token=token) + + # Consolidate all translations into single dictionary following + # the format of local translation files to overwrite files easily. 
+ response_data = {} + for translation in translations: + language = translation.language.lower() + original_word = translation.original_word + translated_word = translation.translated_word + + if language not in response_data: + response_data[language] = {} + response_data[language][original_word] = translated_word + except Exception as e: + print(e) + return Response({"error": "could not fetch translations"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR) + + return Response(response_data, status=status.HTTP_200_OK) diff --git a/i18nilize/pyproject.toml b/i18nilize/pyproject.toml index fcff509..d0f577d 100644 --- a/i18nilize/pyproject.toml +++ b/i18nilize/pyproject.toml @@ -1,3 +1,38 @@ [build-system] -requires = ['setuptools>=42'] -build-backend = 'setuptools.build_meta' \ No newline at end of file +requires = ["setuptools >= 42", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "localization" +version = "1.0.0" +authors = [ + { name = "UBC Launchpad", email = "strategy@ubclaunchpad.com" } +] +description = "A localization package for microservices" +readme = "readme.md" +license = { file = "LICENSE.txt" } +keywords = ["localization", "microservices"] +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent" +] +dependencies = [ + "requests>=2.25.1", + "geocoder>=1.38.1", + "geopy>=2.2.0", + "Babel>=2.9.1", + "dirsync>=2.2.5", +] + +[project.scripts] +i18nilize = "src.internationalize.command_line:cli" + +[tool.setuptools] +packages = ["src"] + +[tool.setuptools.package-dir] +src = "src" + +# [tool.setuptools.packages.find] +# where = ["src"] diff --git a/i18nilize/setup.cfg b/i18nilize/setup_temp.cfg similarity index 100% rename from i18nilize/setup.cfg rename to i18nilize/setup_temp.cfg diff --git a/i18nilize/src/internationalize/api_helpers.py b/i18nilize/src/internationalize/api_helpers.py index 7a231e7..179a3a7 100644 --- 
a/i18nilize/src/internationalize/api_helpers.py +++ b/i18nilize/src/internationalize/api_helpers.py @@ -1,7 +1,7 @@ # api_helpers.py import requests -from . import globals +from . import globals import sys def create_token(): diff --git a/i18nilize/src/internationalize/command_line.py b/i18nilize/src/internationalize/command_line.py index e2c4124..62cf596 100644 --- a/i18nilize/src/internationalize/command_line.py +++ b/i18nilize/src/internationalize/command_line.py @@ -1,7 +1,10 @@ #from src.internationalize.helpers import add_language import json import argparse -from i18nilize.src.internationalize.helpers import add_language, add_update_translated_word, delete_translation +from src.internationalize.helpers import add_language, add_update_translated_word, delete_translation +from src.internationalize.sync_processor import pull_translations, push_translations +from src.internationalize.diffing_processor import DiffingProcessor +from src.internationalize import globals def cli(): # initialize the parser @@ -30,6 +33,15 @@ def cli(): delete_parser.add_argument('original_word') delete_parser.add_argument('translated_word') + # sub parser for pull + pull_parser = subparsers.add_parser('pull') + + # sub parser for push + push_parser = subparsers.add_parser('push') + + # sub parser for setup + setup_parser = subparsers.add_parser('setup') + # the subparser is used because different CLIs use a different amount of inputs args = parser.parse_args() @@ -41,7 +53,16 @@ def cli(): add_update_translated_word(args.language, args.original_word, args.translated_word) elif args.command == 'delete': delete_translation(args.language, args.original_word, args.translated_word) + elif args.command == 'pull': + pull_translations() + elif args.command == 'push': + push_translations() + elif args.command == 'setup': + # Quick fix for now + dp = DiffingProcessor(globals.LANGUAGES_DIR) + dp.setup() else: print("Invalid command") -cli() \ No newline at end of file +if __name__ == "__main__": 
+ cli() diff --git a/i18nilize/src/internationalize/default_languages/chinese.json b/i18nilize/src/internationalize/default_languages/chinese.json new file mode 100644 index 0000000..df53d19 --- /dev/null +++ b/i18nilize/src/internationalize/default_languages/chinese.json @@ -0,0 +1,3 @@ +{ + "thank you": "\u8c22\u8c22" +} \ No newline at end of file diff --git a/i18nilize/src/internationalize/default_languages/french.json b/i18nilize/src/internationalize/default_languages/french.json new file mode 100644 index 0000000..78b6885 --- /dev/null +++ b/i18nilize/src/internationalize/default_languages/french.json @@ -0,0 +1,4 @@ +{ + "thanks": "merci", + "hello": "bonjour" +} \ No newline at end of file diff --git a/i18nilize/src/internationalize/default_languages/german.json b/i18nilize/src/internationalize/default_languages/german.json new file mode 100644 index 0000000..2c57c1d --- /dev/null +++ b/i18nilize/src/internationalize/default_languages/german.json @@ -0,0 +1,3 @@ +{ + "thank you": "danke" +} \ No newline at end of file diff --git a/i18nilize/src/internationalize/default_languages/korean.json b/i18nilize/src/internationalize/default_languages/korean.json new file mode 100644 index 0000000..a029b92 --- /dev/null +++ b/i18nilize/src/internationalize/default_languages/korean.json @@ -0,0 +1,3 @@ +{ + "welcome": "\ud658\uc601\ud569\ub2c8\ub2e4" +} \ No newline at end of file diff --git a/i18nilize/src/internationalize/default_languages/spanish.json b/i18nilize/src/internationalize/default_languages/spanish.json new file mode 100644 index 0000000..11336ed --- /dev/null +++ b/i18nilize/src/internationalize/default_languages/spanish.json @@ -0,0 +1,4 @@ +{ + "hello": "hola", + "thanks": "gracias" +} \ No newline at end of file diff --git a/i18nilize/src/internationalize/diffing_processor.py b/i18nilize/src/internationalize/diffing_processor.py index 73cd3c6..8062667 100644 --- a/i18nilize/src/internationalize/diffing_processor.py +++ 
b/i18nilize/src/internationalize/diffing_processor.py @@ -1,6 +1,7 @@ import os import hashlib import json +import logging from dirsync import sync from . import globals from src.internationalize.helpers import compute_hash, compute_hashes, read_json_file @@ -18,6 +19,7 @@ """ class DiffingProcessor(): def __init__(self, curr_translations_dir): + logging.getLogger('dirsync').disabled = True self.diff_state_root_dir = "diff_state" self.diff_state_files_dir = os.path.join(self.diff_state_root_dir, "translations") self.metadata_file_dir = os.path.join(self.diff_state_root_dir, "metadata.json") @@ -28,8 +30,10 @@ def __init__(self, curr_translations_dir): """ def setup(self): try: - os.mkdir(self.diff_state_root_dir) - os.mkdir(self.diff_state_files_dir) + if not os.path.exists(self.diff_state_root_dir): + os.mkdir(self.diff_state_root_dir) + if not os.path.exists(self.diff_state_files_dir): + os.mkdir(self.diff_state_files_dir) with open(self.metadata_file_dir, "w") as outfile: json.dump({}, outfile) @@ -50,7 +54,9 @@ def setup(self): """ Updates translation files with new changes and updates hashes in metadata. 
""" - def update_to_current_state(self, hash_dict): + def update_to_current_state(self, hash_dict=None): + if hash_dict == None: + hash_dict = compute_hashes(self.curr_translation_files_dir) self.update_metadata(hash_dict) self.sync_translations() diff --git a/i18nilize/src/internationalize/error_handler.py b/i18nilize/src/internationalize/error_handler.py index 2abbbb9..bf4dc95 100644 --- a/i18nilize/src/internationalize/error_handler.py +++ b/i18nilize/src/internationalize/error_handler.py @@ -24,6 +24,10 @@ def verify_languages(self): all_language_files = os.listdir(self.translations_dir) for language_file in all_language_files: + absolute_file_path = os.path.join(self.translations_dir, language_file) + if os.path.isdir(absolute_file_path): + continue + error = self.handle_error(language_file) if error != "": errors[language_file] = error diff --git a/i18nilize/src/internationalize/globals.py b/i18nilize/src/internationalize/globals.py index a1b68e8..cc29ad1 100644 --- a/i18nilize/src/internationalize/globals.py +++ b/i18nilize/src/internationalize/globals.py @@ -1,13 +1,17 @@ # globals.py +# Test Token: "c84234c3-b507-4ed0-a6eb-8b10116cdef1" + class GlobalToken: def __init__(self): - self.value = "dummy" + self.value = "dummy" API_BASE_URL = "http://localhost:8000/api/" TOKEN_ENDPOINT = f"{API_BASE_URL}token/" TRANSLATIONS_ENDPOINT = f"{API_BASE_URL}translations/" +PULL_TRANSLATIONS_ENDPOINT = f"{TRANSLATIONS_ENDPOINT}pull/" +PUSH_TRANSLATIONS_ENDPOINT = f"{TRANSLATIONS_ENDPOINT}push/" LANGUAGES_DIR = 'src/internationalize/languages' diff --git a/i18nilize/src/internationalize/helpers.py b/i18nilize/src/internationalize/helpers.py index a5c399f..d1fe71a 100644 --- a/i18nilize/src/internationalize/helpers.py +++ b/i18nilize/src/internationalize/helpers.py @@ -4,13 +4,13 @@ import hashlib import requests from . 
import globals -from internationalize.error_handler import ErrorHandler +from src.internationalize.error_handler import ErrorHandler # Function to parse json file, given its path def get_json(file_path): try: # open file and parse - with open(file_path, 'r') as file: + with open(file_path, 'r', encoding='utf8') as file: data = json.load(file) except FileNotFoundError: print("File not found") diff --git a/i18nilize/src/internationalize/sync_processor.py b/i18nilize/src/internationalize/sync_processor.py new file mode 100644 index 0000000..8ff622e --- /dev/null +++ b/i18nilize/src/internationalize/sync_processor.py @@ -0,0 +1,68 @@ +import requests, os, json +from . import globals +from src.internationalize.diffing_processor import DiffingProcessor + +""" +Pulls all translations assigned to the microservices' token +and overwrites all language files to sync translations. +""" +def pull_translations(write_directory=globals.LANGUAGES_DIR): + token = globals.token.value + diff_processor = DiffingProcessor(write_directory) + + try: + all_translations = requests.get(globals.PULL_TRANSLATIONS_ENDPOINT, headers={'Token': token}) + except Exception as e: + print("Error: Could not fetch translations from database.", e) + return + + # Overwrite all translation files + all_translations_dict = all_translations.json() + for language, translations in all_translations_dict.items(): + file_name = f"{language}.json" + curr_file_path = os.path.join(write_directory, file_name) + with open(curr_file_path, "w+") as file: + json.dump(translations, file, indent=4) + + diff_processor.update_to_current_state() + print("Pulled all translations from the database.") + +""" +Push all local translations to the API. 
+""" +def push_translations(translations_dir=globals.LANGUAGES_DIR): + token = globals.token.value + diff_processor = DiffingProcessor(translations_dir) + changed_translations = diff_processor.get_changed_translations() + + for language in changed_translations: + created = changed_translations[language]["created"] + modified = changed_translations[language]["modified"] + deleted = changed_translations[language]["deleted"] + + # Post a new entry for each new translation + for original_word in created: + try: + response = requests.post(globals.PUSH_TRANSLATIONS_ENDPOINT, headers={'Token': token}, + params={'language': language, original_word: created[original_word]}) + except Exception as e: + print("Error: Could not create translation.", e) + + # Patch the appropriate entry for each modified translation + for original_word in modified: + try: + response = requests.patch(globals.PUSH_TRANSLATIONS_ENDPOINT, headers={'Token': token}, + params={'language': language, original_word: modified[original_word]}) + except Exception as e: + print("Error: Could not patch translation.", e) + + # Delete the appropriate entry for each deleted translation + for original_word in deleted: + try: + response = requests.delete(globals.PUSH_TRANSLATIONS_ENDPOINT, headers={'Token': token}, + params={'language': language, original_word: deleted[original_word]}) + except Exception as e: + print("Error: Could not delete translation.", e) + + diff_processor.update_to_current_state() + print(f"Pushed all translations from the database.") diff --git a/i18nilize/tests/test_cli.py b/i18nilize/tests/test_cli.py index f742f86..5a64e8c 100644 --- a/i18nilize/tests/test_cli.py +++ b/i18nilize/tests/test_cli.py @@ -1,6 +1,9 @@ -import unittest, os, json, timeit +import unittest, os, json, timeit, shutil, requests from unittest.mock import patch from src.internationalize.helpers import delete_translation, get_json, make_translation_map, get_translation, add_language, add_update_translated_word +from 
src.internationalize import globals +from src.internationalize.sync_processor import pull_translations, push_translations +from src.internationalize.diffing_processor import DiffingProcessor # Create your tests here. # To test: @@ -9,6 +12,7 @@ class TestCLI(unittest.TestCase): def setUp(self): self.languages_dir = "src/internationalize/languages" + self.reset_token_endpoint = globals.API_BASE_URL + "test/" os.makedirs(self.languages_dir, exist_ok=True) def test_add_new_language(self): @@ -76,14 +80,14 @@ def test_delete_translation_success(self): language = "German" add_language(language) file_path = os.path.join(self.languages_dir, f"{language.lower()}.json") - + initial_translations = { "goodbye": "auf Wiedersehen", "thank you": "danke" } with open(file_path, "w") as file: json.dump(initial_translations, file, indent=4) - + data = get_json(file_path) self.assertIn("goodbye", data) self.assertEqual(data["goodbye"], "auf Wiedersehen") @@ -111,13 +115,13 @@ def test_delete_translation_word_does_not_exist(self): language = "Chinese" add_language(language) file_path = os.path.join(self.languages_dir, f"{language.lower()}.json") - + initial_translations = { "thank you": "谢谢" } with open(file_path, "w") as file: json.dump(initial_translations, file, indent=4) - + data = get_json(file_path) self.assertIn("thank you", data) self.assertNotIn("good morning", data) @@ -136,13 +140,13 @@ def test_delete_translation_word_mismatch(self): language = "Korean" add_language(language) file_path = os.path.join(self.languages_dir, f"{language.lower()}.json") - + initial_translations = { "welcome": "환영합니다" } with open(file_path, "w") as file: json.dump(initial_translations, file, indent=4) - + data = get_json(file_path) self.assertIn("welcome", data) self.assertEqual(data["welcome"], "환영합니다") @@ -157,5 +161,120 @@ def test_delete_translation_word_mismatch(self): self.assertIn("welcome", data) self.assertEqual(data["welcome"], "환영합니다") + + """ + Commenting out push and pull tests 
because they need the backend to be running. + GitHub CI is not configured to start the backend before running tests. + """ + + # def test_pull_translations(self): + # # Set global token to test token (Note: test will fail if translations + # # tied token are modified) + # prev_token = globals.token.value + # test_token = "c84234c3-b507-4ed0-a6eb-8b10116cdef1" + # globals.token.value = test_token + # + # # Initialize DiffingProcessor with test directory + # temp_dir_path = os.path.join(self.languages_dir, "temp") + # diff_processor = DiffingProcessor(temp_dir_path) + # if os.path.exists(diff_processor.diff_state_root_dir): + # shutil.rmtree(diff_processor.diff_state_root_dir) + # diff_processor.setup() + # + # # Create temporary directories to pull translations + # if os.path.exists(temp_dir_path): + # shutil.rmtree(temp_dir_path) + # os.mkdir(temp_dir_path) + # + # # Copy test files into temp dir to test overwriting + # files_to_copy = ["spanish.json", "french.json"] + # for file_name in files_to_copy: + # curr_file_path = os.path.join(self.languages_dir, file_name) + # new_file_path = os.path.join(temp_dir_path, file_name) + # shutil.copy(curr_file_path, new_file_path) + # + # # Expected content after pulling from API + # expected_file_content = { + # "fr.json": { + # "hello": "bonjour" + # }, + # "french.json": { + # "hello": "bonjour" + # }, + # "spanish.json": { + # "hello": "hola", + # "bye": "chau", + # "what": "que", + # "como": "how", + # "codigo": "code" + # } + # } + # + # pull_translations(write_directory=temp_dir_path) + # for file_name in os.listdir(temp_dir_path): + # file_path = os.path.join(temp_dir_path, file_name) + # file_content = get_json(file_path) + # self.assertEqual(file_content, expected_file_content[file_name]) + # + # # Cleanup + # shutil.rmtree(diff_processor.diff_state_root_dir) + # shutil.rmtree(temp_dir_path) + # globals.token.value = prev_token + # + # def test_push_translations(self): + # # Set global token to test token + # 
prev_token = globals.token.value + # test_token = "a373fc5e-5b65-463e-b89e-1a37706a69dd" + # globals.token.value = test_token + # + # # Initialize DiffingProcessor with test directory + # temp_dir_path = os.path.join(self.languages_dir, "temp") + # diff_processor = DiffingProcessor(temp_dir_path) + # + # # Remove any persisting test data from previous tests (in case of a test failure) + # if os.path.exists(diff_processor.diff_state_root_dir): + # shutil.rmtree(diff_processor.diff_state_root_dir) + # if os.path.exists(temp_dir_path): + # shutil.rmtree(temp_dir_path) + # os.mkdir(temp_dir_path) + # + # # Initialize with no translations in either state + # diff_processor.setup() + # + # # Deletes all translations tied to test_token + # response = requests.delete(self.reset_token_endpoint, headers={'Token': test_token}) + # self.assertTrue(response.ok) + # + # # Copy files to push to API + # files_to_copy = ["spanish.json", "french.json"] + # for file_name in files_to_copy: + # curr_file_path = os.path.join(self.languages_dir, file_name) + # new_file_path = os.path.join(temp_dir_path, file_name) + # shutil.copy(curr_file_path, new_file_path) + # + # # Push changes, delete copied files, and pull + # push_translations(translations_dir=temp_dir_path) + # shutil.rmtree(temp_dir_path) + # os.mkdir(temp_dir_path) + # pull_translations(write_directory=temp_dir_path) + # + # # Expected content after pulling from API (same content that was pushed) + # expected_file_content = {} + # for file_name in files_to_copy: + # copied_file_path = os.path.join(self.languages_dir, file_name) + # expected_file_content[file_name] = get_json(copied_file_path) + # + # pulled_files = os.listdir(temp_dir_path) + # self.assertEqual(len(pulled_files), 2) + # for file_name in os.listdir(temp_dir_path): + # file_path = os.path.join(temp_dir_path, file_name) + # file_content = get_json(file_path) + # self.assertEqual(file_content, expected_file_content[file_name]) + # + # # Cleanup + # 
shutil.rmtree(diff_processor.diff_state_root_dir) + # shutil.rmtree(temp_dir_path) + # globals.token.value = prev_token + if __name__ == '__main__': unittest.main()