diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml index e12cb9d..07b475f 100644 --- a/.github/workflows/main.yaml +++ b/.github/workflows/main.yaml @@ -492,7 +492,7 @@ jobs: shell: bash working-directory: tests run: | - pytest -s -v --tags ${{ matrix.case_tag }} -n 4 + pytest -s -v --tags ${{ matrix.case_tag }} --log-cli-level=INFO --capture=no - name: Get Milvus status shell: bash @@ -518,7 +518,7 @@ jobs: if: ${{ ! success() }} uses: actions/upload-artifact@v2 with: - name: api-test-logs-${{ matrix.deploy_tools }}-${{ matrix.milvus_mode }} + name: api-test-logs-${{ matrix.deploy_tools }}-${{ matrix.milvus_mode }}-${{ matrix.case_tag }} path: | ./logs ./server.log diff --git a/tests/common/common_func.py b/tests/common/common_func.py index b764c4c..d90f9ac 100644 --- a/tests/common/common_func.py +++ b/tests/common/common_func.py @@ -106,6 +106,11 @@ def gen_json_field(name=ct.default_json_field_name, is_primary=False, descriptio description=description, is_primary=is_primary) return json_field +def gen_array_field(name=ct.default_array_field_name, is_primary=False, element_type=DataType.VARCHAR, description=ct.default_desc): + array_field, _ = ApiFieldSchemaWrapper().init_field_schema(name=name, dtype=DataType.ARRAY, + description=description, is_primary=is_primary, element_type=element_type, max_capacity=2000, max_length=1500) + return array_field + def gen_float_vec_field(name=ct.default_float_vec_field_name, is_primary=False, dim=ct.default_dim, description=ct.default_desc): diff --git a/tests/common/common_type.py b/tests/common/common_type.py index 6a57cd8..11f0004 100644 --- a/tests/common/common_type.py +++ b/tests/common/common_type.py @@ -34,6 +34,7 @@ default_double_field_name = "double" default_string_field_name = "varchar" default_json_field_name = "json" +default_array_field_name = "array" default_float_vec_field_name = "float_vector" another_float_vec_field_name = "float_vector1" default_binary_vec_field_name = "binary_vector" 
diff --git a/tests/requirements.txt b/tests/requirements.txt index 0cd9687..2f712e0 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -17,7 +17,7 @@ pytest-print==0.2.1 pytest-level==0.1.1 pytest-xdist==2.5.0 pytest-loguru==0.2.0 -pymilvus==2.2.9.dev18 +pymilvus==2.3.2 pytest-rerunfailures==9.1.1 git+https://github.com/Projectplace/pytest-tags ndg-httpsclient diff --git a/tests/testcases/test_restore_backup.py b/tests/testcases/test_restore_backup.py index 5b21dd4..9025186 100644 --- a/tests/testcases/test_restore_backup.py +++ b/tests/testcases/test_restore_backup.py @@ -3,7 +3,7 @@ import json import numpy as np from collections import defaultdict -from pymilvus import db, list_collections, Collection +from pymilvus import db, list_collections, Collection, DataType from base.client_base import TestcaseBase from common import common_func as cf from common import common_type as ct @@ -322,3 +322,65 @@ def test_milvus_restore_with_db_collections(self, drop_db, str_json): assert collection_name + suffix in res if not drop_db: self.compare_collections(collection_name, collection_name + suffix) + + + @pytest.mark.parametrize("include_partition_key", [True, False]) + @pytest.mark.parametrize("include_dynamic", [True, False]) + @pytest.mark.tags(CaseLabel.L1) + def test_milvus_restore_back_with_array_datatype(self, include_dynamic, include_partition_key): + self._connect() + name_origin = cf.gen_unique_str(prefix) + back_up_name = cf.gen_unique_str(backup_prefix) + fields = [cf.gen_int64_field(name="int64", is_primary=True), + cf.gen_int64_field(name="key"), + cf.gen_array_field(name="var_array", element_type=DataType.VARCHAR), + cf.gen_array_field(name="int_array", element_type=DataType.INT64), + cf.gen_float_vec_field(name="float_vector", dim=128), + ] + if include_partition_key: + partition_key = "key" + default_schema = cf.gen_collection_schema(fields, + enable_dynamic_field=include_dynamic, + partition_key_field=partition_key) + else: + 
default_schema = cf.gen_collection_schema(fields, + enable_dynamic_field=include_dynamic) + + collection_w = self.init_collection_wrap(name=name_origin, schema=default_schema, active_trace=True) + nb = 3000 + data = [ + [i for i in range(nb)], + [i % 3 for i in range(nb)], + [[str(x) for x in range(10)] for i in range(nb)], + [[int(x) for x in range(10)] for i in range(nb)], + [[np.float32(i) for i in range(128)] for _ in range(nb)], + ] + collection_w.insert(data=data) + + res = client.create_backup({"async": False, "backup_name": back_up_name, "collection_names": [name_origin]}) + log.info(f"create_backup {res}") + res = client.list_backup() + log.info(f"list_backup {res}") + if "data" in res: + all_backup = [r["name"] for r in res["data"]] + else: + all_backup = [] + assert back_up_name in all_backup + backup = client.get_backup(back_up_name) + assert backup["data"]["name"] == back_up_name + backup_collections = [backup["collection_name"] for backup in backup["data"]["collection_backups"]] + assert name_origin in backup_collections + res = client.restore_backup({"async": False, "backup_name": back_up_name, "collection_names": [name_origin], + "collection_suffix": suffix}) + log.info(f"restore_backup: {res}") + res, _ = self.utility_wrap.list_collections() + assert name_origin + suffix in res + output_fields = None + self.compare_collections(name_origin, name_origin + suffix, output_fields=output_fields) + res = client.delete_backup(back_up_name) + res = client.list_backup() + if "data" in res: + all_backup = [r["name"] for r in res["data"]] + else: + all_backup = [] + assert back_up_name not in all_backup \ No newline at end of file