From 0dac57da70cefe2277e854b1e3894f0ba2c75330 Mon Sep 17 00:00:00 2001 From: juliusvonkohout <45896133+juliusvonkohout@users.noreply.github.com> Date: Mon, 24 Feb 2025 11:45:13 +0100 Subject: [PATCH] we have to move since dockerhub introduces a stricter rate limit. 3.9 is also EOL soon in October 2025 Signed-off-by: juliusvonkohout <45896133+juliusvonkohout@users.noreply.github.com> --- backend/Dockerfile | 2 +- backend/metadata_writer/Dockerfile | 2 +- .../metadata_writer/update_requirements.sh | 2 +- .../config/testdata/sample_pipeline.yaml | 2 +- .../resource/resource_manager_test.go | 12 +++---- .../server/pipeline_upload_server_test.go | 12 +++---- .../server/test/pipeline_with_volume.yaml | 6 ++-- .../apiserver/server/test/v2-hello-world.json | 2 +- .../server/test/xgboost_sample_pipeline.yaml | 14 ++++---- .../apiserver/storage/pipeline_store_test.go | 2 +- .../template/testdata/hello_world.yaml | 2 +- .../testdata/hello_world_schema_2_0_0.yaml | 2 +- .../testdata/pipeline_with_volume.yaml | 6 ++-- backend/src/cache/server/mutation_test.go | 14 ++++---- backend/src/v2/cacheutils/cache_test.go | 10 +++--- .../create_mount_delete_dynamic_pvc.yaml | 2 +- .../testdata/create_pod_metadata.yaml | 2 +- .../argocompiler/testdata/hello_world.yaml | 2 +- .../testdata/hello_world_log_level.yaml | 2 +- .../testdata/hello_world_run_as_user.yaml | 2 +- .../tektoncompiler/testdata/condition_ir.yaml | 10 +++--- .../tektoncompiler/testdata/exit_handler.yaml | 12 +++---- .../testdata/exit_handler_ir.yaml | 6 ++-- .../tektoncompiler/testdata/hello_world.yaml | 4 +-- .../tektoncompiler/testdata/loop_static.yaml | 12 +++---- .../testdata/loop_static_ir.yaml | 6 ++-- .../testdata/mnist_pipeline.yaml | 24 ++++++------- .../testdata/mnist_pipeline_ir.yaml | 12 +++---- .../tektoncompiler/testdata/nestedloop.yaml | 16 ++++----- .../testdata/nestedloop_ir.yaml | 8 ++--- .../tektoncompiler/testdata/pod_metadata.yaml | 4 +-- .../testdata/component_used_twice.json | 2 +- 
.../create_mount_delete_dynamic_pvc.json | 4 +-- .../src/v2/compiler/testdata/hello_world.json | 2 +- backend/src/v2/driver/driver_test.go | 20 +++++------ backend/src/v2/test/Dockerfile | 2 +- .../src/v2/test/components/run_sample.yaml | 2 +- backend/test/resources/v2-hello-world.yaml | 2 +- backend/update_requirements.sh | 2 +- .../HuggingFace/Load_dataset/component.py | 2 +- .../HuggingFace/Load_dataset/component.yaml | 2 +- .../HuggingFace/Split_dataset/component.py | 2 +- .../HuggingFace/Split_dataset/component.yaml | 2 +- .../contrib/kfp/Run_component/component.py | 2 +- .../contrib/kfp/Run_component/component.yaml | 2 +- components/kserve/Dockerfile | 2 +- ...htweight_python_functions_v2_pipeline.json | 4 +-- ...ight_python_functions_v2_pipeline_rev.yaml | 4 +-- .../pipeline_with_loops_and_conditions.json | 26 +++++++------- .../pipeline_with_loops_and_conditions.yaml | 26 +++++++------- .../v2/pipeline/protobuf_value_params_v2.json | 2 +- .../v2/pipeline/xgboost_sample_pipeline.json | 14 ++++---- .../v2/pipeline/xgboost_sample_pipeline.yaml | 14 ++++---- .../tabs/StaticNodeDetailsV2.test.tsx | 4 +-- .../test/create_mount_delete_dynamic_pvc.yaml | 4 +-- ...ight_python_functions_v2_pipeline_rev.yaml | 4 +-- .../pipeline_with_loops_and_conditions.yaml | 26 +++++++------- .../data/test/xgboost_sample_pipeline.yaml | 14 ++++---- frontend/src/lib/v2/WorkflowUtils.test.ts | 4 +-- hack/update-requirements.sh | 2 +- .../test/snapshot/data/config_map_as_env.yaml | 2 +- .../test/snapshot/data/config_map_as_vol.yaml | 2 +- .../data/create_mount_delete_dynamic_pvc.yaml | 4 +-- .../create_mount_delete_existing_pvc.yaml | 2 +- ..._delete_existing_pvc_from_task_output.yaml | 4 +-- .../test/snapshot/data/empty_dir_mounts.yaml | 2 +- .../test/snapshot/data/field_path_as_env.yaml | 2 +- .../data/general_ephemeral_volume.yaml | 2 +- .../snapshot/data/image_pull_secrets.yaml | 2 +- .../test/snapshot/data/node_selector.yaml | 2 +- .../test/snapshot/data/secret_as_env.yaml | 2 +- 
.../test/snapshot/data/secret_as_vol.yaml | 2 +- .../python/test/snapshot/data/timeout.yaml | 2 +- .../python/test/snapshot/data/toleration.yaml | 2 +- .../deployment.yaml | 2 +- samples/test/metrics_visualization_v2.py | 6 ++-- sdk/python/kfp/cli/compile_test.py | 4 +-- sdk/python/kfp/cli/component_test.py | 14 ++++---- sdk/python/kfp/compiler/compiler_test.py | 16 ++++----- .../components/load_yaml_utilities_test.py | 2 +- sdk/python/kfp/dsl/component_decorator.py | 2 +- .../kfp/dsl/component_decorator_test.py | 4 +-- sdk/python/kfp/dsl/component_factory.py | 4 +-- .../dsl/container_component_decorator_test.py | 2 +- sdk/python/kfp/dsl/pipeline_task.py | 2 +- sdk/python/kfp/dsl/placeholders.py | 4 +-- sdk/python/kfp/dsl/placeholders_test.py | 6 ++-- sdk/python/kfp/dsl/structures_test.py | 4 +-- .../kfp/local/docker_task_handler_test.py | 2 +- .../test_data/components/add_numbers.yaml | 2 +- .../component_with_metadata_fields.yaml | 2 +- .../component_with_pip_install.yaml | 2 +- .../component_with_pip_install_in_venv.yaml | 2 +- .../component_with_task_final_status.yaml | 2 +- .../test_data/components/concat_message.yaml | 2 +- .../test_data/components/container_io.py | 2 +- .../test_data/components/container_io.yaml | 2 +- .../components/container_no_input.py | 2 +- .../components/container_no_input.yaml | 2 +- .../container_with_concat_placeholder.py | 2 +- .../container_with_concat_placeholder.yaml | 2 +- .../container_with_if_placeholder.py | 2 +- .../container_with_if_placeholder.yaml | 2 +- .../container_with_placeholder_in_fstring.py | 2 +- ...container_with_placeholder_in_fstring.yaml | 2 +- .../containerized_python_component.py | 2 +- .../test_data/components/dict_input.yaml | 2 +- sdk/python/test_data/components/identity.yaml | 2 +- .../test_data/components/input_artifact.yaml | 2 +- .../test_data/components/nested_return.yaml | 2 +- .../test_data/components/output_metrics.yaml | 2 +- .../test_data/components/preprocess.yaml | 2 +- 
.../component_with_optional_inputs.yaml | 2 +- .../component_with_pip_index_urls.yaml | 2 +- .../components_with_optional_artifacts.yaml | 4 +-- .../container_component_with_no_inputs.py | 2 +- .../container_component_with_no_inputs.yaml | 2 +- .../pipelines/cross_loop_after_topology.yaml | 16 ++++----- .../pipelines/if_elif_else_complex.yaml | 26 +++++++------- .../if_elif_else_with_oneof_parameters.yaml | 10 +++--- .../if_else_with_oneof_artifacts.yaml | 10 +++--- .../if_else_with_oneof_parameters.yaml | 8 ++--- ...lightweight_python_functions_pipeline.yaml | 4 +-- ...tweight_python_functions_with_outputs.yaml | 8 ++--- .../parallelfor_fan_in/artifacts_complex.yaml | 10 +++--- .../parallelfor_fan_in/artifacts_simple.py | 2 +- .../parallelfor_fan_in/artifacts_simple.yaml | 6 ++-- .../conditional_producer_and_consumers.yaml | 4 +-- .../nested_with_parameters.yaml | 8 ++--- .../parameters_complex.yaml | 14 ++++---- .../parallelfor_fan_in/parameters_simple.yaml | 4 +-- .../pipeline_producer_consumer.yaml | 8 ++--- .../pipelines/pipeline_as_exit_task.yaml | 8 ++--- .../pipelines/pipeline_in_pipeline.yaml | 4 +-- .../pipeline_in_pipeline_complex.yaml | 4 +-- ...pipeline_in_pipeline_loaded_from_yaml.yaml | 6 ++-- .../pipelines/pipeline_with_condition.yaml | 10 +++--- ...namic_task_output_custom_training_job.yaml | 8 ++--- ...peline_with_dynamic_importer_metadata.yaml | 2 +- ...namic_task_output_custom_training_job.yaml | 6 ++-- .../pipelines/pipeline_with_env.yaml | 2 +- .../pipelines/pipeline_with_exit_handler.yaml | 6 ++-- .../pipeline_with_google_artifact_type.yaml | 4 +-- .../pipelines/pipeline_with_importer.yaml | 4 +-- .../pipelines/pipeline_with_loops.yaml | 16 ++++----- .../pipeline_with_loops_and_conditions.yaml | 26 +++++++------- .../pipeline_with_metadata_fields.yaml | 4 +-- .../pipeline_with_metrics_outputs.yaml | 4 +-- .../pipeline_with_multiple_exit_handlers.yaml | 14 ++++---- .../pipeline_with_nested_conditions.yaml | 16 ++++----- 
.../pipelines/pipeline_with_nested_loops.yaml | 6 ++-- .../pipelines/pipeline_with_outputs.yaml | 4 +-- ...eline_with_parallelfor_list_artifacts.yaml | 8 ++--- ...pipeline_with_parallelfor_parallelism.yaml | 34 +++++++++---------- ...ipeline_with_params_containing_format.yaml | 6 ++-- .../pipelines/pipeline_with_placeholders.yaml | 10 +++--- .../pipelines/pipeline_with_retry.yaml | 2 +- .../pipeline_with_task_final_status.yaml | 6 ++-- .../pipeline_with_task_final_status_yaml.py | 4 +-- .../pipeline_with_task_final_status_yaml.yaml | 4 +-- ...th_task_using_ignore_upstream_failure.yaml | 4 +-- .../pythonic_artifact_with_single_return.yaml | 2 +- ...onic_artifacts_with_list_of_artifacts.yaml | 4 +-- ...honic_artifacts_with_multiple_returns.yaml | 4 +-- .../pipeline_with_task_final_status.yaml | 6 ++-- test/kfp-functional-test/README.md | 4 +-- 166 files changed, 483 insertions(+), 483 deletions(-) diff --git a/backend/Dockerfile b/backend/Dockerfile index e701644eb93..c255baa1f82 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -28,7 +28,7 @@ COPY . . RUN GO111MODULE=on go build -o /bin/apiserver backend/src/apiserver/*.go # 2. Compile preloaded pipeline samples -FROM python:3.9 as compiler +FROM public.ecr.aws/docker/library/python:3.12 as compiler RUN apt-get update -y && apt-get install --no-install-recommends -y -q default-jdk python3-setuptools python3-dev jq RUN wget https://bootstrap.pypa.io/get-pip.py && python3 get-pip.py COPY backend/requirements.txt . 
diff --git a/backend/metadata_writer/Dockerfile b/backend/metadata_writer/Dockerfile index 749807d3666..fb17fd9d073 100644 --- a/backend/metadata_writer/Dockerfile +++ b/backend/metadata_writer/Dockerfile @@ -1,5 +1,5 @@ # ml-metadata package depends on tensorflow package -FROM python:3.9 +FROM public.ecr.aws/docker/library/python:3.12 COPY backend/metadata_writer/requirements.txt /kfp/metadata_writer/ RUN python3 -m pip install -r /kfp/metadata_writer/requirements.txt diff --git a/backend/metadata_writer/update_requirements.sh b/backend/metadata_writer/update_requirements.sh index a907c2a9fc5..fef4ba3d7b3 100755 --- a/backend/metadata_writer/update_requirements.sh +++ b/backend/metadata_writer/update_requirements.sh @@ -1,5 +1,5 @@ #!/bin/bash # This image should be in sync with Dockerfile. -IMAGE="python:3.9" +IMAGE="public.ecr.aws/docker/library/python:3.12" ../../hack/update-requirements.sh $IMAGE requirements.txt diff --git a/backend/src/apiserver/config/testdata/sample_pipeline.yaml b/backend/src/apiserver/config/testdata/sample_pipeline.yaml index 4a0edac76e4..d95c31ecf52 100644 --- a/backend/src/apiserver/config/testdata/sample_pipeline.yaml +++ b/backend/src/apiserver/config/testdata/sample_pipeline.yaml @@ -32,7 +32,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef hello_world():\n print('hello')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-hello-world root: diff --git a/backend/src/apiserver/resource/resource_manager_test.go b/backend/src/apiserver/resource/resource_manager_test.go index e7e7eddca5e..87ab5d9ad42 100644 --- a/backend/src/apiserver/resource/resource_manager_test.go +++ b/backend/src/apiserver/resource/resource_manager_test.go @@ -3431,7 +3431,7 @@ spec: - name: ENABLE_CACHING valueFrom: fieldRef: {fieldPath: 'metadata.labels[''pipelines.kubeflow.org/enable_caching'']'} - - {name: KFP_V2_IMAGE, value: 'python:3.9'} + 
- {name: KFP_V2_IMAGE, value: 'public.ecr.aws/docker/library/python:3.12'} - {name: KFP_V2_RUNTIME_INFO, value: '{"inputParameters": {"some_int": {"type": "INT"}, "uri": {"type": "STRING"}}, "inputArtifacts": {}, "outputParameters": {"output_parameter_one": {"type": "INT", "path": "/tmp/outputs/output_parameter_one/data"}}, @@ -3439,7 +3439,7 @@ spec: "instanceSchema": "", "metadataPath": "/tmp/outputs/output_dataset_one/data"}}}'} envFrom: - configMapRef: {name: metadata-grpc-configmap, optional: true} - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 volumeMounts: - {mountPath: /kfp-launcher, name: kfp-launcher} inputs: @@ -3533,7 +3533,7 @@ spec: - name: ENABLE_CACHING valueFrom: fieldRef: {fieldPath: 'metadata.labels[''pipelines.kubeflow.org/enable_caching'']'} - - {name: KFP_V2_IMAGE, value: 'python:3.9'} + - {name: KFP_V2_IMAGE, value: 'public.ecr.aws/docker/library/python:3.12'} - {name: KFP_V2_RUNTIME_INFO, value: '{"inputParameters": {"num_steps": {"type": "INT"}}, "inputArtifacts": {"dataset": {"metadataPath": "/tmp/inputs/dataset/data", "schemaTitle": "system.Dataset", "instanceSchema": ""}}, "outputParameters": @@ -3541,7 +3541,7 @@ spec: "", "metadataPath": "/tmp/outputs/model/data"}}}'} envFrom: - configMapRef: {name: metadata-grpc-configmap, optional: true} - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 volumeMounts: - {mountPath: /kfp-launcher, name: kfp-launcher} inputs: @@ -4065,7 +4065,7 @@ deploymentSpec: _parsed_args = vars(_parser.parse_args()) _outputs = hello_world(**_parsed_args) - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: hello-world root: @@ -4098,7 +4098,7 @@ deploymentSpec: executors: exec-hello-world: container: - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipelines/p1/versions/v1 root: diff --git a/backend/src/apiserver/server/pipeline_upload_server_test.go 
b/backend/src/apiserver/server/pipeline_upload_server_test.go index 14264d59f5d..ba5a8e6e28a 100644 --- a/backend/src/apiserver/server/pipeline_upload_server_test.go +++ b/backend/src/apiserver/server/pipeline_upload_server_test.go @@ -628,7 +628,7 @@ deploymentSpec: _parsed_args = vars(_parser.parse_args()) _outputs = hello_world(**_parsed_args) - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: hello-world root: @@ -685,7 +685,7 @@ deploymentSpec: _parsed_args = vars(_parser.parse_args()) _outputs = hello_world(**_parsed_args) - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: hello-world root: @@ -726,7 +726,7 @@ deploymentSpec: executors: exec-hello-world: container: - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: hello-world- root: @@ -751,7 +751,7 @@ deploymentSpec: executors: exec-hello-world: container: - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: hEllo-world root: @@ -776,7 +776,7 @@ deploymentSpec: executors: exec-hello-world: container: - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: more than 128 characters more than 128 characters more than 128 characters more than 128 characters more than 128 characters root: @@ -801,7 +801,7 @@ deploymentSpec: executors: exec-hello-world: container: - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: hello-worl.d root: diff --git a/backend/src/apiserver/server/test/pipeline_with_volume.yaml b/backend/src/apiserver/server/test/pipeline_with_volume.yaml index 99c0009236b..209109e6c7c 100644 --- a/backend/src/apiserver/server/test/pipeline_with_volume.yaml +++ b/backend/src/apiserver/server/test/pipeline_with_volume.yaml @@ -66,7 +66,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - 
image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-comp-2: container: args: @@ -92,7 +92,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-comp-3: container: args: @@ -118,7 +118,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-createpvc: container: image: argostub/createpvc diff --git a/backend/src/apiserver/server/test/v2-hello-world.json b/backend/src/apiserver/server/test/v2-hello-world.json index bff7b6b7b2a..7dcd719e114 100644 --- a/backend/src/apiserver/server/test/v2-hello-world.json +++ b/backend/src/apiserver/server/test/v2-hello-world.json @@ -22,7 +22,7 @@ "program_path=$(mktemp)\nprintf \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Hello world', description='')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } } } diff --git a/backend/src/apiserver/server/test/xgboost_sample_pipeline.yaml b/backend/src/apiserver/server/test/xgboost_sample_pipeline.yaml index 67354ed309c..51e4e485f10 100644 --- a/backend/src/apiserver/server/test/xgboost_sample_pipeline.yaml +++ b/backend/src/apiserver/server/test/xgboost_sample_pipeline.yaml @@ -293,7 +293,7 @@ deploymentSpec: \ try:\n os.makedirs(os.path.dirname(output_file))\n except\ \ OSError:\n pass\n with open(output_file, 'w') as f:\n \ \ f.write(_output_serializers[idx](_outputs[idx]))\n" - image: python:3.9 + 
image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-predict: container: args: @@ -344,7 +344,7 @@ deploymentSpec: _parser.add_argument(\"--predictions\", dest=\"predictions_path\", type=_make_parent_dirs_and_return_path,\ \ required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_predict(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-predict-2: container: args: @@ -398,7 +398,7 @@ deploymentSpec: predictions_path\", type=_make_parent_dirs_and_return_path, required=True,\ \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_predict(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-predict-3: container: args: @@ -452,7 +452,7 @@ deploymentSpec: predictions_path\", type=_make_parent_dirs_and_return_path, required=True,\ \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_predict(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-predict-4: container: args: @@ -503,7 +503,7 @@ deploymentSpec: _parser.add_argument(\"--predictions\", dest=\"predictions_path\", type=_make_parent_dirs_and_return_path,\ \ required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_predict(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-train: container: args: @@ -620,7 +620,7 @@ deploymentSpec: , dest=\"model_config_path\", type=_make_parent_dirs_and_return_path, required=True,\ \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_train(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-train-2: container: args: @@ -737,7 +737,7 @@ deploymentSpec: , dest=\"model_config_path\", 
type=_make_parent_dirs_and_return_path, required=True,\ \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_train(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: xgboost-sample-pipeline root: diff --git a/backend/src/apiserver/storage/pipeline_store_test.go b/backend/src/apiserver/storage/pipeline_store_test.go index 6a04827e55b..31fcc4f081c 100644 --- a/backend/src/apiserver/storage/pipeline_store_test.go +++ b/backend/src/apiserver/storage/pipeline_store_test.go @@ -1936,7 +1936,7 @@ executors: _parsed_args = vars(_parser.parse_args()) _outputs = hello_world(**_parsed_args) - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: hello-world root: diff --git a/backend/src/apiserver/template/testdata/hello_world.yaml b/backend/src/apiserver/template/testdata/hello_world.yaml index 8f37ce80098..fd8ac7a1afc 100644 --- a/backend/src/apiserver/template/testdata/hello_world.yaml +++ b/backend/src/apiserver/template/testdata/hello_world.yaml @@ -31,7 +31,7 @@ deploymentSpec: _parsed_args = vars(_parser.parse_args()) _outputs = hello_world(**_parsed_args) - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: namespace/n1/pipeline/hello-world root: diff --git a/backend/src/apiserver/template/testdata/hello_world_schema_2_0_0.yaml b/backend/src/apiserver/template/testdata/hello_world_schema_2_0_0.yaml index ac46b6c1aa3..0fcb42956fa 100644 --- a/backend/src/apiserver/template/testdata/hello_world_schema_2_0_0.yaml +++ b/backend/src/apiserver/template/testdata/hello_world_schema_2_0_0.yaml @@ -30,7 +30,7 @@ deploymentSpec: _parsed_args = vars(_parser.parse_args()) _outputs = hello_world(**_parsed_args) - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: hello-world root: diff --git a/backend/src/apiserver/template/testdata/pipeline_with_volume.yaml 
b/backend/src/apiserver/template/testdata/pipeline_with_volume.yaml index 64247b4bbff..3bf3838a94b 100644 --- a/backend/src/apiserver/template/testdata/pipeline_with_volume.yaml +++ b/backend/src/apiserver/template/testdata/pipeline_with_volume.yaml @@ -68,7 +68,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-comp-2: container: args: @@ -94,7 +94,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-comp-3: container: args: @@ -120,7 +120,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-createpvc: container: image: argostub/createpvc diff --git a/backend/src/cache/server/mutation_test.go b/backend/src/cache/server/mutation_test.go index 52d92ee27c9..f90513d53ca 100644 --- a/backend/src/cache/server/mutation_test.go +++ b/backend/src/cache/server/mutation_test.go @@ -52,7 +52,7 @@ var ( Command: []string{"python"}, Env: []corev1.EnvVar{{ Name: ArgoWorkflowTemplateEnvKey, - Value: `{"name": "Does not matter","container":{"command":["echo", "Hello"],"image":"python:3.9"}}`, + Value: `{"name": "Does not matter","container":{"command":["echo", "Hello"],"image":"public.ecr.aws/docker/library/python:3.12"}}`, }}, }, }, @@ -171,7 +171,7 @@ func TestMutatePodIfCachedWithCacheEntryExist(t *testing.T) { executionCache := &model.ExecutionCache{ ExecutionCacheKey: "1933d178a14bc415466cfd1b3ca2100af975e8c59e1ff9d502fcf18eb5cbd7f7", ExecutionOutput: "testOutput", - ExecutionTemplate: `{"container":{"command":["echo", "Hello"],"image":"python:3.9"}}`, + ExecutionTemplate: `{"container":{"command":["echo", 
"Hello"],"image":"public.ecr.aws/docker/library/python:3.12"}}`, MaxCacheStaleness: -1, } fakeClientManager.CacheStore().CreateExecutionCache(executionCache) @@ -190,7 +190,7 @@ func TestDefaultImage(t *testing.T) { executionCache := &model.ExecutionCache{ ExecutionCacheKey: "1933d178a14bc415466cfd1b3ca2100af975e8c59e1ff9d502fcf18eb5cbd7f7", ExecutionOutput: "testOutput", - ExecutionTemplate: `{"container":{"command":["echo", "Hello"],"image":"python:3.9"}}`, + ExecutionTemplate: `{"container":{"command":["echo", "Hello"],"image":"public.ecr.aws/docker/library/python:3.12"}}`, MaxCacheStaleness: -1, } fakeClientManager.CacheStore().CreateExecutionCache(executionCache) @@ -209,7 +209,7 @@ func TestSetImage(t *testing.T) { executionCache := &model.ExecutionCache{ ExecutionCacheKey: "f5fe913be7a4516ebfe1b5de29bcb35edd12ecc776b2f33f10ca19709ea3b2f0", ExecutionOutput: "testOutput", - ExecutionTemplate: `{"container":{"command":["echo", "Hello"],"image":"python:3.9"}}`, + ExecutionTemplate: `{"container":{"command":["echo", "Hello"],"image":"public.ecr.aws/docker/library/python:3.12"}}`, MaxCacheStaleness: -1, } fakeClientManager.CacheStore().CreateExecutionCache(executionCache) @@ -226,7 +226,7 @@ func TestCacheNodeRestriction(t *testing.T) { executionCache := &model.ExecutionCache{ ExecutionCacheKey: "f5fe913be7a4516ebfe1b5de29bcb35edd12ecc776b2f33f10ca19709ea3b2f0", ExecutionOutput: "testOutput", - ExecutionTemplate: `{"container":{"command":["echo", "Hello"],"image":"python:3.9"},"nodeSelector":{"disktype":"ssd"}}`, + ExecutionTemplate: `{"container":{"command":["echo", "Hello"],"image":"public.ecr.aws/docker/library/python:3.12"},"nodeSelector":{"disktype":"ssd"}}`, MaxCacheStaleness: -1, } fakeClientManager.CacheStore().CreateExecutionCache(executionCache) @@ -241,7 +241,7 @@ func TestMutatePodIfCachedWithTeamplateCleanup(t *testing.T) { executionCache := &model.ExecutionCache{ ExecutionCacheKey: "c81988503d55a5817d79bd972017d95c37f72b024e522b4d79787d9f599c0725", 
ExecutionOutput: "testOutput", - ExecutionTemplate: `Cache key was calculated from this: {"container":{"command":["echo", "Hello"],"image":"python:3.9"},"outputs":"anything"}`, + ExecutionTemplate: `Cache key was calculated from this: {"container":{"command":["echo", "Hello"],"image":"public.ecr.aws/docker/library/python:3.12"},"outputs":"anything"}`, MaxCacheStaleness: -1, } fakeClientManager.CacheStore().CreateExecutionCache(executionCache) @@ -253,7 +253,7 @@ func TestMutatePodIfCachedWithTeamplateCleanup(t *testing.T) { "name": "Does not matter", "metadata": "anything", "container": { - "image": "python:3.9", + "image": "public.ecr.aws/docker/library/python:3.12", "command": ["echo", "Hello"] }, "outputs": "anything", diff --git a/backend/src/v2/cacheutils/cache_test.go b/backend/src/v2/cacheutils/cache_test.go index 68b2f684e70..89b17922ed7 100644 --- a/backend/src/v2/cacheutils/cache_test.go +++ b/backend/src/v2/cacheutils/cache_test.go @@ -92,7 +92,7 @@ func TestGenerateCacheKey(t *testing.T) { "output_parameter_two": "INT", }, cmdArgs: []string{"sh", "ec", "test"}, - image: "python:3.9", + image: "public.ecr.aws/docker/library/python:3.12", want: &cachekey.CacheKey{ InputArtifactNames: map[string]*cachekey.ArtifactNameList{ "dataset_one": {ArtifactNames: []string{"1"}}, @@ -126,7 +126,7 @@ func TestGenerateCacheKey(t *testing.T) { }, ContainerSpec: &cachekey.ContainerSpec{ CmdArgs: []string{"sh", "ec", "test"}, - Image: "python:3.9", + Image: "public.ecr.aws/docker/library/python:3.12", }, }, @@ -186,7 +186,7 @@ func TestGenerateFingerPrint(t *testing.T) { }, ContainerSpec: &cachekey.ContainerSpec{ CmdArgs: []string{"sh", "ec", "test"}, - Image: "python:3.9", + Image: "public.ecr.aws/docker/library/python:3.12", }, } tests := []struct { @@ -230,7 +230,7 @@ func TestGenerateFingerPrint(t *testing.T) { }, ContainerSpec: &cachekey.ContainerSpec{ CmdArgs: []string{"sh", "ec", "test"}, - Image: "python:3.9", + Image: "public.ecr.aws/docker/library/python:3.12", 
}, }, wantEqual: true, @@ -246,7 +246,7 @@ func TestGenerateFingerPrint(t *testing.T) { }, ContainerSpec: &cachekey.ContainerSpec{ CmdArgs: []string{"sh", "ec", "run"}, - Image: "python:3.9", + Image: "public.ecr.aws/docker/library/python:3.12", }, }, wantEqual: false, diff --git a/backend/src/v2/compiler/argocompiler/testdata/create_mount_delete_dynamic_pvc.yaml b/backend/src/v2/compiler/argocompiler/testdata/create_mount_delete_dynamic_pvc.yaml index c4abf4655a7..34dc6cdda87 100644 --- a/backend/src/v2/compiler/argocompiler/testdata/create_mount_delete_dynamic_pvc.yaml +++ b/backend/src/v2/compiler/argocompiler/testdata/create_mount_delete_dynamic_pvc.yaml @@ -18,7 +18,7 @@ spec: \u0026\u0026 \"$0\" \"$@\"\n","sh","-ec","program_path=$(mktemp -d) printf \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\" python3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\" ","\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import - *\n\ndef comp():\n pass\n\n"],"image":"python:3.9"}' + *\n\ndef comp():\n pass\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: kubernetes-comp-comp-2 value: '{"pvcMount":[{"mountPath":"/reused_data","taskOutputParameter":{"outputParameterKey":"name","producerTask":"createpvc"}}]}' - name: components-98f254581598234b59377784d6cbf209de79e0bcda8013fe4c4397b5d3a26767 diff --git a/backend/src/v2/compiler/argocompiler/testdata/create_pod_metadata.yaml b/backend/src/v2/compiler/argocompiler/testdata/create_pod_metadata.yaml index c6b66ba4326..ad758ef0129 100644 --- a/backend/src/v2/compiler/argocompiler/testdata/create_pod_metadata.yaml +++ b/backend/src/v2/compiler/argocompiler/testdata/create_pod_metadata.yaml @@ -16,7 +16,7 @@ spec: hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Hello world'', description='''')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, 
default=argparse.SUPPRESS)\n_parsed_args - = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"python:3.9"}' + = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: components-root value: '{"dag":{"tasks":{"hello-world":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world"}}}},"inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' entrypoint: entrypoint diff --git a/backend/src/v2/compiler/argocompiler/testdata/hello_world.yaml b/backend/src/v2/compiler/argocompiler/testdata/hello_world.yaml index dcbee178d31..6e43eab53b5 100644 --- a/backend/src/v2/compiler/argocompiler/testdata/hello_world.yaml +++ b/backend/src/v2/compiler/argocompiler/testdata/hello_world.yaml @@ -14,7 +14,7 @@ spec: hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Hello world'', description='''')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args - = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"python:3.9"}' + = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: components-root value: '{"dag":{"tasks":{"hello-world":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world"}}}},"inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' entrypoint: entrypoint diff --git a/backend/src/v2/compiler/argocompiler/testdata/hello_world_log_level.yaml b/backend/src/v2/compiler/argocompiler/testdata/hello_world_log_level.yaml index 1aa484f7cda..d489d318513 100644 --- 
a/backend/src/v2/compiler/argocompiler/testdata/hello_world_log_level.yaml +++ b/backend/src/v2/compiler/argocompiler/testdata/hello_world_log_level.yaml @@ -14,7 +14,7 @@ spec: hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Hello world'', description='''')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args - = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"python:3.9"}' + = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: components-root value: '{"dag":{"tasks":{"hello-world":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world"}}}},"inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' entrypoint: entrypoint diff --git a/backend/src/v2/compiler/argocompiler/testdata/hello_world_run_as_user.yaml b/backend/src/v2/compiler/argocompiler/testdata/hello_world_run_as_user.yaml index 437c2ed85e8..292a3019ef6 100644 --- a/backend/src/v2/compiler/argocompiler/testdata/hello_world_run_as_user.yaml +++ b/backend/src/v2/compiler/argocompiler/testdata/hello_world_run_as_user.yaml @@ -14,7 +14,7 @@ spec: hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Hello world'', description='''')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args - = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"python:3.9"}' + = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: components-root value: 
'{"dag":{"tasks":{"hello-world":{"cachingOptions":{"enableCache":true},"componentRef":{"name":"comp-hello-world"},"inputs":{"parameters":{"text":{"componentInputParameter":"text"}}},"taskInfo":{"name":"hello-world"}}}},"inputDefinitions":{"parameters":{"text":{"type":"STRING"}}}}' entrypoint: entrypoint diff --git a/backend/src/v2/compiler/tektoncompiler/testdata/condition_ir.yaml b/backend/src/v2/compiler/tektoncompiler/testdata/condition_ir.yaml index 143d178fac8..15e92341173 100644 --- a/backend/src/v2/compiler/tektoncompiler/testdata/condition_ir.yaml +++ b/backend/src/v2/compiler/tektoncompiler/testdata/condition_ir.yaml @@ -114,7 +114,7 @@ pipelineSpec: \ a coin and output heads or tails randomly.\"\"\"\n if force_flip_result:\n\ \ return force_flip_result\n import random\n result = 'heads'\ \ if random.randint(0, 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-flip-coin-2: container: args: @@ -143,7 +143,7 @@ pipelineSpec: \ a coin and output heads or tails randomly.\"\"\"\n if force_flip_result:\n\ \ return force_flip_result\n import random\n result = 'heads'\ \ if random.randint(0, 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-msg: container: args: @@ -170,7 +170,7 @@ pipelineSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_msg(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-msg-2: container: args: @@ -197,7 +197,7 @@ pipelineSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_msg(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-msg-3: container: args: @@ -224,7 +224,7 @@ pipelineSpec: - "\nimport kfp\nfrom kfp import 
dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_msg(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: condition-v2 root: diff --git a/backend/src/v2/compiler/tektoncompiler/testdata/exit_handler.yaml b/backend/src/v2/compiler/tektoncompiler/testdata/exit_handler.yaml index 30f68e10b7a..2e054d03712 100755 --- a/backend/src/v2/compiler/tektoncompiler/testdata/exit_handler.yaml +++ b/backend/src/v2/compiler/tektoncompiler/testdata/exit_handler.yaml @@ -49,7 +49,7 @@ spec: -d)\nprintf \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import - *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n print(message)\n\n"],"image":"python:3.9"}' + *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n print(message)\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -166,7 +166,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: root-system-dag-pub-driver params: @@ -210,7 +210,7 @@ spec: \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef - fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n print(message)\n sys.exit(1)\n\n"],"image":"python:3.9"}' + fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n print(message)\n sys.exit(1)\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: 
kubernetes-config @@ -329,7 +329,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: print-op-2 params: @@ -352,7 +352,7 @@ spec: \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef - print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n print(message)\n\n"],"image":"python:3.9"}' + print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n print(message)\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -469,7 +469,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: exit-handler-1-dag-driver params: diff --git a/backend/src/v2/compiler/tektoncompiler/testdata/exit_handler_ir.yaml b/backend/src/v2/compiler/tektoncompiler/testdata/exit_handler_ir.yaml index 45aeae7cc76..7c4e038071a 100644 --- a/backend/src/v2/compiler/tektoncompiler/testdata/exit_handler_ir.yaml +++ b/backend/src/v2/compiler/tektoncompiler/testdata/exit_handler_ir.yaml @@ -76,7 +76,7 @@ pipelineSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n\ \ print(message)\n sys.exit(1)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op: container: args: @@ -103,7 +103,7 @@ pipelineSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-2: container: 
args: @@ -130,7 +130,7 @@ pipelineSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-exit-handler root: diff --git a/backend/src/v2/compiler/tektoncompiler/testdata/hello_world.yaml b/backend/src/v2/compiler/tektoncompiler/testdata/hello_world.yaml index bd0ab351ce5..92e6d415110 100644 --- a/backend/src/v2/compiler/tektoncompiler/testdata/hello_world.yaml +++ b/backend/src/v2/compiler/tektoncompiler/testdata/hello_world.yaml @@ -31,7 +31,7 @@ spec: hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Hello world'', description='''')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args - = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"python:3.9"}' + = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -137,7 +137,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: root-system-dag-driver params: diff --git a/backend/src/v2/compiler/tektoncompiler/testdata/loop_static.yaml b/backend/src/v2/compiler/tektoncompiler/testdata/loop_static.yaml index 30b35248cd5..b2dcc7a910c 100644 --- a/backend/src/v2/compiler/tektoncompiler/testdata/loop_static.yaml +++ b/backend/src/v2/compiler/tektoncompiler/testdata/loop_static.yaml @@ -78,7 +78,7 @@ spec: -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef 
concat_op(a: str, b: str) -\u003e str:\n print(a + b)\n return - a + b\n\n"],"image":"python:3.9"}' + a + b\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -195,7 +195,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: print-op-2 params: @@ -219,7 +219,7 @@ spec: -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_op(text: str) -\u003e str:\n print(text)\n return - text\n\n"],"image":"python:3.9"}' + text\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -338,7 +338,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: for-loop-2-dag-driver params: @@ -406,7 +406,7 @@ spec: \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef - print_op(text: str) -\u003e str:\n print(text)\n return text\n\n"],"image":"python:3.9"}' + print_op(text: str) -\u003e str:\n print(text)\n return text\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -523,7 +523,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: root-system-dag-driver params: diff --git a/backend/src/v2/compiler/tektoncompiler/testdata/loop_static_ir.yaml 
b/backend/src/v2/compiler/tektoncompiler/testdata/loop_static_ir.yaml index 13d9b22e388..065f39ced62 100644 --- a/backend/src/v2/compiler/tektoncompiler/testdata/loop_static_ir.yaml +++ b/backend/src/v2/compiler/tektoncompiler/testdata/loop_static_ir.yaml @@ -101,7 +101,7 @@ pipelineSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef concat_op(a: str, b: str) -> str:\n print(a + b)\n return\ \ a + b\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op: container: args: @@ -128,7 +128,7 @@ pipelineSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(text: str) -> str:\n print(text)\n return text\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-2: container: args: @@ -155,7 +155,7 @@ pipelineSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(text: str) -> str:\n print(text)\n return text\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-loop-static root: diff --git a/backend/src/v2/compiler/tektoncompiler/testdata/mnist_pipeline.yaml b/backend/src/v2/compiler/tektoncompiler/testdata/mnist_pipeline.yaml index 559d460b1a2..20987a6d0de 100755 --- a/backend/src/v2/compiler/tektoncompiler/testdata/mnist_pipeline.yaml +++ b/backend/src/v2/compiler/tektoncompiler/testdata/mnist_pipeline.yaml @@ -37,7 +37,7 @@ spec: convert_experiment_spec_to_str(experiment_spec_json: Dict[str, str])-\u003e NamedTuple(''Outputs'', [(''experiment_spec_str_output'', str)]):\n import json\n output = NamedTuple(''Outputs'', [(''experiment_spec_str_output'', - str)])\n return output(json.dumps(experiment_spec_json))\n\n"],"image":"python:3.9"}' + str)])\n return output(json.dumps(experiment_spec_json))\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: 
kubernetes-config @@ -157,7 +157,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: convert-katib-results params: @@ -188,7 +188,7 @@ spec: pa[\"name\"] == \"learning_rate\":\n best_hps.append(\"--tf-learning-rate=\" + pa[\"value\"])\n elif pa[\"name\"] == \"batch_size\":\n best_hps.append(\"--tf-batch-size=\" + pa[\"value\"])\n print(\"Best Hyperparameters: {}\".format(best_hps))\n return - \" \".join(best_hps)\n\n"],"image":"python:3.9"}' + \" \".join(best_hps)\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -321,7 +321,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: create-dataset params: @@ -501,7 +501,7 @@ spec: Create an Experiment from the above parameters.\n experiment_spec = V1beta1ExperimentSpec(\n max_trial_count=max_trial_count,\n max_failed_trial_count=max_failed_trial_count,\n parallel_trial_count=parallel_trial_count,\n objective=objective,\n algorithm=algorithm,\n parameters=parameters,\n trial_template=trial_template\n )\n\n # Convert experiment_spec to Dict type.\n experiment_spec_json = ApiClient().sanitize_for_serialization(experiment_spec)\n output = NamedTuple(''Outputs'', [(''experiment_spec_json'', Dict[str, str])])\n return - output(experiment_spec_json)\n\n"],"image":"python:3.9"}' + output(experiment_spec_json)\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -761,7 +761,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: create-tfjob-task params: @@ -812,7 +812,7 @@ spec: [\n \"python /opt/model.py --tf-export-dir=/mnt/export --tf-train-steps={} 
{}\".format(training_steps, best_hps) \n ],\n }\n ],\n }\n }\n }\n\n output = NamedTuple(''Outputs'', [(''chief_spec'', Dict[str, str]), (''worker_spec'', - Dict[str, str])])\n return output(tfjob_chief_spec, tfjob_worker_spec)\n\n"],"image":"python:3.9"}' + Dict[str, str])])\n return output(tfjob_chief_spec, tfjob_worker_spec)\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -962,7 +962,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: convert-inference-service-to-artifact params: @@ -987,7 +987,7 @@ spec: kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef convert_inference_service_to_artifact(inferenceservice_yaml: Dict[str, str], inferenceservice_artifact: Output[Artifact]):\n import json\n with - open(inferenceservice_artifact.path, ''w'') as f:\n f.write(json.dumps(inferenceservice_yaml))\n\n"],"image":"python:3.9"}' + open(inferenceservice_artifact.path, ''w'') as f:\n f.write(json.dumps(inferenceservice_yaml))\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -1107,7 +1107,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: create-serving-task params: @@ -1138,7 +1138,7 @@ spec: {\n \"sidecar.istio.io/inject\": \"false\"\n }\n },\n \"spec\":{\n \"predictor\":{\n \"tensorflow\": {\n \"storageUri\": \"pvc://{}/\".format(model_volume_name)\n }\n }\n }\n }\n\n output = NamedTuple(''Outputs'', [(''inferenceservice_yaml'', Dict[str, str])])\n return - output(inference_service)\n\n"],"image":"python:3.9"}' + output(inference_service)\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -1277,7 
+1277,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: serving-launcher params: diff --git a/backend/src/v2/compiler/tektoncompiler/testdata/mnist_pipeline_ir.yaml b/backend/src/v2/compiler/tektoncompiler/testdata/mnist_pipeline_ir.yaml index be0bb9bd097..d69762e54ad 100644 --- a/backend/src/v2/compiler/tektoncompiler/testdata/mnist_pipeline_ir.yaml +++ b/backend/src/v2/compiler/tektoncompiler/testdata/mnist_pipeline_ir.yaml @@ -224,7 +224,7 @@ pipelineSpec: \ str])-> NamedTuple('Outputs', [('experiment_spec_str_output', str)]):\n\ \ import json\n output = NamedTuple('Outputs', [('experiment_spec_str_output',\ \ str)])\n return output(json.dumps(experiment_spec_json))\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-convert-inference-service-to-artifact: container: args: @@ -253,7 +253,7 @@ pipelineSpec: \ Dict[str, str], inferenceservice_artifact: Output[Artifact]):\n import\ \ json\n with open(inferenceservice_artifact.path, 'w') as f:\n \ \ f.write(json.dumps(inferenceservice_yaml))\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-convert-katib-results: container: args: @@ -289,7 +289,7 @@ pipelineSpec: batch_size\":\n best_hps.append(\"--tf-batch-size=\" + pa[\"\ value\"])\n print(\"Best Hyperparameters: {}\".format(best_hps))\n \ \ return \" \".join(best_hps)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-create-dataset: container: args: @@ -409,7 +409,7 @@ pipelineSpec: \ = ApiClient().sanitize_for_serialization(experiment_spec)\n output\ \ = NamedTuple('Outputs', [('experiment_spec_json', Dict[str, str])])\n\ \ return output(experiment_spec_json)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-create-serving-task: container: args: @@ -446,7 +446,7 @@ pipelineSpec: .format(model_volume_name)\n }\n }\n }\n }\n\ \n 
output = NamedTuple('Outputs', [('inferenceservice_yaml', Dict[str,\ \ str])])\n return output(inference_service)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-create-tfjob-task: container: args: @@ -513,7 +513,7 @@ pipelineSpec: \ ],\n }\n }\n }\n\n output = NamedTuple('Outputs',\ \ [('chief_spec', Dict[str, str]), ('worker_spec', Dict[str, str])])\n \ \ return output(tfjob_chief_spec, tfjob_worker_spec)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-serving-launcher: container: args: diff --git a/backend/src/v2/compiler/tektoncompiler/testdata/nestedloop.yaml b/backend/src/v2/compiler/tektoncompiler/testdata/nestedloop.yaml index 877b86d5cb6..d103ba459a7 100644 --- a/backend/src/v2/compiler/tektoncompiler/testdata/nestedloop.yaml +++ b/backend/src/v2/compiler/tektoncompiler/testdata/nestedloop.yaml @@ -36,7 +36,7 @@ spec: kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef flip_coin_op() -\u003e str:\n \"\"\"Flip a coin and output heads or tails randomly.\"\"\"\n import random\n result = random.choice([''heads'', - ''tails''])\n print(result)\n return result\n\n"],"image":"python:3.9"}' + ''tails''])\n print(result)\n return result\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -156,7 +156,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: for-loop-2-pipelineloop params: @@ -228,7 +228,7 @@ spec: -d)\nprintf \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import - *\n\ndef print_op(message: str):\n \"\"\"Print a message.\"\"\"\n print(message)\n\n"],"image":"python:3.9"}' + *\n\ndef 
print_op(message: str):\n \"\"\"Print a message.\"\"\"\n print(message)\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -345,7 +345,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main when: - input: $(tasks.condition-4-dag-driver.results.condition) @@ -424,7 +424,7 @@ spec: -d)\nprintf \"%s\" \"$0\" \u003e \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n","\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import - *\n\ndef print_op(message: str):\n \"\"\"Print a message.\"\"\"\n print(message)\n\n"],"image":"python:3.9"}' + *\n\ndef print_op(message: str):\n \"\"\"Print a message.\"\"\"\n print(message)\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -541,7 +541,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main when: - input: $(tasks.condition-5-dag-driver.results.condition) @@ -621,7 +621,7 @@ spec: *\n\ndef get_random_int_op(minimum: int, maximum: int) -\u003e int:\n \"\"\"Generate a random number between minimum and maximum (inclusive).\"\"\"\n import random\n result = random.randint(minimum, maximum)\n print(result)\n return - result\n\n"],"image":"python:3.9"}' + result\n\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -741,7 +741,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main when: - input: $(tasks.condition-3-dag-driver.results.condition) diff --git 
a/backend/src/v2/compiler/tektoncompiler/testdata/nestedloop_ir.yaml b/backend/src/v2/compiler/tektoncompiler/testdata/nestedloop_ir.yaml index 94a4b33a478..942a69cbd89 100644 --- a/backend/src/v2/compiler/tektoncompiler/testdata/nestedloop_ir.yaml +++ b/backend/src/v2/compiler/tektoncompiler/testdata/nestedloop_ir.yaml @@ -204,7 +204,7 @@ pipelineSpec: \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ \ or tails randomly.\"\"\"\n import random\n result = random.choice(['heads',\ \ 'tails'])\n print(result)\n return result\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-get-random-int-op: container: args: @@ -233,7 +233,7 @@ pipelineSpec: \"\"Generate a random number between minimum and maximum (inclusive).\"\"\ \"\n import random\n result = random.randint(minimum, maximum)\n \ \ print(result)\n return result\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op: container: args: @@ -260,7 +260,7 @@ pipelineSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Print a message.\"\"\"\n \ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-2: container: args: @@ -287,7 +287,7 @@ pipelineSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Print a message.\"\"\"\n \ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: description: Shows how to use dsl.Condition(), dsl.ParallelFor, and dsl.ExitHandler(). 
name: tutorial-control-flows diff --git a/backend/src/v2/compiler/tektoncompiler/testdata/pod_metadata.yaml b/backend/src/v2/compiler/tektoncompiler/testdata/pod_metadata.yaml index 1dfeb2012ea..2ab5c45291d 100644 --- a/backend/src/v2/compiler/tektoncompiler/testdata/pod_metadata.yaml +++ b/backend/src/v2/compiler/tektoncompiler/testdata/pod_metadata.yaml @@ -31,7 +31,7 @@ spec: hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog=''Hello world'', description='''')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args - = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"python:3.9"}' + = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n"],"image":"public.ecr.aws/docker/library/python:3.12"}' - name: iteration-index value: "" - name: kubernetes-config @@ -137,7 +137,7 @@ spec: - configMapRef: name: metadata-grpc-configmap optional: true - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 name: user-main - name: root-system-dag-driver params: diff --git a/backend/src/v2/compiler/testdata/component_used_twice.json b/backend/src/v2/compiler/testdata/component_used_twice.json index 3fa73594297..af690d757ce 100644 --- a/backend/src/v2/compiler/testdata/component_used_twice.json +++ b/backend/src/v2/compiler/testdata/component_used_twice.json @@ -23,7 +23,7 @@ "program_path=$(mktemp)\nprintf \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Hello world', description='')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } } } diff --git 
a/backend/src/v2/compiler/testdata/create_mount_delete_dynamic_pvc.json b/backend/src/v2/compiler/testdata/create_mount_delete_dynamic_pvc.json index 99b7a7c5db5..c69ce6f8d00 100644 --- a/backend/src/v2/compiler/testdata/create_mount_delete_dynamic_pvc.json +++ b/backend/src/v2/compiler/testdata/create_mount_delete_dynamic_pvc.json @@ -73,7 +73,7 @@ "program_path=$(mktemp -d) printf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\" python3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\" ", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef comp():\n pass\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-comp-2": { @@ -88,7 +88,7 @@ "program_path=$(mktemp -d) printf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\" python3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\" ", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef comp():\n pass\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-createpvc": { diff --git a/backend/src/v2/compiler/testdata/hello_world.json b/backend/src/v2/compiler/testdata/hello_world.json index 5e41e48315c..4a6a4abb7a7 100644 --- a/backend/src/v2/compiler/testdata/hello_world.json +++ b/backend/src/v2/compiler/testdata/hello_world.json @@ -26,7 +26,7 @@ "program_path=$(mktemp)\nprintf \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "def hello_world(text):\n print(text)\n return text\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Hello world', description='')\n_parser.add_argument(\"--text\", dest=\"text\", type=str, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = hello_world(**_parsed_args)\n" ], - "image": "python:3.9" + "image": 
"public.ecr.aws/docker/library/python:3.12" } } } diff --git a/backend/src/v2/driver/driver_test.go b/backend/src/v2/driver/driver_test.go index 3021abfee47..8d4b178a829 100644 --- a/backend/src/v2/driver/driver_test.go +++ b/backend/src/v2/driver/driver_test.go @@ -52,7 +52,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { "Valid - nvidia.com/gpu", args{ &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{ - Image: "python:3.9", + Image: "public.ecr.aws/docker/library/python:3.12", Args: []string{"--function_to_execute", "add"}, Command: []string{"sh", "-ec", "python3 -m kfp.components.executor_main"}, Resources: &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec{ @@ -92,7 +92,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { "Valid - amd.com/gpu", args{ &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{ - Image: "python:3.9", + Image: "public.ecr.aws/docker/library/python:3.12", Args: []string{"--function_to_execute", "add"}, Command: []string{"sh", "-ec", "python3 -m kfp.components.executor_main"}, Resources: &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec{ @@ -132,7 +132,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { "Valid - cloud-tpus.google.com/v3", args{ &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{ - Image: "python:3.9", + Image: "public.ecr.aws/docker/library/python:3.12", Args: []string{"--function_to_execute", "add"}, Command: []string{"sh", "-ec", "python3 -m kfp.components.executor_main"}, Resources: &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec{ @@ -172,7 +172,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { "Valid - cloud-tpus.google.com/v2", args{ &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{ - Image: "python:3.9", + Image: "public.ecr.aws/docker/library/python:3.12", Args: []string{"--function_to_execute", "add"}, Command: []string{"sh", "-ec", "python3 
-m kfp.components.executor_main"}, Resources: &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec{ @@ -212,7 +212,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { "Valid - custom string", args{ &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{ - Image: "python:3.9", + Image: "public.ecr.aws/docker/library/python:3.12", Args: []string{"--function_to_execute", "add"}, Command: []string{"sh", "-ec", "python3 -m kfp.components.executor_main"}, Resources: &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec{ @@ -268,7 +268,7 @@ func Test_initPodSpecPatch_acceleratorConfig(t *testing.T) { func Test_initPodSpecPatch_resource_placeholders(t *testing.T) { containerSpec := &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{ - Image: "python:3.9", + Image: "public.ecr.aws/docker/library/python:3.12", Args: []string{"--function_to_execute", "add"}, Command: []string{"sh", "-ec", "python3 -m kfp.components.executor_main"}, Resources: &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec{ @@ -366,7 +366,7 @@ func Test_initPodSpecPatch_resource_placeholders(t *testing.T) { func Test_initPodSpecPatch_legacy_resources(t *testing.T) { containerSpec := &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{ - Image: "python:3.9", + Image: "public.ecr.aws/docker/library/python:3.12", Args: []string{"--function_to_execute", "add"}, Command: []string{"sh", "-ec", "python3 -m kfp.components.executor_main"}, Resources: &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec{ @@ -399,7 +399,7 @@ func Test_initPodSpecPatch_legacy_resources(t *testing.T) { func Test_initPodSpecPatch_modelcar_input_artifact(t *testing.T) { containerSpec := &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{ - Image: "python:3.9", + Image: "public.ecr.aws/docker/library/python:3.12", Args: []string{"--function_to_execute", "add"}, Command: []string{"sh", "-ec", "python3 -m 
kfp.components.executor_main"}, } @@ -522,7 +522,7 @@ func Test_initPodSpecPatch_resourceRequests(t *testing.T) { "Valid - with requests", args{ &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{ - Image: "python:3.9", + Image: "public.ecr.aws/docker/library/python:3.12", Args: []string{"--function_to_execute", "add"}, Command: []string{"sh", "-ec", "python3 -m kfp.components.executor_main"}, Resources: &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec{ @@ -559,7 +559,7 @@ func Test_initPodSpecPatch_resourceRequests(t *testing.T) { "Valid - zero requests", args{ &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec{ - Image: "python:3.9", + Image: "public.ecr.aws/docker/library/python:3.12", Args: []string{"--function_to_execute", "add"}, Command: []string{"sh", "-ec", "python3 -m kfp.components.executor_main"}, Resources: &pipelinespec.PipelineDeploymentConfig_PipelineContainerSpec_ResourceSpec{ diff --git a/backend/src/v2/test/Dockerfile b/backend/src/v2/test/Dockerfile index caf7acc9c6a..b8f8ad9110b 100644 --- a/backend/src/v2/test/Dockerfile +++ b/backend/src/v2/test/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-FROM python:3.9-slim +FROM public.ecr.aws/docker/library/python:3.12-slim WORKDIR /workdir COPY backend/src/v2/test/requirements.txt backend/src/v2/test/ diff --git a/backend/src/v2/test/components/run_sample.yaml b/backend/src/v2/test/components/run_sample.yaml index ceb5f340208..c609ad7fda3 100644 --- a/backend/src/v2/test/components/run_sample.yaml +++ b/backend/src/v2/test/components/run_sample.yaml @@ -24,7 +24,7 @@ inputs: - {name: backend_compiler, type: Binary} implementation: container: - image: python:3.9-alpine + image: public.ecr.aws/docker/library/python:3.12-alpine command: - sh - -exc diff --git a/backend/test/resources/v2-hello-world.yaml b/backend/test/resources/v2-hello-world.yaml index 203c205f26b..752e0f9fd12 100644 --- a/backend/test/resources/v2-hello-world.yaml +++ b/backend/test/resources/v2-hello-world.yaml @@ -30,7 +30,7 @@ deploymentSpec: _parsed_args = vars(_parser.parse_args()) _outputs = hello_world(**_parsed_args) - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: hello-world root: diff --git a/backend/update_requirements.sh b/backend/update_requirements.sh index 93c811f474d..add6895b5b5 100755 --- a/backend/update_requirements.sh +++ b/backend/update_requirements.sh @@ -1,5 +1,5 @@ #!/bin/bash # This image should be in sync with Dockerfile. 
-IMAGE="python:3.9" +IMAGE="public.ecr.aws/docker/library/python:3.12" ../hack/update-requirements.sh $IMAGE requirements.txt diff --git a/components/contrib/datasets/HuggingFace/Load_dataset/component.py b/components/contrib/datasets/HuggingFace/Load_dataset/component.py index a8fc1431985..1fc1060df19 100644 --- a/components/contrib/datasets/HuggingFace/Load_dataset/component.py +++ b/components/contrib/datasets/HuggingFace/Load_dataset/component.py @@ -20,7 +20,7 @@ def load_dataset_using_huggingface( if __name__ == '__main__': load_dataset_op = create_component_from_func( load_dataset_using_huggingface, - base_image='python:3.9', + base_image='public.ecr.aws/docker/library/python:3.12', packages_to_install=['datasets==1.6.2'], annotations={ 'author': 'Alexey Volkov ', diff --git a/components/contrib/datasets/HuggingFace/Load_dataset/component.yaml b/components/contrib/datasets/HuggingFace/Load_dataset/component.yaml index e7ca4e4fb2b..ce8e23ef29a 100644 --- a/components/contrib/datasets/HuggingFace/Load_dataset/component.yaml +++ b/components/contrib/datasets/HuggingFace/Load_dataset/component.yaml @@ -10,7 +10,7 @@ outputs: - {name: splits, type: JsonArray} implementation: container: - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 command: - sh - -c diff --git a/components/contrib/datasets/HuggingFace/Split_dataset/component.py b/components/contrib/datasets/HuggingFace/Split_dataset/component.py index 5392fa8f1ef..5788f95b1b7 100644 --- a/components/contrib/datasets/HuggingFace/Split_dataset/component.py +++ b/components/contrib/datasets/HuggingFace/Split_dataset/component.py @@ -26,7 +26,7 @@ def split_dataset_huggingface( if __name__ == '__main__': split_dataset_op = create_component_from_func( split_dataset_huggingface, - base_image='python:3.9', + base_image='public.ecr.aws/docker/library/python:3.12', packages_to_install=['datasets==1.6.2'], annotations={ 'author': 'Alexey Volkov ', diff --git 
a/components/contrib/datasets/HuggingFace/Split_dataset/component.yaml b/components/contrib/datasets/HuggingFace/Split_dataset/component.yaml index 3a46f6420a7..6bbc25c4e5b 100644 --- a/components/contrib/datasets/HuggingFace/Split_dataset/component.yaml +++ b/components/contrib/datasets/HuggingFace/Split_dataset/component.yaml @@ -13,7 +13,7 @@ outputs: - {name: dataset_state, type: JsonObject} implementation: container: - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 command: - sh - -c diff --git a/components/contrib/kfp/Run_component/component.py b/components/contrib/kfp/Run_component/component.py index ab3ac3b58a1..453bcb75a99 100644 --- a/components/contrib/kfp/Run_component/component.py +++ b/components/contrib/kfp/Run_component/component.py @@ -40,7 +40,7 @@ def run_component_or_pipeline( from kfp.components import create_component_from_func run_component_or_pipeline_op = create_component_from_func( run_component_or_pipeline, - base_image='python:3.9', + base_image='public.ecr.aws/docker/library/python:3.12', packages_to_install=['kfp==1.4.0'], output_component_file='component.yaml', annotations={ diff --git a/components/contrib/kfp/Run_component/component.yaml b/components/contrib/kfp/Run_component/component.yaml index 3d0c99239d9..8fcc4eda7ce 100644 --- a/components/contrib/kfp/Run_component/component.yaml +++ b/components/contrib/kfp/Run_component/component.yaml @@ -13,7 +13,7 @@ outputs: - {name: run_object, type: JsonObject} implementation: container: - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 command: - sh - -c diff --git a/components/kserve/Dockerfile b/components/kserve/Dockerfile index 507003ccf0d..83e545c2fb1 100644 --- a/components/kserve/Dockerfile +++ b/components/kserve/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9-slim-bullseye +FROM public.ecr.aws/docker/library/python:3.12-slim-bullseye RUN apt-get update && apt-get install -y gcc python3-dev COPY requirements.txt . 
diff --git a/frontend/mock-backend/data/v2/pipeline/lightweight_python_functions_v2_pipeline.json b/frontend/mock-backend/data/v2/pipeline/lightweight_python_functions_v2_pipeline.json index 2e5c6991282..109c1256e70 100644 --- a/frontend/mock-backend/data/v2/pipeline/lightweight_python_functions_v2_pipeline.json +++ b/frontend/mock-backend/data/v2/pipeline/lightweight_python_functions_v2_pipeline.json @@ -107,7 +107,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", "\nfrom kfp.dsl import *\nfrom typing import *\n\ndef preprocess(\n # An input parameter of type string.\n message: str,\n # An input parameter of type dict.\n input_dict_parameter: Dict[str, int],\n # An input parameter of type list.\n input_list_parameter: List[str],\n # Use Output[T] to get a metadata-rich handle to the output artifact\n # of type `Dataset`.\n output_dataset_one: Output[Dataset],\n # A locally accessible filepath for another output artifact of type\n # `Dataset`.\n output_dataset_two_path: OutputPath('Dataset'),\n # A locally accessible filepath for an output parameter of type string.\n output_parameter_path: OutputPath(str),\n # A locally accessible filepath for an output parameter of type bool.\n output_bool_parameter_path: OutputPath(bool),\n # A locally accessible filepath for an output parameter of type dict.\n output_dict_parameter_path: OutputPath(Dict[str, int]),\n # A locally accessible filepath for an output parameter of type list.\n output_list_parameter_path: OutputPath(List[str]),\n):\n \"\"\"Dummy preprocessing step.\"\"\"\n\n # Use Dataset.path to access a local file path for writing.\n # One can also use Dataset.uri to access the actual URI file path.\n with open(output_dataset_one.path, 'w') as f:\n f.write(message)\n\n # OutputPath is used to just pass the local file path of the output artifact\n # to the 
function.\n with open(output_dataset_two_path, 'w') as f:\n f.write(message)\n\n with open(output_parameter_path, 'w') as f:\n f.write(message)\n\n with open(output_bool_parameter_path, 'w') as f:\n f.write(\n str(True)) # use either `str()` or `json.dumps()` for bool values.\n\n import json\n with open(output_dict_parameter_path, 'w') as f:\n f.write(json.dumps(input_dict_parameter))\n\n with open(output_list_parameter_path, 'w') as f:\n f.write(json.dumps(input_list_parameter))\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-train": { @@ -122,7 +122,7 @@ "program_path=$(mktemp)\nprintf \"%s\" \"$0\" > \"$program_path\"\npython3 -u \"$program_path\" \"$@\"\n", "\nimport json\nimport inspect\nfrom typing import *\n\n# Copyright 2021 The Kubeflow Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"Classes for input/output types in KFP SDK.\n\nThese are only compatible with v2 Pipelines.\n\"\"\"\n\nimport os\nfrom typing import Dict, Generic, List, Optional, Type, TypeVar, Union\n\n\n_GCS_LOCAL_MOUNT_PREFIX = '/gcs/'\n_MINIO_LOCAL_MOUNT_PREFIX = '/minio/'\n_S3_LOCAL_MOUNT_PREFIX = '/s3/'\n\n\nclass Artifact(object):\n \"\"\"Generic Artifact class.\n\n This class is meant to represent the metadata around an input or output\n machine-learning Artifact. 
Artifacts have URIs, which can either be a location\n on disk (or Cloud storage) or some other resource identifier such as\n an API resource name.\n\n Artifacts carry a `metadata` field, which is a dictionary for storing\n metadata related to this artifact.\n \"\"\"\n TYPE_NAME = 'system.Artifact'\n\n def __init__(self,\n name: Optional[str] = None,\n uri: Optional[str] = None,\n metadata: Optional[Dict] = None):\n \"\"\"Initializes the Artifact with the given name, URI and metadata.\"\"\"\n self.uri = uri or ''\n self.name = name or ''\n self.metadata = metadata or {}\n\n @property\n def path(self):\n return self._get_path()\n\n @path.setter\n def path(self, path):\n self._set_path(path)\n\n def _get_path(self) -> Optional[str]:\n if self.uri.startswith('gs://'):\n return _GCS_LOCAL_MOUNT_PREFIX + self.uri[len('gs://'):]\n elif self.uri.startswith('minio://'):\n return _MINIO_LOCAL_MOUNT_PREFIX + self.uri[len('minio://'):]\n elif self.uri.startswith('s3://'):\n return _S3_LOCAL_MOUNT_PREFIX + self.uri[len('s3://'):]\n return None\n\n def _set_path(self, path):\n if path.startswith(_GCS_LOCAL_MOUNT_PREFIX):\n path = 'gs://' + path[len(_GCS_LOCAL_MOUNT_PREFIX):]\n elif path.startswith(_MINIO_LOCAL_MOUNT_PREFIX):\n path = 'minio://' + path[len(_MINIO_LOCAL_MOUNT_PREFIX):]\n elif path.startswith(_S3_LOCAL_MOUNT_PREFIX):\n path = 's3://' + path[len(_S3_LOCAL_MOUNT_PREFIX):]\n self.uri = path\n\n\nclass Model(Artifact):\n \"\"\"An artifact representing an ML Model.\"\"\"\n TYPE_NAME = 'system.Model'\n\n def __init__(self,\n name: Optional[str] = None,\n uri: Optional[str] = None,\n metadata: Optional[Dict] = None):\n super().__init__(uri=uri, name=name, metadata=metadata)\n\n @property\n def framework(self) -> str:\n return self._get_framework()\n\n def _get_framework(self) -> str:\n return self.metadata.get('framework', '')\n\n @framework.setter\n def framework(self, framework: str):\n self._set_framework(framework)\n\n def _set_framework(self, framework: str):\n 
self.metadata['framework'] = framework\n\n\nclass Dataset(Artifact):\n \"\"\"An artifact representing an ML Dataset.\"\"\"\n TYPE_NAME = 'system.Dataset'\n\n def __init__(self,\n name: Optional[str] = None,\n uri: Optional[str] = None,\n metadata: Optional[Dict] = None):\n super().__init__(uri=uri, name=name, metadata=metadata)\n\n\nclass Metrics(Artifact):\n \"\"\"Represent a simple base Artifact type to store key-value scalar metrics.\"\"\"\n TYPE_NAME = 'system.Metrics'\n\n def __init__(self,\n name: Optional[str] = None,\n uri: Optional[str] = None,\n metadata: Optional[Dict] = None):\n super().__init__(uri=uri, name=name, metadata=metadata)\n\n def log_metric(self, metric: str, value: float):\n \"\"\"Sets a custom scalar metric.\n\n Args:\n metric: Metric key\n value: Value of the metric.\n \"\"\"\n self.metadata[metric] = value\n\n\nclass ClassificationMetrics(Artifact):\n \"\"\"Represents Artifact class to store Classification Metrics.\"\"\"\n TYPE_NAME = 'system.ClassificationMetrics'\n\n def __init__(self,\n name: Optional[str] = None,\n uri: Optional[str] = None,\n metadata: Optional[Dict] = None):\n super().__init__(uri=uri, name=name, metadata=metadata)\n\n def log_roc_data_point(self, fpr: float, tpr: float, threshold: float):\n \"\"\"Logs a single data point in the ROC Curve.\n\n Args:\n fpr: False positive rate value of the data point.\n tpr: True positive rate value of the data point.\n threshold: Threshold value for the data point.\n \"\"\"\n\n roc_reading = {\n 'confidenceThreshold': threshold,\n 'recall': tpr,\n 'falsePositiveRate': fpr\n }\n if 'confidenceMetrics' not in self.metadata.keys():\n self.metadata['confidenceMetrics'] = []\n\n self.metadata['confidenceMetrics'].append(roc_reading)\n\n def log_roc_curve(self, fpr: List[float], tpr: List[float],\n threshold: List[float]):\n \"\"\"Logs an ROC curve.\n\n The list length of fpr, tpr and threshold must be the same.\n\n Args:\n fpr: List of false positive rate values.\n tpr: List of true 
positive rate values.\n threshold: List of threshold values.\n \"\"\"\n if len(fpr) != len(tpr) or len(fpr) != len(threshold) or len(tpr) != len(\n threshold):\n raise ValueError('Length of fpr, tpr and threshold must be the same. '\n 'Got lengths {}, {} and {} respectively.'.format(\n len(fpr), len(tpr), len(threshold)))\n\n for i in range(len(fpr)):\n self.log_roc_data_point(fpr=fpr[i], tpr=tpr[i], threshold=threshold[i])\n\n def set_confusion_matrix_categories(self, categories: List[str]):\n \"\"\"Stores confusion matrix categories.\n\n Args:\n categories: List of strings specifying the categories.\n \"\"\"\n\n self._categories = []\n annotation_specs = []\n for category in categories:\n annotation_spec = {'displayName': category}\n self._categories.append(category)\n annotation_specs.append(annotation_spec)\n\n self._matrix = []\n for row in range(len(self._categories)):\n self._matrix.append({'row': [0] * len(self._categories)})\n\n self._confusion_matrix = {}\n self._confusion_matrix['annotationSpecs'] = annotation_specs\n self._confusion_matrix['rows'] = self._matrix\n self.metadata['confusionMatrix'] = self._confusion_matrix\n\n def log_confusion_matrix_row(self, row_category: str, row: List[float]):\n \"\"\"Logs a confusion matrix row.\n\n Args:\n row_category: Category to which the row belongs.\n row: List of integers specifying the values for the row.\n\n Raises:\n ValueError: If row_category is not in the list of categories\n set in set_categories call.\n \"\"\"\n if row_category not in self._categories:\n raise ValueError('Invalid category: {} passed. Expected one of: {}'.\\\n format(row_category, self._categories))\n\n if len(row) != len(self._categories):\n raise ValueError('Invalid row. 
Expected size: {} got: {}'.\\\n format(len(self._categories), len(row)))\n\n self._matrix[self._categories.index(row_category)] = {'row': row}\n self.metadata['confusionMatrix'] = self._confusion_matrix\n\n def log_confusion_matrix_cell(self, row_category: str, col_category: str,\n value: int):\n \"\"\"Logs a cell in the confusion matrix.\n\n Args:\n row_category: String representing the name of the row category.\n col_category: String representing the name of the column category.\n value: Int value of the cell.\n\n Raises:\n ValueError: If row_category or col_category is not in the list of\n categories set in set_categories.\n \"\"\"\n if row_category not in self._categories:\n raise ValueError('Invalid category: {} passed. Expected one of: {}'.\\\n format(row_category, self._categories))\n\n if col_category not in self._categories:\n raise ValueError('Invalid category: {} passed. Expected one of: {}'.\\\n format(row_category, self._categories))\n\n self._matrix[self._categories.index(row_category)]['row'][\n self._categories.index(col_category)] = value\n self.metadata['confusionMatrix'] = self._confusion_matrix\n\n def log_confusion_matrix(self, categories: List[str],\n matrix: List[List[int]]):\n \"\"\"Logs a confusion matrix.\n\n Args:\n categories: List of the category names.\n matrix: Complete confusion matrix.\n\n Raises:\n ValueError: Length of categories does not match number of rows or columns.\n \"\"\"\n self.set_confusion_matrix_categories(categories)\n\n if len(matrix) != len(categories):\n raise ValueError('Invalid matrix: {} passed for categories: {}'.\\\n format(matrix, categories))\n\n for index in range(len(categories)):\n if len(matrix[index]) != len(categories):\n raise ValueError('Invalid matrix: {} passed for categories: {}'.\\\n format(matrix, categories))\n\n self.log_confusion_matrix_row(categories[index], matrix[index])\n\n self.metadata['confusionMatrix'] = self._confusion_matrix\n\n\nclass SlicedClassificationMetrics(Artifact):\n 
\"\"\"Metrics class representing Sliced Classification Metrics.\n\n Similar to ClassificationMetrics clients using this class are expected to use\n log methods of the class to log metrics with the difference being each log\n method takes a slice to associate the ClassificationMetrics.\n\n \"\"\"\n\n TYPE_NAME = 'system.SlicedClassificationMetrics'\n\n def __init__(self,\n name: Optional[str] = None,\n uri: Optional[str] = None,\n metadata: Optional[Dict] = None):\n super().__init__(uri=uri, name=name, metadata=metadata)\n\n def _upsert_classification_metrics_for_slice(self, slice: str):\n \"\"\"Upserts the classification metrics instance for a slice.\"\"\"\n if slice not in self._sliced_metrics:\n self._sliced_metrics[slice] = ClassificationMetrics()\n\n def _update_metadata(self, slice: str):\n \"\"\"Updates metadata to adhere to the metrics schema.\"\"\"\n self.metadata = {}\n self.metadata['evaluationSlices'] = []\n for slice in self._sliced_metrics.keys():\n slice_metrics = {\n 'slice': slice,\n 'sliceClassificationMetrics': self._sliced_metrics[slice].metadata\n }\n self.metadata['evaluationSlices'].append(slice_metrics)\n\n def log_roc_reading(self, slice: str, threshold: float, tpr: float,\n fpr: float):\n \"\"\"Logs a single data point in the ROC Curve of a slice.\n\n Args:\n slice: String representing slice label.\n threshold: Thresold value for the data point.\n tpr: True positive rate value of the data point.\n fpr: False positive rate value of the data point.\n \"\"\"\n\n self._upsert_classification_metrics_for_slice(slice)\n self._sliced_metrics[slice].log_roc_reading(threshold, tpr, fpr)\n self._update_metadata(slice)\n\n def load_roc_readings(self, slice: str, readings: List[List[float]]):\n \"\"\"Supports bulk loading ROC Curve readings for a slice.\n\n Args:\n slice: String representing slice label.\n readings: A 2-D list providing ROC Curve data points.\n The expected order of the data points is: threshold,\n true_positive_rate, 
false_positive_rate.\n \"\"\"\n self._upsert_classification_metrics_for_slice(slice)\n self._sliced_metrics[slice].load_roc_readings(readings)\n self._update_metadata(slice)\n\n def set_confusion_matrix_categories(self, slice: str, categories: List[str]):\n \"\"\"Stores confusion matrix categories for a slice..\n\n Categories are stored in the internal metrics_utils.ConfusionMatrix\n instance of the slice.\n\n Args:\n slice: String representing slice label.\n categories: List of strings specifying the categories.\n \"\"\"\n self._upsert_classification_metrics_for_slice(slice)\n self._sliced_metrics[slice].set_confusion_matrix_categories(categories)\n self._update_metadata(slice)\n\n def log_confusion_matrix_row(self, slice: str, row_category: str,\n row: List[int]):\n \"\"\"Logs a confusion matrix row for a slice.\n\n Row is updated on the internal metrics_utils.ConfusionMatrix\n instance of the slice.\n\n Args:\n slice: String representing slice label.\n row_category: Category to which the row belongs.\n row: List of integers specifying the values for the row.\n \"\"\"\n self._upsert_classification_metrics_for_slice(slice)\n self._sliced_metrics[slice].log_confusion_matrix_row(row_category, row)\n self._update_metadata(slice)\n\n def log_confusion_matrix_cell(self, slice: str, row_category: str,\n col_category: str, value: int):\n \"\"\"Logs a confusion matrix cell for a slice..\n\n Cell is updated on the internal metrics_utils.ConfusionMatrix\n instance of the slice.\n\n Args:\n slice: String representing slice label.\n row_category: String representing the name of the row category.\n col_category: String representing the name of the column category.\n value: Int value of the cell.\n \"\"\"\n self._upsert_classification_metrics_for_slice(slice)\n self._sliced_metrics[slice].log_confusion_matrix_cell(\n row_category, col_category, value)\n self._update_metadata(slice)\n\n def load_confusion_matrix(self, slice: str, categories: List[str],\n matrix: 
List[List[int]]):\n \"\"\"Supports bulk loading the whole confusion matrix for a slice.\n\n Args:\n slice: String representing slice label.\n categories: List of the category names.\n matrix: Complete confusion matrix.\n \"\"\"\n self._upsert_classification_metrics_for_slice(slice)\n self._sliced_metrics[slice].log_confusion_matrix_cell(categories, matrix)\n self._update_metadata(slice)\n\n\nT = TypeVar('T')\n\n\nclass InputAnnotation():\n \"\"\"Marker type for input artifacts.\"\"\"\n pass\n\n\n\nclass OutputAnnotation():\n \"\"\"Marker type for output artifacts.\"\"\"\n pass\n\n\n# TODO: Use typing.Annotated instead of this hack.\n# With typing.Annotated (Python 3.9+ or typing_extensions package), the\n# following would look like:\n# Input = typing.Annotated[T, InputAnnotation]\n# Output = typing.Annotated[T, OutputAnnotation]\n\n\n# Input represents an Input artifact of type T.\nInput = Union[T, InputAnnotation]\n\n# Output represents an Output artifact of type T.\nOutput = Union[T, OutputAnnotation]\n\n\ndef is_artifact_annotation(typ) -> bool:\n if hasattr(typ, '_subs_tree'): # Python 3.6\n subs_tree = typ._subs_tree()\n return len(subs_tree) == 3 and subs_tree[0] == Union and subs_tree[2] in [InputAnnotation, OutputAnnotation]\n\n if not hasattr(typ, '__origin__'):\n return False\n\n\n if typ.__origin__ != Union and type(typ.__origin__) != type(Union):\n return False\n\n\n if not hasattr(typ, '__args__') or len(typ.__args__) != 2:\n return False\n\n if typ.__args__[1] not in [InputAnnotation, OutputAnnotation]:\n return False\n\n return True\n\ndef is_input_artifact(typ) -> bool:\n \"\"\"Returns True if typ is of type Input[T].\"\"\"\n if not is_artifact_annotation(typ):\n return False\n\n if hasattr(typ, '_subs_tree'): # Python 3.6\n subs_tree = typ._subs_tree()\n return len(subs_tree) == 3 and subs_tree[2] == InputAnnotation\n\n return typ.__args__[1] == InputAnnotation\n\ndef is_output_artifact(typ) -> bool:\n \"\"\"Returns True if typ is of type 
Output[T].\"\"\"\n if not is_artifact_annotation(typ):\n return False\n\n if hasattr(typ, '_subs_tree'): # Python 3.6\n subs_tree = typ._subs_tree()\n return len(subs_tree) == 3 and subs_tree[2] == OutputAnnotation\n\n return typ.__args__[1] == OutputAnnotation\n\ndef get_io_artifact_class(typ):\n if not is_artifact_annotation(typ):\n return None\n if typ == Input or typ == Output:\n return None\n\n if hasattr(typ, '_subs_tree'): # Python 3.6\n subs_tree = typ._subs_tree()\n if len(subs_tree) != 3:\n return None\n return subs_tree[1]\n\n return typ.__args__[0]\n\ndef get_io_artifact_annotation(typ):\n if not is_artifact_annotation(typ):\n return None\n\n if hasattr(typ, '_subs_tree'): # Python 3.6\n subs_tree = typ._subs_tree()\n if len(subs_tree) != 3:\n return None\n return subs_tree[2]\n\n return typ.__args__[1]\n\n\n\n_SCHEMA_TITLE_TO_TYPE: Dict[str, Artifact] = {\n x.TYPE_NAME: x\n for x in [Artifact, Model, Dataset, Metrics, ClassificationMetrics]\n}\n\n\ndef create_runtime_artifact(runtime_artifact: Dict) -> Artifact:\n \"\"\"Creates an Artifact instance from the specified RuntimeArtifact.\n\n Args:\n runtime_artifact: Dictionary representing JSON-encoded RuntimeArtifact.\n \"\"\"\n schema_title = runtime_artifact.get('type', {}).get('schemaTitle', '')\n\n artifact_type = _SCHEMA_TITLE_TO_TYPE.get(schema_title)\n if not artifact_type:\n artifact_type = Artifact\n return artifact_type(\n uri=runtime_artifact.get('uri', ''),\n name=runtime_artifact.get('name', ''),\n metadata=runtime_artifact.get('metadata', {}),\n )\n\nclass InputPath:\n '''When creating component from function, :class:`.InputPath` should be used as function parameter annotation to tell the system to pass the *data file path* to the function instead of passing the actual data.'''\n def __init__(self, type=None):\n self.type = type\n\nclass OutputPath:\n '''When creating component from function, :class:`.OutputPath` should be used as function parameter annotation to tell the system that the 
function wants to output data by writing it into a file with the given path instead of returning the data from the function.'''\n def __init__(self, type=None):\n self.type = type\n\nclass Executor():\n \"\"\"Executor executes v2-based Python function components.\"\"\"\n\n def __init__(self, executor_input: Dict, function_to_execute: Callable):\n self._func = function_to_execute\n self._input = executor_input\n self._input_artifacts: Dict[str, Artifact] = {}\n self._output_artifacts: Dict[str, Artifact] = {}\n\n for name, artifacts in self._input.get('inputs', {}).get('artifacts',\n {}).items():\n artifacts_list = artifacts.get('artifacts')\n if artifacts_list:\n self._input_artifacts[name] = self._make_input_artifact(\n artifacts_list[0])\n\n for name, artifacts in self._input.get('outputs', {}).get('artifacts',\n {}).items():\n artifacts_list = artifacts.get('artifacts')\n if artifacts_list:\n self._output_artifacts[name] = self._make_output_artifact(\n artifacts_list[0])\n\n self._return_annotation = inspect.signature(self._func).return_annotation\n self._executor_output = {}\n\n @classmethod\n def _make_input_artifact(cls, runtime_artifact: Dict):\n return create_runtime_artifact(runtime_artifact)\n\n @classmethod\n def _make_output_artifact(cls, runtime_artifact: Dict):\n import os\n artifact = create_runtime_artifact(runtime_artifact)\n os.makedirs(os.path.dirname(artifact.path), exist_ok=True)\n return artifact\n\n def _get_input_artifact(self, name: str):\n return self._input_artifacts.get(name)\n\n def _get_output_artifact(self, name: str):\n return self._output_artifacts.get(name)\n\n def _get_input_parameter_value(self, parameter_name: str, parameter_type: Any):\n parameter = self._input.get('inputs', {}).get('parameters',\n {}).get(parameter_name, None)\n if parameter is None:\n return None\n\n if parameter.get('stringValue'):\n if parameter_type == str:\n return parameter['stringValue']\n elif parameter_type == bool:\n # Use `.lower()` so it can also 
handle 'True' and 'False' (resulted from\n # `str(True)` and `str(False)`, respectively.\n return json.loads(parameter['stringValue'].lower())\n else:\n return json.loads(parameter['stringValue'])\n elif parameter.get('intValue'):\n return int(parameter['intValue'])\n elif parameter.get('doubleValue'):\n return float(parameter['doubleValue'])\n\n def _get_output_parameter_path(self, parameter_name: str):\n parameter_name = self._maybe_strip_path_suffix(parameter_name)\n parameter = self._input.get('outputs',\n {}).get('parameters',\n {}).get(parameter_name, None)\n if parameter is None:\n return None\n\n import os\n path = parameter.get('outputFile', None)\n if path:\n os.makedirs(os.path.dirname(path), exist_ok=True)\n return path\n\n def _get_output_artifact_path(self, artifact_name: str):\n artifact_name = self._maybe_strip_path_suffix(artifact_name)\n output_artifact = self._output_artifacts.get(artifact_name)\n if not output_artifact:\n raise ValueError(\n 'Failed to get output artifact path for artifact name {}'.format(\n artifact_name))\n return output_artifact.path\n\n def _get_input_artifact_path(self, artifact_name: str):\n artifact_name = self._maybe_strip_path_suffix(artifact_name)\n input_artifact = self._input_artifacts.get(artifact_name)\n if not input_artifact:\n raise ValueError(\n 'Failed to get input artifact path for artifact name {}'.format(\n artifact_name))\n return input_artifact.path\n\n def _write_output_parameter_value(self, name: str,\n value: Union[str, int, float, bool, dict,\n list, Dict, List]):\n if type(value) == str:\n output = {'stringValue': value}\n elif type(value) == int:\n output = {'intValue': value}\n elif type(value) == float:\n output = {'doubleValue': value}\n else:\n # For bool, list, dict, List, Dict, json serialize the value.\n output = {'stringValue': json.dumps(value)}\n\n if not self._executor_output.get('parameters'):\n self._executor_output['parameters'] = {}\n\n self._executor_output['parameters'][name] = 
output\n\n def _write_output_artifact_payload(self, name: str, value: Any):\n path = self._get_output_artifact_path(name)\n with open(path, 'w') as f:\n f.write(str(value))\n\n # TODO: extract to a util\n @classmethod\n def _get_short_type_name(cls, type_name: str) -> str:\n \"\"\"Extracts the short form type name.\n\n This method is used for looking up serializer for a given type.\n\n For example:\n typing.List -> List\n typing.List[int] -> List\n typing.Dict[str, str] -> Dict\n List -> List\n str -> str\n\n Args:\n type_name: The original type name.\n\n Returns:\n The short form type name or the original name if pattern doesn't match.\n \"\"\"\n import re\n match = re.match('(typing\\.)?(?P\\w+)(?:\\[.+\\])?', type_name)\n if match:\n return match.group('type')\n else:\n return type_name\n\n # TODO: merge with type_utils.is_parameter_type\n @classmethod\n def _is_parameter(cls, annotation: Any) -> bool:\n if type(annotation) == type:\n return annotation in [str, int, float, bool, dict, list]\n\n # Annotation could be, for instance `typing.Dict[str, str]`, etc.\n return cls._get_short_type_name(str(annotation)) in ['Dict', 'List']\n\n @classmethod\n def _is_artifact(cls, annotation: Any) -> bool:\n if type(annotation) == type:\n return issubclass(annotation, Artifact)\n return False\n\n @classmethod\n def _is_named_tuple(cls, annotation: Any) -> bool:\n if type(annotation) == type:\n return issubclass(annotation, tuple) and hasattr(\n annotation, '_fields') and hasattr(annotation, '__annotations__')\n return False\n\n def _handle_single_return_value(self, output_name: str, annotation_type: Any,\n return_value: Any):\n if self._is_parameter(annotation_type):\n if type(return_value) != annotation_type:\n raise ValueError(\n 'Function `{}` returned value of type {}; want type {}'.format(\n self._func.__name__, type(return_value), annotation_type))\n self._write_output_parameter_value(output_name, return_value)\n elif self._is_artifact(annotation_type):\n 
self._write_output_artifact_payload(output_name, return_value)\n else:\n raise RuntimeError(\n 'Unknown return type: {}. Must be one of `str`, `int`, `float`, or a'\n ' subclass of `Artifact`'.format(annotation_type))\n\n def _write_executor_output(self, func_output: Optional[Any] = None):\n if self._output_artifacts:\n self._executor_output['artifacts'] = {}\n\n for name, artifact in self._output_artifacts.items():\n runtime_artifact = {\n 'name': artifact.name,\n 'uri': artifact.uri,\n 'metadata': artifact.metadata,\n }\n artifacts_list = {'artifacts': [runtime_artifact]}\n\n self._executor_output['artifacts'][name] = artifacts_list\n\n if func_output is not None:\n if self._is_parameter(self._return_annotation) or self._is_artifact(\n self._return_annotation):\n # Note: single output is named `Output` in component.yaml.\n self._handle_single_return_value('Output', self._return_annotation,\n func_output)\n elif self._is_named_tuple(self._return_annotation):\n if len(self._return_annotation._fields) != len(func_output):\n raise RuntimeError(\n 'Expected {} return values from function `{}`, got {}'.format(\n len(self._return_annotation._fields), self._func.__name__,\n len(func_output)))\n for i in range(len(self._return_annotation._fields)):\n field = self._return_annotation._fields[i]\n field_type = self._return_annotation.__annotations__[field]\n if type(func_output) == tuple:\n field_value = func_output[i]\n else:\n field_value = getattr(func_output, field)\n self._handle_single_return_value(field, field_type, field_value)\n else:\n raise RuntimeError(\n 'Unknown return type: {}. 
Must be one of `str`, `int`, `float`, a'\n ' subclass of `Artifact`, or a NamedTuple collection of these types.'\n .format(self._return_annotation))\n\n import os\n os.makedirs(\n os.path.dirname(self._input['outputs']['outputFile']), exist_ok=True)\n with open(self._input['outputs']['outputFile'], 'w') as f:\n f.write(json.dumps(self._executor_output))\n\n def _maybe_strip_path_suffix(self, name) -> str:\n if name.endswith('_path'):\n name = name[0:-len('_path')]\n if name.endswith('_file'):\n name = name[0:-len('_file')]\n return name\n\n def execute(self):\n annotations = inspect.getfullargspec(self._func).annotations\n\n # Function arguments.\n func_kwargs = {}\n\n for k, v in annotations.items():\n if k == 'return':\n continue\n\n if self._is_parameter(v):\n func_kwargs[k] = self._get_input_parameter_value(k, v)\n\n if is_artifact_annotation(v):\n if is_input_artifact(v):\n func_kwargs[k] = self._get_input_artifact(k)\n if is_output_artifact(v):\n func_kwargs[k] = self._get_output_artifact(k)\n\n elif isinstance(v, OutputPath):\n if self._is_parameter(v.type):\n func_kwargs[k] = self._get_output_parameter_path(k)\n else:\n func_kwargs[k] = self._get_output_artifact_path(k)\n elif isinstance(v, InputPath):\n func_kwargs[k] = self._get_input_artifact_path(k)\n\n result = self._func(**func_kwargs)\n self._write_executor_output(result)\n\n\ndef train(\n # Use InputPath to get a locally accessible path for the input artifact\n # of type `Dataset`.\n dataset_one_path: InputPath('Dataset'),\n # Use Input[T] to get a metadata-rich handle to the input artifact\n # of type `Dataset`.\n dataset_two: Input[Dataset],\n # An input parameter of type string.\n message: str,\n # Use Output[T] to get a metadata-rich handle to the output artifact\n # of type `Dataset`.\n model: Output[Model],\n # An input parameter of type bool.\n input_bool: bool,\n # An input parameter of type dict.\n input_dict: Dict[str, int],\n # An input parameter of type List[str].\n input_list: 
List[str],\n # An input parameter of type int with a default value.\n num_steps: int = 100,\n):\n \"\"\"Dummy Training step\"\"\"\n with open(dataset_one_path, 'r') as input_file:\n dataset_one_contents = input_file.read()\n\n with open(dataset_two.path, 'r') as input_file:\n dataset_two_contents = input_file.read()\n\n line = (f'dataset_one_contents: {dataset_one_contents} || '\n f'dataset_two_contents: {dataset_two_contents} || '\n f'message: {message} || '\n f'input_bool: {input_bool}, type {type(input_bool)} || '\n f'input_dict: {input_dict}, type {type(input_dict)} || '\n f'input_list: {input_list}, type {type(input_list)} \\n')\n\n with open(model.path, 'w') as output_file:\n for i in range(num_steps):\n output_file.write('Step {}\\n{}\\n=====\\n'.format(i, line))\n\n # model is an instance of Model artifact, which has a .metadata dictionary\n # to store arbitrary metadata for the output artifact.\n model.metadata['accuracy'] = 0.9\n\n\ndef executor_main():\n import argparse\n import json\n\n parser = argparse.ArgumentParser(description='Process some integers.')\n parser.add_argument('--executor_input', type=str)\n parser.add_argument('--function_to_execute', type=str)\n\n args, _ = parser.parse_known_args()\n executor_input = json.loads(args.executor_input)\n function_to_execute = globals()[args.function_to_execute]\n\n executor = Executor(executor_input=executor_input,\n function_to_execute=function_to_execute)\n\n executor.execute()\n\n\nif __name__ == '__main__':\n executor_main()\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } } } diff --git a/frontend/mock-backend/data/v2/pipeline/lightweight_python_functions_v2_pipeline_rev.yaml b/frontend/mock-backend/data/v2/pipeline/lightweight_python_functions_v2_pipeline_rev.yaml index 58b6df51e79..4876c05feaf 100644 --- a/frontend/mock-backend/data/v2/pipeline/lightweight_python_functions_v2_pipeline_rev.yaml +++ 
b/frontend/mock-backend/data/v2/pipeline/lightweight_python_functions_v2_pipeline_rev.yaml @@ -6,7 +6,7 @@ deploymentSpec: executors: exec-preprocess: container: - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 args: - --executor_input - '{{$}}' @@ -105,7 +105,7 @@ deploymentSpec: \ Model artifact, which has a .metadata dictionary\n # to store arbitrary\ \ metadata for the output artifact.\n model.metadata['accuracy'] = 0.9\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 components: comp-preprocess: inputDefinitions: diff --git a/frontend/mock-backend/data/v2/pipeline/pipeline_with_loops_and_conditions.json b/frontend/mock-backend/data/v2/pipeline/pipeline_with_loops_and_conditions.json index 6f4f6f7a7ed..f4bf28af47e 100644 --- a/frontend/mock-backend/data/v2/pipeline/pipeline_with_loops_and_conditions.json +++ b/frontend/mock-backend/data/v2/pipeline/pipeline_with_loops_and_conditions.json @@ -929,7 +929,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef args_generator_op() -> list:\n return [\n {\n 'A_a': '1',\n 'B_b': ['2', '20'],\n },\n {\n 'A_a': '10',\n 'B_b': ['22', '222'],\n },\n ]\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-args-generator-op-2": { @@ -949,7 +949,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef args_generator_op() -> list:\n return [\n {\n 'A_a': '1',\n 'B_b': ['2', '20'],\n },\n {\n 'A_a': '10',\n 'B_b': ['22', '222'],\n },\n ]\n\n" ], - 
"image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-flip-coin-op": { @@ -969,7 +969,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0, 1) == 0 else 'tails'\n return result\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-print-struct": { @@ -989,7 +989,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_struct(struct: dict):\n print(struct)\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-print-text": { @@ -1009,7 +1009,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg: {msg}, msg2: {msg2}')\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-print-text-2": { @@ -1029,7 +1029,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import 
*\nfrom typing import *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg: {msg}, msg2: {msg2}')\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-print-text-3": { @@ -1049,7 +1049,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg: {msg}, msg2: {msg2}')\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-print-text-4": { @@ -1069,7 +1069,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg: {msg}, msg2: {msg2}')\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-print-text-5": { @@ -1089,7 +1089,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg: {msg}, msg2: {msg2}')\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-print-text-6": { @@ -1109,7 +1109,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path 
\"$program_path/ephemeral_component.py\" \"$@\"\n", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg: {msg}, msg2: {msg2}')\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-print-text-7": { @@ -1129,7 +1129,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg: {msg}, msg2: {msg2}')\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-print-text-8": { @@ -1149,7 +1149,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg: {msg}, msg2: {msg2}')\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-print-text-9": { @@ -1169,7 +1169,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg: {msg}, msg2: {msg2}')\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } } } diff --git a/frontend/mock-backend/data/v2/pipeline/pipeline_with_loops_and_conditions.yaml 
b/frontend/mock-backend/data/v2/pipeline/pipeline_with_loops_and_conditions.yaml index f12bc94a805..4311bd5dcbe 100644 --- a/frontend/mock-backend/data/v2/pipeline/pipeline_with_loops_and_conditions.yaml +++ b/frontend/mock-backend/data/v2/pipeline/pipeline_with_loops_and_conditions.yaml @@ -588,7 +588,7 @@ deploymentSpec: \ 'A_a': '1',\n 'B_b': ['2', '20'],\n },\n \ \ {\n 'A_a': '10',\n 'B_b': ['22', '222'],\n \ \ },\n ]\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-args-generator-op-2: container: args: @@ -617,7 +617,7 @@ deploymentSpec: \ 'A_a': '1',\n 'B_b': ['2', '20'],\n },\n \ \ {\n 'A_a': '10',\n 'B_b': ['22', '222'],\n \ \ },\n ]\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-flip-coin-op: container: args: @@ -645,7 +645,7 @@ deploymentSpec: \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-struct: container: args: @@ -671,7 +671,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_struct(struct: dict):\n print(struct)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text: container: args: @@ -698,7 +698,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-2: container: args: @@ -725,7 +725,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + 
image: public.ecr.aws/docker/library/python:3.12 exec-print-text-3: container: args: @@ -752,7 +752,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-4: container: args: @@ -779,7 +779,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-5: container: args: @@ -806,7 +806,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-6: container: args: @@ -833,7 +833,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-7: container: args: @@ -860,7 +860,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-8: container: args: @@ -887,7 +887,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: 
public.ecr.aws/docker/library/python:3.12 exec-print-text-9: container: args: @@ -914,7 +914,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-loops-and-conditions-multi-layers root: diff --git a/frontend/mock-backend/data/v2/pipeline/protobuf_value_params_v2.json b/frontend/mock-backend/data/v2/pipeline/protobuf_value_params_v2.json index 4bbf2bcb0df..3017b8a0677 100644 --- a/frontend/mock-backend/data/v2/pipeline/protobuf_value_params_v2.json +++ b/frontend/mock-backend/data/v2/pipeline/protobuf_value_params_v2.json @@ -48,7 +48,7 @@ "program_path=$(mktemp -d)\nprintf \"%s\" \"$0\" > \"$program_path/ephemeral_component.py\"\npython3 -m kfp.components.executor_main --component_module_path \"$program_path/ephemeral_component.py\" \"$@\"\n", "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef print_params(\n int_param: int = 1234, \n double_param: float = 56.78, \n string_param: str = 'lorem ipsum', \n bool_param: bool = True, \n list_string_param: List[str] = ['lorem', 'ipsum'], \n list_int_param: List[int] = [123, 456, 789], \n struct_param: Dict[str, int] = { 'key_1': 12345, 'key_2': 6789 }):\n print(\"int_param: \", int_param)\n print(\"double_param: \", double_param)\n print(\"string_param: \", string_param)\n print(\"bool_param: \", bool_param)\n print(\"list_string_param: \", list_string_param)\n print(\"list_int_param: \", list_int_param)\n print(\"struct_param: \", struct_param)\n\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } } } diff --git a/frontend/mock-backend/data/v2/pipeline/xgboost_sample_pipeline.json b/frontend/mock-backend/data/v2/pipeline/xgboost_sample_pipeline.json index 00e235989eb..763abfd2415 100644 --- 
a/frontend/mock-backend/data/v2/pipeline/xgboost_sample_pipeline.json +++ b/frontend/mock-backend/data/v2/pipeline/xgboost_sample_pipeline.json @@ -327,7 +327,7 @@ "-c", "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\ndef convert_csv_to_apache_parquet(\n data_path,\n output_data_path,\n):\n '''Converts CSV table to Apache Parquet.\n\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov \n '''\n from pyarrow import csv, parquet\n\n table = csv.read_csv(data_path)\n parquet.write_table(table, output_data_path)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Convert csv to apache parquet', description='Converts CSV table to Apache Parquet.\\n\\n [Apache Parquet](https://parquet.apache.org/)\\n\\n Annotations:\\n author: Alexey Volkov ')\n_parser.add_argument(\"--data\", dest=\"data_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--output-data\", dest=\"output_data_path\", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n_output_files = _parsed_args.pop(\"_output_paths\", [])\n\n_outputs = convert_csv_to_apache_parquet(**_parsed_args)\n\n_output_serializers = [\n\n]\n\nimport os\nfor idx, output_file in enumerate(_output_files):\n try:\n os.makedirs(os.path.dirname(output_file))\n except OSError:\n pass\n with open(output_file, 'w') as f:\n f.write(_output_serializers[idx](_outputs[idx]))\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-xgboost-predict": { @@ -351,7 +351,7 @@ "-c", "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\ndef xgboost_predict(\n data_path, # Also supports LibSVM\n model_path,\n predictions_path,\n label_column = None,\n):\n '''Make predictions using 
a trained XGBoost model.\n\n Args:\n data_path: Path for the feature data in CSV format.\n model_path: Path for the trained model in binary XGBoost format.\n predictions_path: Output path for the predictions.\n label_column: Column containing the label data.\n\n Annotations:\n author: Alexey Volkov \n '''\n from pathlib import Path\n\n import numpy\n import pandas\n import xgboost\n\n df = pandas.read_csv(\n data_path,\n )\n\n if label_column is not None:\n df = df.drop(columns=[df.columns[label_column]])\n\n testing_data = xgboost.DMatrix(\n data=df,\n )\n\n model = xgboost.Booster(model_file=model_path)\n\n predictions = model.predict(testing_data)\n\n Path(predictions_path).parent.mkdir(parents=True, exist_ok=True)\n numpy.savetxt(predictions_path, predictions)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Xgboost predict', description='Make predictions using a trained XGBoost model.\\n\\n Args:\\n data_path: Path for the feature data in CSV format.\\n model_path: Path for the trained model in binary XGBoost format.\\n predictions_path: Output path for the predictions.\\n label_column: Column containing the label data.\\n\\n Annotations:\\n author: Alexey Volkov ')\n_parser.add_argument(\"--data\", dest=\"data_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", dest=\"model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--label-column\", dest=\"label_column\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--predictions\", dest=\"predictions_path\", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = xgboost_predict(**_parsed_args)\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-xgboost-predict-2": { @@ -375,7 +375,7 @@ "-c", "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n 
os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\ndef xgboost_predict(\n data_path,\n model_path,\n predictions_path,\n label_column_name = None,\n):\n '''Make predictions using a trained XGBoost model.\n\n Args:\n data_path: Path for the feature data in Apache Parquet format.\n model_path: Path for the trained model in binary XGBoost format.\n predictions_path: Output path for the predictions.\n label_column_name: Optional. Name of the column containing the label data that is excluded during the prediction.\n\n Annotations:\n author: Alexey Volkov \n '''\n from pathlib import Path\n\n import numpy\n import pandas\n import xgboost\n\n # Loading data\n df = pandas.read_parquet(data_path)\n if label_column_name:\n df = df.drop(columns=[label_column_name])\n\n evaluation_data = xgboost.DMatrix(\n data=df,\n )\n\n # Training\n model = xgboost.Booster(model_file=model_path)\n\n predictions = model.predict(evaluation_data)\n\n Path(predictions_path).parent.mkdir(parents=True, exist_ok=True)\n numpy.savetxt(predictions_path, predictions)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Xgboost predict', description='Make predictions using a trained XGBoost model.\\n\\n Args:\\n data_path: Path for the feature data in Apache Parquet format.\\n model_path: Path for the trained model in binary XGBoost format.\\n predictions_path: Output path for the predictions.\\n label_column_name: Optional. 
Name of the column containing the label data that is excluded during the prediction.\\n\\n Annotations:\\n author: Alexey Volkov ')\n_parser.add_argument(\"--data\", dest=\"data_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", dest=\"model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--label-column-name\", dest=\"label_column_name\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--predictions\", dest=\"predictions_path\", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = xgboost_predict(**_parsed_args)\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-xgboost-predict-3": { @@ -399,7 +399,7 @@ "-c", "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\ndef xgboost_predict(\n data_path,\n model_path,\n predictions_path,\n label_column_name = None,\n):\n '''Make predictions using a trained XGBoost model.\n\n Args:\n data_path: Path for the feature data in Apache Parquet format.\n model_path: Path for the trained model in binary XGBoost format.\n predictions_path: Output path for the predictions.\n label_column_name: Optional. 
Name of the column containing the label data that is excluded during the prediction.\n\n Annotations:\n author: Alexey Volkov \n '''\n from pathlib import Path\n\n import numpy\n import pandas\n import xgboost\n\n # Loading data\n df = pandas.read_parquet(data_path)\n if label_column_name:\n df = df.drop(columns=[label_column_name])\n\n evaluation_data = xgboost.DMatrix(\n data=df,\n )\n\n # Training\n model = xgboost.Booster(model_file=model_path)\n\n predictions = model.predict(evaluation_data)\n\n Path(predictions_path).parent.mkdir(parents=True, exist_ok=True)\n numpy.savetxt(predictions_path, predictions)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Xgboost predict', description='Make predictions using a trained XGBoost model.\\n\\n Args:\\n data_path: Path for the feature data in Apache Parquet format.\\n model_path: Path for the trained model in binary XGBoost format.\\n predictions_path: Output path for the predictions.\\n label_column_name: Optional. Name of the column containing the label data that is excluded during the prediction.\\n\\n Annotations:\\n author: Alexey Volkov ')\n_parser.add_argument(\"--data\", dest=\"data_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", dest=\"model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--label-column-name\", dest=\"label_column_name\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--predictions\", dest=\"predictions_path\", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = xgboost_predict(**_parsed_args)\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-xgboost-predict-4": { @@ -423,7 +423,7 @@ "-c", "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\ndef 
xgboost_predict(\n data_path, # Also supports LibSVM\n model_path,\n predictions_path,\n label_column = None,\n):\n '''Make predictions using a trained XGBoost model.\n\n Args:\n data_path: Path for the feature data in CSV format.\n model_path: Path for the trained model in binary XGBoost format.\n predictions_path: Output path for the predictions.\n label_column: Column containing the label data.\n\n Annotations:\n author: Alexey Volkov \n '''\n from pathlib import Path\n\n import numpy\n import pandas\n import xgboost\n\n df = pandas.read_csv(\n data_path,\n )\n\n if label_column is not None:\n df = df.drop(columns=[df.columns[label_column]])\n\n testing_data = xgboost.DMatrix(\n data=df,\n )\n\n model = xgboost.Booster(model_file=model_path)\n\n predictions = model.predict(testing_data)\n\n Path(predictions_path).parent.mkdir(parents=True, exist_ok=True)\n numpy.savetxt(predictions_path, predictions)\n\nimport argparse\n_parser = argparse.ArgumentParser(prog='Xgboost predict', description='Make predictions using a trained XGBoost model.\\n\\n Args:\\n data_path: Path for the feature data in CSV format.\\n model_path: Path for the trained model in binary XGBoost format.\\n predictions_path: Output path for the predictions.\\n label_column: Column containing the label data.\\n\\n Annotations:\\n author: Alexey Volkov ')\n_parser.add_argument(\"--data\", dest=\"data_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", dest=\"model_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--label-column\", dest=\"label_column\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--predictions\", dest=\"predictions_path\", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = xgboost_predict(**_parsed_args)\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" 
} }, "exec-xgboost-train": { @@ -459,7 +459,7 @@ "-c", "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\ndef xgboost_train(\n training_data_path, # Also supports LibSVM\n model_path,\n model_config_path,\n starting_model_path = None,\n\n label_column = 0,\n num_iterations = 10,\n booster_params = None,\n\n # Booster parameters\n objective = 'reg:squarederror',\n booster = 'gbtree',\n learning_rate = 0.3,\n min_split_loss = 0,\n max_depth = 6,\n):\n '''Train an XGBoost model.\n\n Args:\n training_data_path: Path for the training data in CSV format.\n model_path: Output path for the trained model in binary XGBoost format.\n model_config_path: Output path for the internal parameter configuration of Booster as a JSON string.\n starting_model_path: Path for the existing trained model to start from.\n label_column: Column containing the label data.\n num_boost_rounds: Number of boosting iterations.\n booster_params: Parameters for the booster. 
See https://xgboost.readthedocs.io/en/latest/parameter.html\n objective: The learning task and the corresponding learning objective.\n See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\n The most common values are:\n \"reg:squarederror\" - Regression with squared loss (default).\n \"reg:logistic\" - Logistic regression.\n \"binary:logistic\" - Logistic regression for binary classification, output probability.\n \"binary:logitraw\" - Logistic regression for binary classification, output score before logistic transformation\n \"rank:pairwise\" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized\n \"rank:ndcg\" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized\n\n Annotations:\n author: Alexey Volkov \n '''\n import pandas\n import xgboost\n\n df = pandas.read_csv(\n training_data_path,\n )\n\n training_data = xgboost.DMatrix(\n data=df.drop(columns=[df.columns[label_column]]),\n label=df[df.columns[label_column]],\n )\n\n booster_params = booster_params or {}\n booster_params.setdefault('objective', objective)\n booster_params.setdefault('booster', booster)\n booster_params.setdefault('learning_rate', learning_rate)\n booster_params.setdefault('min_split_loss', min_split_loss)\n booster_params.setdefault('max_depth', max_depth)\n\n starting_model = None\n if starting_model_path:\n starting_model = xgboost.Booster(model_file=starting_model_path)\n\n model = xgboost.train(\n params=booster_params,\n dtrain=training_data,\n num_boost_round=num_iterations,\n xgb_model=starting_model\n )\n\n # Saving the model in binary format\n model.save_model(model_path)\n\n model_config_str = model.save_config()\n with open(model_config_path, 'w') as model_config_file:\n model_config_file.write(model_config_str)\n\nimport json\nimport argparse\n_parser = argparse.ArgumentParser(prog='Xgboost train', description='Train an XGBoost model.\\n\\n Args:\\n 
training_data_path: Path for the training data in CSV format.\\n model_path: Output path for the trained model in binary XGBoost format.\\n model_config_path: Output path for the internal parameter configuration of Booster as a JSON string.\\n starting_model_path: Path for the existing trained model to start from.\\n label_column: Column containing the label data.\\n num_boost_rounds: Number of boosting iterations.\\n booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html\\n objective: The learning task and the corresponding learning objective.\\n See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\\n The most common values are:\\n \"reg:squarederror\" - Regression with squared loss (default).\\n \"reg:logistic\" - Logistic regression.\\n \"binary:logistic\" - Logistic regression for binary classification, output probability.\\n \"binary:logitraw\" - Logistic regression for binary classification, output score before logistic transformation\\n \"rank:pairwise\" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized\\n \"rank:ndcg\" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized\\n\\n Annotations:\\n author: Alexey Volkov ')\n_parser.add_argument(\"--training-data\", dest=\"training_data_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--starting-model\", dest=\"starting_model_path\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--label-column\", dest=\"label_column\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--num-iterations\", dest=\"num_iterations\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--booster-params\", dest=\"booster_params\", type=json.loads, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--objective\", dest=\"objective\", 
type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--booster\", dest=\"booster\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--learning-rate\", dest=\"learning_rate\", type=float, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--min-split-loss\", dest=\"min_split_loss\", type=float, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--max-depth\", dest=\"max_depth\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", dest=\"model_path\", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model-config\", dest=\"model_config_path\", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = xgboost_train(**_parsed_args)\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } }, "exec-xgboost-train-2": { @@ -495,7 +495,7 @@ "-c", "def _make_parent_dirs_and_return_path(file_path: str):\n import os\n os.makedirs(os.path.dirname(file_path), exist_ok=True)\n return file_path\n\ndef xgboost_train(\n training_data_path,\n model_path,\n model_config_path,\n label_column_name,\n\n starting_model_path = None,\n\n num_iterations = 10,\n booster_params = None,\n\n # Booster parameters\n objective = 'reg:squarederror',\n booster = 'gbtree',\n learning_rate = 0.3,\n min_split_loss = 0,\n max_depth = 6,\n):\n '''Train an XGBoost model.\n\n Args:\n training_data_path: Path for the training data in Apache Parquet format.\n model_path: Output path for the trained model in binary XGBoost format.\n model_config_path: Output path for the internal parameter configuration of Booster as a JSON string.\n starting_model_path: Path for the existing trained model to start from.\n label_column_name: Name of the column containing the label data.\n num_boost_rounds: Number of boosting iterations.\n 
booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html\n objective: The learning task and the corresponding learning objective.\n See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\n The most common values are:\n \"reg:squarederror\" - Regression with squared loss (default).\n \"reg:logistic\" - Logistic regression.\n \"binary:logistic\" - Logistic regression for binary classification, output probability.\n \"binary:logitraw\" - Logistic regression for binary classification, output score before logistic transformation\n \"rank:pairwise\" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized\n \"rank:ndcg\" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized\n\n Annotations:\n author: Alexey Volkov \n '''\n import pandas\n import xgboost\n\n # Loading data\n df = pandas.read_parquet(training_data_path)\n training_data = xgboost.DMatrix(\n data=df.drop(columns=[label_column_name]),\n label=df[[label_column_name]],\n )\n # Training\n booster_params = booster_params or {}\n booster_params.setdefault('objective', objective)\n booster_params.setdefault('booster', booster)\n booster_params.setdefault('learning_rate', learning_rate)\n booster_params.setdefault('min_split_loss', min_split_loss)\n booster_params.setdefault('max_depth', max_depth)\n\n starting_model = None\n if starting_model_path:\n starting_model = xgboost.Booster(model_file=starting_model_path)\n\n model = xgboost.train(\n params=booster_params,\n dtrain=training_data,\n num_boost_round=num_iterations,\n xgb_model=starting_model\n )\n\n # Saving the model in binary format\n model.save_model(model_path)\n\n model_config_str = model.save_config()\n with open(model_config_path, 'w') as model_config_file:\n model_config_file.write(model_config_str)\n\nimport json\nimport argparse\n_parser = argparse.ArgumentParser(prog='Xgboost train', 
description='Train an XGBoost model.\\n\\n Args:\\n training_data_path: Path for the training data in Apache Parquet format.\\n model_path: Output path for the trained model in binary XGBoost format.\\n model_config_path: Output path for the internal parameter configuration of Booster as a JSON string.\\n starting_model_path: Path for the existing trained model to start from.\\n label_column_name: Name of the column containing the label data.\\n num_boost_rounds: Number of boosting iterations.\\n booster_params: Parameters for the booster. See https://xgboost.readthedocs.io/en/latest/parameter.html\\n objective: The learning task and the corresponding learning objective.\\n See https://xgboost.readthedocs.io/en/latest/parameter.html#learning-task-parameters\\n The most common values are:\\n \"reg:squarederror\" - Regression with squared loss (default).\\n \"reg:logistic\" - Logistic regression.\\n \"binary:logistic\" - Logistic regression for binary classification, output probability.\\n \"binary:logitraw\" - Logistic regression for binary classification, output score before logistic transformation\\n \"rank:pairwise\" - Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized\\n \"rank:ndcg\" - Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized\\n\\n Annotations:\\n author: Alexey Volkov ')\n_parser.add_argument(\"--training-data\", dest=\"training_data_path\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--label-column-name\", dest=\"label_column_name\", type=str, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--starting-model\", dest=\"starting_model_path\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--num-iterations\", dest=\"num_iterations\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--booster-params\", dest=\"booster_params\", type=json.loads, required=False, 
default=argparse.SUPPRESS)\n_parser.add_argument(\"--objective\", dest=\"objective\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--booster\", dest=\"booster\", type=str, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--learning-rate\", dest=\"learning_rate\", type=float, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--min-split-loss\", dest=\"min_split_loss\", type=float, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--max-depth\", dest=\"max_depth\", type=int, required=False, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model\", dest=\"model_path\", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parser.add_argument(\"--model-config\", dest=\"model_config_path\", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\n_outputs = xgboost_train(**_parsed_args)\n" ], - "image": "python:3.9" + "image": "public.ecr.aws/docker/library/python:3.12" } } } diff --git a/frontend/mock-backend/data/v2/pipeline/xgboost_sample_pipeline.yaml b/frontend/mock-backend/data/v2/pipeline/xgboost_sample_pipeline.yaml index c5612c190d0..6769efa54c9 100644 --- a/frontend/mock-backend/data/v2/pipeline/xgboost_sample_pipeline.yaml +++ b/frontend/mock-backend/data/v2/pipeline/xgboost_sample_pipeline.yaml @@ -253,7 +253,7 @@ deploymentSpec: \ try:\n os.makedirs(os.path.dirname(output_file))\n except\ \ OSError:\n pass\n with open(output_file, 'w') as f:\n \ \ f.write(_output_serializers[idx](_outputs[idx]))\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-predict: container: args: @@ -304,7 +304,7 @@ deploymentSpec: _parser.add_argument(\"--predictions\", dest=\"predictions_path\", type=_make_parent_dirs_and_return_path,\ \ required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = 
xgboost_predict(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-predict-2: container: args: @@ -358,7 +358,7 @@ deploymentSpec: predictions_path\", type=_make_parent_dirs_and_return_path, required=True,\ \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_predict(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-predict-3: container: args: @@ -412,7 +412,7 @@ deploymentSpec: predictions_path\", type=_make_parent_dirs_and_return_path, required=True,\ \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_predict(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-predict-4: container: args: @@ -463,7 +463,7 @@ deploymentSpec: _parser.add_argument(\"--predictions\", dest=\"predictions_path\", type=_make_parent_dirs_and_return_path,\ \ required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_predict(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-train: container: args: @@ -571,7 +571,7 @@ deploymentSpec: , dest=\"model_config_path\", type=_make_parent_dirs_and_return_path, required=True,\ \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_train(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-train-2: container: args: @@ -679,7 +679,7 @@ deploymentSpec: , dest=\"model_config_path\", type=_make_parent_dirs_and_return_path, required=True,\ \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_train(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: xgboost-sample-pipeline root: diff --git 
a/frontend/src/components/tabs/StaticNodeDetailsV2.test.tsx b/frontend/src/components/tabs/StaticNodeDetailsV2.test.tsx index d0948760c7a..a49bbf1ea78 100644 --- a/frontend/src/components/tabs/StaticNodeDetailsV2.test.tsx +++ b/frontend/src/components/tabs/StaticNodeDetailsV2.test.tsx @@ -76,7 +76,7 @@ describe('StaticNodeDetailsV2', () => { expect(screen.getAllByText('STRING').length).toEqual(2); screen.getByText('Image'); - screen.getByText('python:3.9'); + screen.getByText('public.ecr.aws/docker/library/python:3.12'); screen.getByText('Command'); expect(screen.getAllByText('sh').length).toEqual(2); @@ -131,7 +131,7 @@ describe('StaticNodeDetailsV2', () => { expect(screen.getAllByText('STRING').length).toEqual(1); screen.getByText('Image'); - screen.getByText('python:3.9'); + screen.getByText('public.ecr.aws/docker/library/python:3.12'); screen.getByText('Command'); expect(screen.getAllByText('sh').length).toEqual(2); diff --git a/frontend/src/data/test/create_mount_delete_dynamic_pvc.yaml b/frontend/src/data/test/create_mount_delete_dynamic_pvc.yaml index fc07ad77ade..3bdfab5ab68 100644 --- a/frontend/src/data/test/create_mount_delete_dynamic_pvc.yaml +++ b/frontend/src/data/test/create_mount_delete_dynamic_pvc.yaml @@ -76,7 +76,7 @@ deploymentSpec: \ *\n\ndef consumer() -> str:\n with open('/data/file.txt', 'r') as file:\n\ \ content = file.read()\n print(content)\n return content\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-createpvc: container: image: argostub/createpvc @@ -111,7 +111,7 @@ deploymentSpec: \ file.write('Hello world')\n with open('/data/file.txt', 'r')\ \ as file:\n content = file.read()\n print(content)\n return\ \ content\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/frontend/src/data/test/lightweight_python_functions_v2_pipeline_rev.yaml b/frontend/src/data/test/lightweight_python_functions_v2_pipeline_rev.yaml index 
c32ada3c000..a715b53e26e 100644 --- a/frontend/src/data/test/lightweight_python_functions_v2_pipeline_rev.yaml +++ b/frontend/src/data/test/lightweight_python_functions_v2_pipeline_rev.yaml @@ -6,7 +6,7 @@ deploymentSpec: executors: exec-preprocess: container: - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 args: - --executor_input - '{{$}}' @@ -105,7 +105,7 @@ deploymentSpec: \ Model artifact, which has a .metadata dictionary\n # to store arbitrary\ \ metadata for the output artifact.\n model.metadata['accuracy'] = 0.9\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 components: comp-preprocess: inputDefinitions: diff --git a/frontend/src/data/test/pipeline_with_loops_and_conditions.yaml b/frontend/src/data/test/pipeline_with_loops_and_conditions.yaml index f12bc94a805..4311bd5dcbe 100644 --- a/frontend/src/data/test/pipeline_with_loops_and_conditions.yaml +++ b/frontend/src/data/test/pipeline_with_loops_and_conditions.yaml @@ -588,7 +588,7 @@ deploymentSpec: \ 'A_a': '1',\n 'B_b': ['2', '20'],\n },\n \ \ {\n 'A_a': '10',\n 'B_b': ['22', '222'],\n \ \ },\n ]\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-args-generator-op-2: container: args: @@ -617,7 +617,7 @@ deploymentSpec: \ 'A_a': '1',\n 'B_b': ['2', '20'],\n },\n \ \ {\n 'A_a': '10',\n 'B_b': ['22', '222'],\n \ \ },\n ]\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-flip-coin-op: container: args: @@ -645,7 +645,7 @@ deploymentSpec: \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-struct: container: args: @@ -671,7 +671,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_struct(struct: dict):\n 
print(struct)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text: container: args: @@ -698,7 +698,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-2: container: args: @@ -725,7 +725,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-3: container: args: @@ -752,7 +752,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-4: container: args: @@ -779,7 +779,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-5: container: args: @@ -806,7 +806,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-6: container: args: @@ -833,7 +833,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - 
image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-7: container: args: @@ -860,7 +860,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-8: container: args: @@ -887,7 +887,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-9: container: args: @@ -914,7 +914,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-loops-and-conditions-multi-layers root: diff --git a/frontend/src/data/test/xgboost_sample_pipeline.yaml b/frontend/src/data/test/xgboost_sample_pipeline.yaml index e7a54104173..216a46c611e 100644 --- a/frontend/src/data/test/xgboost_sample_pipeline.yaml +++ b/frontend/src/data/test/xgboost_sample_pipeline.yaml @@ -253,7 +253,7 @@ deploymentSpec: \ try:\n os.makedirs(os.path.dirname(output_file))\n except\ \ OSError:\n pass\n with open(output_file, 'w') as f:\n \ \ f.write(_output_serializers[idx](_outputs[idx]))\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-predict: container: args: @@ -304,7 +304,7 @@ deploymentSpec: _parser.add_argument(\"--predictions\", dest=\"predictions_path\", type=_make_parent_dirs_and_return_path,\ \ required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = 
xgboost_predict(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-predict-2: container: args: @@ -358,7 +358,7 @@ deploymentSpec: predictions_path\", type=_make_parent_dirs_and_return_path, required=True,\ \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_predict(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-predict-3: container: args: @@ -412,7 +412,7 @@ deploymentSpec: predictions_path\", type=_make_parent_dirs_and_return_path, required=True,\ \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_predict(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-predict-4: container: args: @@ -463,7 +463,7 @@ deploymentSpec: _parser.add_argument(\"--predictions\", dest=\"predictions_path\", type=_make_parent_dirs_and_return_path,\ \ required=True, default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_predict(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-train: container: args: @@ -571,7 +571,7 @@ deploymentSpec: , dest=\"model_config_path\", type=_make_parent_dirs_and_return_path, required=True,\ \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_train(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-xgboost-train-2: container: args: @@ -679,7 +679,7 @@ deploymentSpec: , dest=\"model_config_path\", type=_make_parent_dirs_and_return_path, required=True,\ \ default=argparse.SUPPRESS)\n_parsed_args = vars(_parser.parse_args())\n\ \n_outputs = xgboost_train(**_parsed_args)\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: xgboost-sample-pipeline root: diff --git 
a/frontend/src/lib/v2/WorkflowUtils.test.ts b/frontend/src/lib/v2/WorkflowUtils.test.ts index d3d5f382634..7c76ea58488 100644 --- a/frontend/src/lib/v2/WorkflowUtils.test.ts +++ b/frontend/src/lib/v2/WorkflowUtils.test.ts @@ -143,7 +143,7 @@ PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-scr "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef preprocess(\n # An input parameter of type string.\n message: str,\n # An input parameter of type dict.\n input_dict_parameter: Dict[str, int],\n # An input parameter of type list.\n input_list_parameter: List[str],\n # Use Output[T] to get a metadata-rich handle to the output artifact\n # of type `Dataset`.\n output_dataset_one: Output[Dataset],\n # A locally accessible filepath for another output artifact of type\n # `Dataset`.\n output_dataset_two_path: OutputPath('Dataset'),\n # A locally accessible filepath for an output parameter of type string.\n output_parameter_path: OutputPath(str),\n # A locally accessible filepath for an output parameter of type bool.\n output_bool_parameter_path: OutputPath(bool),\n # A locally accessible filepath for an output parameter of type dict.\n output_dict_parameter_path: OutputPath(Dict[str, int]),\n # A locally accessible filepath for an output parameter of type list.\n output_list_parameter_path: OutputPath(List[str]),\n):\n \"\"\"Dummy preprocessing step.\"\"\"\n\n # Use Dataset.path to access a local file path for writing.\n # One can also use Dataset.uri to access the actual URI file path.\n with open(output_dataset_one.path, 'w') as f:\n f.write(message)\n\n # OutputPath is used to just pass the local file path of the output artifact\n # to the function.\n with open(output_dataset_two_path, 'w') as f:\n f.write(message)\n\n with open(output_parameter_path, 'w') as f:\n f.write(message)\n\n with open(output_bool_parameter_path, 'w') as f:\n f.write(\n str(True)) # use either `str()` or `json.dumps()` for bool values.\n\n 
import json\n with open(output_dict_parameter_path, 'w') as f:\n f.write(json.dumps(input_dict_parameter))\n\n with open(output_list_parameter_path, 'w') as f:\n f.write(json.dumps(input_list_parameter))\n\n", ], env: [], - image: 'python:3.9', + image: 'public.ecr.aws/docker/library/python:3.12', lifecycle: undefined, resources: undefined, }); @@ -174,7 +174,7 @@ PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-scr "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef producer() -> str:\n with open('/data/file.txt', 'w') as file:\n file.write('Hello world')\n with open('/data/file.txt', 'r') as file:\n content = file.read()\n print(content)\n return content\n\n", ], env: [], - image: 'python:3.9', + image: 'public.ecr.aws/docker/library/python:3.12', lifecycle: undefined, resources: undefined, }); diff --git a/hack/update-requirements.sh b/hack/update-requirements.sh index 382e95a82de..6b9c172e493 100755 --- a/hack/update-requirements.sh +++ b/hack/update-requirements.sh @@ -16,7 +16,7 @@ # Usage: ./update_requirements.sh requirements.txt set -euo pipefail -IMAGE=${1:-"python:3.9"} +IMAGE=${1:-"public.ecr.aws/docker/library/python:3.12"} docker run -i --rm --entrypoint "" "$IMAGE" sh -c ' python3 -m pip install pip setuptools --quiet --upgrade python3 -m pip install pip-tools==6.14.0 --quiet diff --git a/kubernetes_platform/python/test/snapshot/data/config_map_as_env.yaml b/kubernetes_platform/python/test/snapshot/data/config_map_as_env.yaml index 36d242bac9a..c6fa436092b 100644 --- a/kubernetes_platform/python/test/snapshot/data/config_map_as_env.yaml +++ b/kubernetes_platform/python/test/snapshot/data/config_map_as_env.yaml @@ -32,7 +32,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git 
a/kubernetes_platform/python/test/snapshot/data/config_map_as_vol.yaml b/kubernetes_platform/python/test/snapshot/data/config_map_as_vol.yaml index a68f3ce2db4..b8d4ac734bb 100644 --- a/kubernetes_platform/python/test/snapshot/data/config_map_as_vol.yaml +++ b/kubernetes_platform/python/test/snapshot/data/config_map_as_vol.yaml @@ -32,7 +32,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/kubernetes_platform/python/test/snapshot/data/create_mount_delete_dynamic_pvc.yaml b/kubernetes_platform/python/test/snapshot/data/create_mount_delete_dynamic_pvc.yaml index 124430a41d3..b4e732ea02c 100644 --- a/kubernetes_platform/python/test/snapshot/data/create_mount_delete_dynamic_pvc.yaml +++ b/kubernetes_platform/python/test/snapshot/data/create_mount_delete_dynamic_pvc.yaml @@ -113,7 +113,7 @@ deploymentSpec: \ *\n\ndef consumer() -> str:\n with open('/data/file.txt', 'r') as file:\n\ \ content = file.read()\n print(content)\n return content\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-createpvc: container: image: argostub/createpvc @@ -150,7 +150,7 @@ deploymentSpec: \ file.write('Hello world')\n with open('/data/file.txt', 'r')\ \ as file:\n content = file.read()\n print(content)\n return\ \ content\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc.yaml b/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc.yaml index d42aadc2c9a..fa94da0ed8a 100644 --- a/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc.yaml +++ b/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc.yaml @@ -101,7 +101,7 @@ 
deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-createpvc: container: image: argostub/createpvc diff --git a/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc_from_task_output.yaml b/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc_from_task_output.yaml index 1348100c461..9151ef5874e 100644 --- a/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc_from_task_output.yaml +++ b/kubernetes_platform/python/test/snapshot/data/create_mount_delete_existing_pvc_from_task_output.yaml @@ -107,7 +107,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-createpvc: container: image: argostub/createpvc @@ -141,7 +141,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef get_pvc_name() -> str:\n return 'static-pvc-name'\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/kubernetes_platform/python/test/snapshot/data/empty_dir_mounts.yaml b/kubernetes_platform/python/test/snapshot/data/empty_dir_mounts.yaml index 177fbd2a7cf..cae6b95bc47 100644 --- a/kubernetes_platform/python/test/snapshot/data/empty_dir_mounts.yaml +++ b/kubernetes_platform/python/test/snapshot/data/empty_dir_mounts.yaml @@ -32,7 +32,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/kubernetes_platform/python/test/snapshot/data/field_path_as_env.yaml 
b/kubernetes_platform/python/test/snapshot/data/field_path_as_env.yaml index c7590fe8bc8..c810b009008 100644 --- a/kubernetes_platform/python/test/snapshot/data/field_path_as_env.yaml +++ b/kubernetes_platform/python/test/snapshot/data/field_path_as_env.yaml @@ -32,7 +32,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/kubernetes_platform/python/test/snapshot/data/general_ephemeral_volume.yaml b/kubernetes_platform/python/test/snapshot/data/general_ephemeral_volume.yaml index 87402a2f2f9..d51bf1f29a5 100644 --- a/kubernetes_platform/python/test/snapshot/data/general_ephemeral_volume.yaml +++ b/kubernetes_platform/python/test/snapshot/data/general_ephemeral_volume.yaml @@ -32,7 +32,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/kubernetes_platform/python/test/snapshot/data/image_pull_secrets.yaml b/kubernetes_platform/python/test/snapshot/data/image_pull_secrets.yaml index d399211f4cf..29987c45246 100644 --- a/kubernetes_platform/python/test/snapshot/data/image_pull_secrets.yaml +++ b/kubernetes_platform/python/test/snapshot/data/image_pull_secrets.yaml @@ -32,7 +32,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/kubernetes_platform/python/test/snapshot/data/node_selector.yaml b/kubernetes_platform/python/test/snapshot/data/node_selector.yaml index addc0db0d91..01fb3623bfd 100644 --- a/kubernetes_platform/python/test/snapshot/data/node_selector.yaml 
+++ b/kubernetes_platform/python/test/snapshot/data/node_selector.yaml @@ -32,7 +32,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/kubernetes_platform/python/test/snapshot/data/secret_as_env.yaml b/kubernetes_platform/python/test/snapshot/data/secret_as_env.yaml index 85aacaec96b..50157b624b3 100644 --- a/kubernetes_platform/python/test/snapshot/data/secret_as_env.yaml +++ b/kubernetes_platform/python/test/snapshot/data/secret_as_env.yaml @@ -32,7 +32,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/kubernetes_platform/python/test/snapshot/data/secret_as_vol.yaml b/kubernetes_platform/python/test/snapshot/data/secret_as_vol.yaml index 98f8011a4e6..b83b4a95591 100644 --- a/kubernetes_platform/python/test/snapshot/data/secret_as_vol.yaml +++ b/kubernetes_platform/python/test/snapshot/data/secret_as_vol.yaml @@ -32,7 +32,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/kubernetes_platform/python/test/snapshot/data/timeout.yaml b/kubernetes_platform/python/test/snapshot/data/timeout.yaml index 295ace7b58a..c03637e8bc0 100644 --- a/kubernetes_platform/python/test/snapshot/data/timeout.yaml +++ b/kubernetes_platform/python/test/snapshot/data/timeout.yaml @@ -32,7 +32,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: 
public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/kubernetes_platform/python/test/snapshot/data/toleration.yaml b/kubernetes_platform/python/test/snapshot/data/toleration.yaml index 843ab9c9cc9..da1777b8b25 100644 --- a/kubernetes_platform/python/test/snapshot/data/toleration.yaml +++ b/kubernetes_platform/python/test/snapshot/data/toleration.yaml @@ -32,7 +32,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef comp():\n pass\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/manifests/kustomize/base/installs/multi-user/pipelines-profile-controller/deployment.yaml b/manifests/kustomize/base/installs/multi-user/pipelines-profile-controller/deployment.yaml index 636f7523f6b..fe2bdd0b7bb 100644 --- a/manifests/kustomize/base/installs/multi-user/pipelines-profile-controller/deployment.yaml +++ b/manifests/kustomize/base/installs/multi-user/pipelines-profile-controller/deployment.yaml @@ -11,7 +11,7 @@ spec: spec: containers: - name: profile-controller - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 command: ["python", "/hooks/sync.py"] envFrom: - configMapRef: diff --git a/samples/test/metrics_visualization_v2.py b/samples/test/metrics_visualization_v2.py index f192b667292..23e0c13298a 100644 --- a/samples/test/metrics_visualization_v2.py +++ b/samples/test/metrics_visualization_v2.py @@ -24,7 +24,7 @@ @component( packages_to_install=['scikit-learn'], - base_image='python:3.9', + base_image='public.ecr.aws/docker/library/python:3.12', kfp_package_path=_KFP_PACKAGE_PATH, ) def digit_classification(metrics: Output[Metrics]): @@ -68,7 +68,7 @@ def digit_classification(metrics: Output[Metrics]): @component( packages_to_install=['scikit-learn'], - base_image='python:3.9', + base_image='public.ecr.aws/docker/library/python:3.12', kfp_package_path=_KFP_PACKAGE_PATH, ) def 
wine_classification(metrics: Output[ClassificationMetrics]): @@ -99,7 +99,7 @@ def wine_classification(metrics: Output[ClassificationMetrics]): @component( packages_to_install=['scikit-learn'], - base_image='python:3.9', + base_image='public.ecr.aws/docker/library/python:3.12', kfp_package_path=_KFP_PACKAGE_PATH, ) def iris_sgdclassifier(test_samples_fraction: float, diff --git a/sdk/python/kfp/cli/compile_test.py b/sdk/python/kfp/cli/compile_test.py index c11d43e2a4e..b2082086857 100644 --- a/sdk/python/kfp/cli/compile_test.py +++ b/sdk/python/kfp/cli/compile_test.py @@ -29,7 +29,7 @@ def my_comp(): @dsl.container_component def my_container_comp(): return dsl.ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=['echo', 'hello world'], args=[], ) @@ -55,7 +55,7 @@ def my_comp(): @dsl.container_component def my_container_comp(): return dsl.ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=['echo', 'hello world'], args=[], ) diff --git a/sdk/python/kfp/cli/component_test.py b/sdk/python/kfp/cli/component_test.py index 67c257ecb52..a924b9224b7 100644 --- a/sdk/python/kfp/cli/component_test.py +++ b/sdk/python/kfp/cli/component_test.py @@ -494,7 +494,7 @@ def test_docker_file_is_created_correctly(self): textwrap.dedent('''\ # Generated by KFP. - FROM python:3.9 + FROM public.ecr.aws/docker/library/python:3.12 WORKDIR /usr/local/src/kfp/components COPY runtime-requirements.txt runtime-requirements.txt @@ -523,7 +523,7 @@ def test_docker_file_is_created_correctly_with_one_url(self): textwrap.dedent('''\ # Generated by KFP. - FROM python:3.9 + FROM public.ecr.aws/docker/library/python:3.12 WORKDIR /usr/local/src/kfp/components COPY runtime-requirements.txt runtime-requirements.txt @@ -554,7 +554,7 @@ def test_docker_file_is_created_correctly_with_two_urls(self): textwrap.dedent('''\ # Generated by KFP. 
- FROM python:3.9 + FROM public.ecr.aws/docker/library/python:3.12 WORKDIR /usr/local/src/kfp/components COPY runtime-requirements.txt runtime-requirements.txt @@ -597,7 +597,7 @@ def test_existing_dockerfile_can_be_overwritten(self): textwrap.dedent('''\ # Generated by KFP. - FROM python:3.9 + FROM public.ecr.aws/docker/library/python:3.12 WORKDIR /usr/local/src/kfp/components COPY runtime-requirements.txt runtime-requirements.txt @@ -636,7 +636,7 @@ def test_dockerfile_can_contain_custom_kfp_package(self): file_start = textwrap.dedent('''\ # Generated by KFP. - FROM python:3.9 + FROM public.ecr.aws/docker/library/python:3.12 WORKDIR /usr/local/src/kfp/components COPY runtime-requirements.txt runtime-requirements.txt @@ -665,7 +665,7 @@ def test_docker_file_is_created_one_trusted_host(self): textwrap.dedent('''\ # Generated by KFP. - FROM python:3.9 + FROM public.ecr.aws/docker/library/python:3.12 WORKDIR /usr/local/src/kfp/components COPY runtime-requirements.txt runtime-requirements.txt @@ -695,7 +695,7 @@ def test_docker_file_is_created_two_trusted_host(self): textwrap.dedent('''\ # Generated by KFP. 
- FROM python:3.9 + FROM public.ecr.aws/docker/library/python:3.12 WORKDIR /usr/local/src/kfp/components COPY runtime-requirements.txt runtime-requirements.txt diff --git a/sdk/python/kfp/compiler/compiler_test.py b/sdk/python/kfp/compiler/compiler_test.py index 98f521b0fbb..ee96b5df547 100644 --- a/sdk/python/kfp/compiler/compiler_test.py +++ b/sdk/python/kfp/compiler/compiler_test.py @@ -696,7 +696,7 @@ def test_use_task_final_status_in_non_exit_op_yaml(self): - {name: message, type: PipelineTaskFinalStatus} implementation: container: - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 command: - echo - {inputValue: message} @@ -912,14 +912,14 @@ def my_pipeline() -> NamedTuple('Outputs', [ def test_pipeline_with_parameterized_container_image(self): with tempfile.TemporaryDirectory() as tmpdir: - @dsl.component(base_image='docker.io/python:3.9.17') + @dsl.component(base_image='public.ecr.aws/docker/library/python:3.12') def empty_component(): pass @dsl.pipeline() def simple_pipeline(img: str): task = empty_component() - # overwrite base_image="docker.io/python:3.9.17" + # overwrite base_image="public.ecr.aws/docker/library/python:3.12" task.set_container_image(img) output_yaml = os.path.join(tmpdir, 'result.yaml') @@ -947,14 +947,14 @@ def simple_pipeline(img: str): def test_pipeline_with_constant_container_image(self): with tempfile.TemporaryDirectory() as tmpdir: - @dsl.component(base_image='docker.io/python:3.9.17') + @dsl.component(base_image='public.ecr.aws/docker/library/python:3.12') def empty_component(): pass @dsl.pipeline() def simple_pipeline(): task = empty_component() - # overwrite base_image="docker.io/python:3.9.17" + # overwrite base_image="public.ecr.aws/docker/library/python:3.12" task.set_container_image('constant-value') output_yaml = os.path.join(tmpdir, 'result.yaml') @@ -1315,7 +1315,7 @@ def test_compile_container_component_simple(self): def hello_world_container() -> 
dsl.ContainerSpec: """Hello world component.""" return dsl.ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=['echo', 'hello world'], args=[], ) @@ -1338,7 +1338,7 @@ def test_compile_container_with_simple_io(self): @dsl.container_component def container_simple_io(text: str, output_path: dsl.OutputPath(str)): return dsl.ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=['my_program', text], args=['--output_path', output_path]) @@ -2386,7 +2386,7 @@ def my_component(string: str, model: bool) -> str: def my_container_component(text: str, output_path: OutputPath(str)): """component description.""" return ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=['my_program', text], args=['--output_path', output_path]) diff --git a/sdk/python/kfp/components/load_yaml_utilities_test.py b/sdk/python/kfp/components/load_yaml_utilities_test.py index 8130bb2e851..605e3a0a60f 100644 --- a/sdk/python/kfp/components/load_yaml_utilities_test.py +++ b/sdk/python/kfp/components/load_yaml_utilities_test.py @@ -121,7 +121,7 @@ def test_load_component_from_url(self): self.assertEqual(component.name, 'identity') self.assertEqual( component.component_spec.implementation.container.image, - 'python:3.9') + 'public.ecr.aws/docker/library/python:3.12') if __name__ == '__main__': diff --git a/sdk/python/kfp/dsl/component_decorator.py b/sdk/python/kfp/dsl/component_decorator.py index c2dee355b16..6285eb89fef 100644 --- a/sdk/python/kfp/dsl/component_decorator.py +++ b/sdk/python/kfp/dsl/component_decorator.py @@ -93,7 +93,7 @@ def my_function_one(input: str, output: Output[Model]): ... 
@dsl.component( - base_image='python:3.9', + base_image='public.ecr.aws/docker/library/python:3.12', output_component_file='my_function.yaml' ) def my_function_two(input: Input[Mode])): diff --git a/sdk/python/kfp/dsl/component_decorator_test.py b/sdk/python/kfp/dsl/component_decorator_test.py index 4b51de638f4..4c8db1f5e46 100644 --- a/sdk/python/kfp/dsl/component_decorator_test.py +++ b/sdk/python/kfp/dsl/component_decorator_test.py @@ -35,7 +35,7 @@ def hello_world(text: str) -> str: def test_as_decorator_syntactic_sugar_some_args(self): - @component(base_image='python:3.9') + @component(base_image='public.ecr.aws/docker/library/python:3.12') def hello_world(text: str) -> str: """Hello world component.""" return text @@ -52,7 +52,7 @@ def comp(text: str) -> str: def test_some_args(self): - @component(base_image='python:3.9') + @component(base_image='public.ecr.aws/docker/library/python:3.12') def comp(text: str) -> str: return text diff --git a/sdk/python/kfp/dsl/component_factory.py b/sdk/python/kfp/dsl/component_factory.py index 5e7a25c0edc..c5a48a08bd8 100644 --- a/sdk/python/kfp/dsl/component_factory.py +++ b/sdk/python/kfp/dsl/component_factory.py @@ -37,7 +37,7 @@ from kfp.dsl.types import type_annotations from kfp.dsl.types import type_utils -_DEFAULT_BASE_IMAGE = 'python:3.9' +_DEFAULT_BASE_IMAGE = 'public.ecr.aws/docker/library/python:3.12' SINGLE_OUTPUT_NAME = 'Output' @@ -570,7 +570,7 @@ def create_component_from_func( if base_image is None: base_image = _DEFAULT_BASE_IMAGE warnings.warn( - ("The default base_image used by the @dsl.component decorator will switch from 'python:3.9' to 'python:3.10' on Oct 1, 2025. To ensure your existing components work with versions of the KFP SDK released after that date, you should provide an explicit base_image argument and ensure your component works as intended on Python 3.10." 
+ ("The default base_image used by the @dsl.component decorator has switched from 'python:3.9' to 'public.ecr.aws/docker/library/python:3.12'. To ensure your existing components work with current and future versions of the KFP SDK, you should provide an explicit base_image argument and ensure your component works as intended on Python 3.12." ), FutureWarning, stacklevel=2, diff --git a/sdk/python/kfp/dsl/container_component_decorator_test.py b/sdk/python/kfp/dsl/container_component_decorator_test.py index b690fd5e375..90ce1edcbaf 100644 --- a/sdk/python/kfp/dsl/container_component_decorator_test.py +++ b/sdk/python/kfp/dsl/container_component_decorator_test.py @@ -47,7 +47,7 @@ def hello_world_io( text_output_path: dsl.OutputPath(str)) -> dsl.ContainerSpec: """Hello world component with input and output.""" return dsl.ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=['echo'], args=['--text', text, '--output_path', text_output_path]) diff --git a/sdk/python/kfp/dsl/pipeline_task.py b/sdk/python/kfp/dsl/pipeline_task.py index 780f607a53b..b3b6d0851f9 100644 --- a/sdk/python/kfp/dsl/pipeline_task.py +++ b/sdk/python/kfp/dsl/pipeline_task.py @@ -646,7 +646,7 @@ def set_container_image( precedence over @component(base_image=...) Args: - name: The name of the image, e.g. "python:3.9-alpine". + name: The name of the image, e.g. "public.ecr.aws/docker/library/python:3.12-alpine". Returns: Self return to allow chained setting calls. 
diff --git a/sdk/python/kfp/dsl/placeholders.py b/sdk/python/kfp/dsl/placeholders.py index d8b0dfb9446..e649bfa41d9 100644 --- a/sdk/python/kfp/dsl/placeholders.py +++ b/sdk/python/kfp/dsl/placeholders.py @@ -179,7 +179,7 @@ class ConcatPlaceholder(Placeholder): def container_with_concat_placeholder(text1: str, text2: Output[Dataset], output_path: OutputPath(str)): return ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=[ 'my_program', ConcatPlaceholder(['prefix-', text1, text2.uri]) @@ -227,7 +227,7 @@ def container_with_if_placeholder(output_path: OutputPath(str), dataset: Output[Dataset], optional_input: str = 'default'): return ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=[ 'my_program', IfPresentPlaceholder( diff --git a/sdk/python/kfp/dsl/placeholders_test.py b/sdk/python/kfp/dsl/placeholders_test.py index 6c2fade8b6d..90c04a8aa0b 100644 --- a/sdk/python/kfp/dsl/placeholders_test.py +++ b/sdk/python/kfp/dsl/placeholders_test.py @@ -514,7 +514,7 @@ def container_with_placeholder_in_fstring( text1: str, ): return dsl.ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=[ 'my_program', f'prefix-{text1}', @@ -540,7 +540,7 @@ def container_with_placeholder_in_fstring( text2: str, ): return dsl.ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=[ 'my_program', f'another-prefix-{dsl.ConcatPlaceholder([text1, text2])}', @@ -557,7 +557,7 @@ def container_with_placeholder_in_fstring( text2: str, ): return dsl.ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=[ 'echo', f"another-prefix-{dsl.IfPresentPlaceholder(input_name='text1', then=['val'])}", diff --git a/sdk/python/kfp/dsl/structures_test.py b/sdk/python/kfp/dsl/structures_test.py index 29ebc7f16a5..657ebac9f84 100644 --- a/sdk/python/kfp/dsl/structures_test.py +++ 
b/sdk/python/kfp/dsl/structures_test.py @@ -454,7 +454,7 @@ def test_env(self): def test_from_container_dict_no_placeholders(self): expected_container_spec = structures.ContainerSpecImplementation( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=['sh', '-c', 'dummy'], args=['--executor_input', '{{$}}', '--function_to_execute', 'func'], env={'ENV1': 'val1'}, @@ -465,7 +465,7 @@ def test_from_container_dict_no_placeholders(self): '--executor_input', '{{$}}', '--function_to_execute', 'func' ], 'command': ['sh', '-c', 'dummy'], - 'image': 'python:3.9', + 'image': 'public.ecr.aws/docker/library/python:3.12', 'env': { 'ENV1': 'val1' }, diff --git a/sdk/python/kfp/local/docker_task_handler_test.py b/sdk/python/kfp/local/docker_task_handler_test.py index 71f8be21361..94a72435ca3 100755 --- a/sdk/python/kfp/local/docker_task_handler_test.py +++ b/sdk/python/kfp/local/docker_task_handler_test.py @@ -190,7 +190,7 @@ def artifact_maker(x: str, a: Output[Artifact]): kwargs = run_mock.call_args[1] self.assertEqual( kwargs['image'], - 'python:3.9', + 'public.ecr.aws/docker/library/python:3.12', ) self.assertTrue( any('def artifact_maker' in c for c in kwargs['command'])) diff --git a/sdk/python/test_data/components/add_numbers.yaml b/sdk/python/test_data/components/add_numbers.yaml index 197646adec6..ffc7c97f677 100644 --- a/sdk/python/test_data/components/add_numbers.yaml +++ b/sdk/python/test_data/components/add_numbers.yaml @@ -47,7 +47,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef add_numbers(a: int, b: int) -> int:\n return a + b\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: add-numbers root: diff --git a/sdk/python/test_data/components/component_with_metadata_fields.yaml b/sdk/python/test_data/components/component_with_metadata_fields.yaml index 4547d34d722..e0e1bdb293c 100644 --- 
a/sdk/python/test_data/components/component_with_metadata_fields.yaml +++ b/sdk/python/test_data/components/component_with_metadata_fields.yaml @@ -72,7 +72,7 @@ deploymentSpec: \ as f:\n content_b = f.read()\n\n concatenated_string = content_a\ \ + content_b\n with open(out_dataset.path, 'w') as f:\n f.write(concatenated_string)\n\ \n return concatenated_string\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: dataset-joiner root: diff --git a/sdk/python/test_data/components/component_with_pip_install.yaml b/sdk/python/test_data/components/component_with_pip_install.yaml index e6b0fe11513..0ca4ca63da2 100644 --- a/sdk/python/test_data/components/component_with_pip_install.yaml +++ b/sdk/python/test_data/components/component_with_pip_install.yaml @@ -35,7 +35,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef component_with_pip_install():\n import yapf\n print(dir(yapf))\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: component-with-pip-install root: diff --git a/sdk/python/test_data/components/component_with_pip_install_in_venv.yaml b/sdk/python/test_data/components/component_with_pip_install_in_venv.yaml index 135c9b469b2..860259fd120 100644 --- a/sdk/python/test_data/components/component_with_pip_install_in_venv.yaml +++ b/sdk/python/test_data/components/component_with_pip_install_in_venv.yaml @@ -38,7 +38,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef component_with_pip_install():\n import yapf\n\n print(dir(yapf))\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: component-with-pip-install root: diff --git a/sdk/python/test_data/components/component_with_task_final_status.yaml b/sdk/python/test_data/components/component_with_task_final_status.yaml index 2b1fca875f4..87bcb56766f 100644 --- 
a/sdk/python/test_data/components/component_with_task_final_status.yaml +++ b/sdk/python/test_data/components/component_with_task_final_status.yaml @@ -40,7 +40,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef exit_comp(status: dsl.PipelineTaskFinalStatus):\n print(status)\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: exit-comp root: diff --git a/sdk/python/test_data/components/concat_message.yaml b/sdk/python/test_data/components/concat_message.yaml index 381a8f22f73..0c92a2500e1 100644 --- a/sdk/python/test_data/components/concat_message.yaml +++ b/sdk/python/test_data/components/concat_message.yaml @@ -48,7 +48,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef concat_message(message1: str, message2: str) -> str:\n return\ \ message1 + message2\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: concat-message root: diff --git a/sdk/python/test_data/components/container_io.py b/sdk/python/test_data/components/container_io.py index 294de386d7d..66ece9c7bfe 100644 --- a/sdk/python/test_data/components/container_io.py +++ b/sdk/python/test_data/components/container_io.py @@ -19,7 +19,7 @@ @container_component def container_io(text: str, output_path: OutputPath(str)): return ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=['my_program', text], args=['--output_path', output_path]) diff --git a/sdk/python/test_data/components/container_io.yaml b/sdk/python/test_data/components/container_io.yaml index 0b7d13d8ac5..ac63fc34e55 100644 --- a/sdk/python/test_data/components/container_io.yaml +++ b/sdk/python/test_data/components/container_io.yaml @@ -25,7 +25,7 @@ deploymentSpec: command: - my_program - '{{$.inputs.parameters[''text'']}}' - image: python:3.9 + image: 
public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: container-io root: diff --git a/sdk/python/test_data/components/container_no_input.py b/sdk/python/test_data/components/container_no_input.py index f4f1fb526d0..e01869edf5a 100644 --- a/sdk/python/test_data/components/container_no_input.py +++ b/sdk/python/test_data/components/container_no_input.py @@ -18,7 +18,7 @@ @container_component def container_no_input(): return ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=['echo', 'hello world'], args=[], ) diff --git a/sdk/python/test_data/components/container_no_input.yaml b/sdk/python/test_data/components/container_no_input.yaml index 23c30e59a42..11988adb41c 100644 --- a/sdk/python/test_data/components/container_no_input.yaml +++ b/sdk/python/test_data/components/container_no_input.yaml @@ -10,7 +10,7 @@ deploymentSpec: command: - echo - hello world - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: container-no-input root: diff --git a/sdk/python/test_data/components/container_with_concat_placeholder.py b/sdk/python/test_data/components/container_with_concat_placeholder.py index e3da70a3851..4c02bf13f7f 100644 --- a/sdk/python/test_data/components/container_with_concat_placeholder.py +++ b/sdk/python/test_data/components/container_with_concat_placeholder.py @@ -23,7 +23,7 @@ def container_with_concat_placeholder(text1: str, text2: Output[Dataset], output_path: OutputPath(str)): return ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=[ 'my_program', ConcatPlaceholder(['prefix-', text1, text2.uri]) diff --git a/sdk/python/test_data/components/container_with_concat_placeholder.yaml b/sdk/python/test_data/components/container_with_concat_placeholder.yaml index f0f02544f25..3a1ed816bd1 100644 --- a/sdk/python/test_data/components/container_with_concat_placeholder.yaml +++ 
b/sdk/python/test_data/components/container_with_concat_placeholder.yaml @@ -31,7 +31,7 @@ deploymentSpec: command: - my_program - '{"Concat": ["prefix-", "{{$.inputs.parameters[''text1'']}}", "{{$.outputs.artifacts[''text2''].uri}}"]}' - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: container-with-concat-placeholder root: diff --git a/sdk/python/test_data/components/container_with_if_placeholder.py b/sdk/python/test_data/components/container_with_if_placeholder.py index 7b21edc3e47..58d5b6d4ff3 100644 --- a/sdk/python/test_data/components/container_with_if_placeholder.py +++ b/sdk/python/test_data/components/container_with_if_placeholder.py @@ -24,7 +24,7 @@ def container_with_if_placeholder(output_path: OutputPath(str), dataset: Output[Dataset], optional_input: str = 'default'): return ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=[ 'my_program', IfPresentPlaceholder( diff --git a/sdk/python/test_data/components/container_with_if_placeholder.yaml b/sdk/python/test_data/components/container_with_if_placeholder.yaml index 65aec216d69..a6eaaba80f2 100644 --- a/sdk/python/test_data/components/container_with_if_placeholder.yaml +++ b/sdk/python/test_data/components/container_with_if_placeholder.yaml @@ -37,7 +37,7 @@ deploymentSpec: - --dataset - '{"IfPresent": {"InputName": "optional_input", "Then": ["{{$.outputs.artifacts[''dataset''].uri}}"], "Else": ["bye"]}}' - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: container-with-if-placeholder root: diff --git a/sdk/python/test_data/components/container_with_placeholder_in_fstring.py b/sdk/python/test_data/components/container_with_placeholder_in_fstring.py index e5bc2f5cb0b..5a5e76effd1 100644 --- a/sdk/python/test_data/components/container_with_placeholder_in_fstring.py +++ b/sdk/python/test_data/components/container_with_placeholder_in_fstring.py @@ -24,7 +24,7 @@ def 
container_with_placeholder_in_fstring( text1: str = 'text!', ): return ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=[ 'my_program', f'prefix-{text1}', diff --git a/sdk/python/test_data/components/container_with_placeholder_in_fstring.yaml b/sdk/python/test_data/components/container_with_placeholder_in_fstring.yaml index 03949eafb50..d22838c9bf6 100644 --- a/sdk/python/test_data/components/container_with_placeholder_in_fstring.yaml +++ b/sdk/python/test_data/components/container_with_placeholder_in_fstring.yaml @@ -27,7 +27,7 @@ deploymentSpec: - my_program - prefix-{{$.inputs.parameters['text1']}} - '{{$.outputs.artifacts[''output_artifact''].uri}}/0' - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: container-with-placeholder-in-fstring root: diff --git a/sdk/python/test_data/components/containerized_python_component.py b/sdk/python/test_data/components/containerized_python_component.py index e32d4ad454c..099276cbae6 100644 --- a/sdk/python/test_data/components/containerized_python_component.py +++ b/sdk/python/test_data/components/containerized_python_component.py @@ -14,7 +14,7 @@ from kfp import dsl -@dsl.component(base_image='python:3.9', target_image='kfp-image') +@dsl.component(base_image='public.ecr.aws/docker/library/python:3.12', target_image='kfp-image') def concat_message(message1: str, message2: str) -> str: return message1 + message2 diff --git a/sdk/python/test_data/components/dict_input.yaml b/sdk/python/test_data/components/dict_input.yaml index 4bcf5e61d53..4c27afdaa5c 100644 --- a/sdk/python/test_data/components/dict_input.yaml +++ b/sdk/python/test_data/components/dict_input.yaml @@ -38,7 +38,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef dict_input(struct: Dict):\n print(struct)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: dict-input 
root: diff --git a/sdk/python/test_data/components/identity.yaml b/sdk/python/test_data/components/identity.yaml index 377911ccbcd..402f8ec5239 100644 --- a/sdk/python/test_data/components/identity.yaml +++ b/sdk/python/test_data/components/identity.yaml @@ -44,7 +44,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef identity(value: str) -> str:\n return value\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: identity root: diff --git a/sdk/python/test_data/components/input_artifact.yaml b/sdk/python/test_data/components/input_artifact.yaml index 71c983fe36d..c5c42f82fa4 100644 --- a/sdk/python/test_data/components/input_artifact.yaml +++ b/sdk/python/test_data/components/input_artifact.yaml @@ -41,7 +41,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef input_artifact(data: Input[Dataset]):\n print(data.name)\n\ \ print(data.uri)\n print(data.metadata)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: input-artifact root: diff --git a/sdk/python/test_data/components/nested_return.yaml b/sdk/python/test_data/components/nested_return.yaml index f8d2fd169cc..b6ca3e7b608 100644 --- a/sdk/python/test_data/components/nested_return.yaml +++ b/sdk/python/test_data/components/nested_return.yaml @@ -39,7 +39,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef nested_return() -> List[Dict[str, str]]:\n return [{'A_a':\ \ '1', 'B_b': '2'}, {'A_a': '10', 'B_b': '20'}]\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: nested-return root: diff --git a/sdk/python/test_data/components/output_metrics.yaml b/sdk/python/test_data/components/output_metrics.yaml index dc7461949f6..c427196d912 100644 --- a/sdk/python/test_data/components/output_metrics.yaml +++ 
b/sdk/python/test_data/components/output_metrics.yaml @@ -44,7 +44,7 @@ deploymentSpec: \ that outputs metrics with a random accuracy.\"\"\"\n import random\n\ \ result = random.randint(0, 100)\n metrics.log_metric('accuracy',\ \ result)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: output-metrics root: diff --git a/sdk/python/test_data/components/preprocess.yaml b/sdk/python/test_data/components/preprocess.yaml index 40315487468..054babe53b2 100644 --- a/sdk/python/test_data/components/preprocess.yaml +++ b/sdk/python/test_data/components/preprocess.yaml @@ -97,7 +97,7 @@ deploymentSpec: \ 'w') as f:\n f.write(json.dumps(input_dict_parameter))\n\n with\ \ open(output_list_parameter_path, 'w') as f:\n f.write(json.dumps(input_list_parameter))\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: preprocess root: diff --git a/sdk/python/test_data/pipelines/component_with_optional_inputs.yaml b/sdk/python/test_data/pipelines/component_with_optional_inputs.yaml index b798786a856..a915721464f 100644 --- a/sdk/python/test_data/pipelines/component_with_optional_inputs.yaml +++ b/sdk/python/test_data/pipelines/component_with_optional_inputs.yaml @@ -48,7 +48,7 @@ deploymentSpec: \ {input1}, type: {type(input1)}')\n print(f'input2: {input2}, type:\ \ {type(input2)}')\n print(f'input3: {input3}, type: {type(input3)}')\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: v2-component-optional-input root: diff --git a/sdk/python/test_data/pipelines/component_with_pip_index_urls.yaml b/sdk/python/test_data/pipelines/component_with_pip_index_urls.yaml index 0358182f92b..f564c22d481 100644 --- a/sdk/python/test_data/pipelines/component_with_pip_index_urls.yaml +++ b/sdk/python/test_data/pipelines/component_with_pip_index_urls.yaml @@ -34,7 +34,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing 
import\ \ *\n\ndef component_op():\n import yapf\n print(dir(yapf))\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: v2-component-pip-index-urls root: diff --git a/sdk/python/test_data/pipelines/components_with_optional_artifacts.yaml b/sdk/python/test_data/pipelines/components_with_optional_artifacts.yaml index 457902a8a51..316e28dda20 100644 --- a/sdk/python/test_data/pipelines/components_with_optional_artifacts.yaml +++ b/sdk/python/test_data/pipelines/components_with_optional_artifacts.yaml @@ -144,7 +144,7 @@ deploymentSpec: \ None):\n if artifact is not None:\n print(artifact.name)\n \ \ print(artifact.uri)\n print(artifact.metadata)\n else:\n\ \ print('No artifact provided!')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-python-artifact-printer-2: container: args: @@ -175,7 +175,7 @@ deploymentSpec: \ None):\n if artifact is not None:\n print(artifact.name)\n \ \ print(artifact.uri)\n print(artifact.metadata)\n else:\n\ \ print('No artifact provided!')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: optional-artifact-pipeline root: diff --git a/sdk/python/test_data/pipelines/container_component_with_no_inputs.py b/sdk/python/test_data/pipelines/container_component_with_no_inputs.py index da0c8e78735..9aa0e16f9bd 100644 --- a/sdk/python/test_data/pipelines/container_component_with_no_inputs.py +++ b/sdk/python/test_data/pipelines/container_component_with_no_inputs.py @@ -19,7 +19,7 @@ @dsl.container_component def hello_world_container(): return dsl.ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=['echo', 'hello world'], args=[], ) diff --git a/sdk/python/test_data/pipelines/container_component_with_no_inputs.yaml b/sdk/python/test_data/pipelines/container_component_with_no_inputs.yaml index fba2c46ee03..e27bcdf0d0b 100644 --- 
a/sdk/python/test_data/pipelines/container_component_with_no_inputs.yaml +++ b/sdk/python/test_data/pipelines/container_component_with_no_inputs.yaml @@ -10,7 +10,7 @@ deploymentSpec: command: - echo - hello world - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: v2-container-component-no-input root: diff --git a/sdk/python/test_data/pipelines/cross_loop_after_topology.yaml b/sdk/python/test_data/pipelines/cross_loop_after_topology.yaml index 87d19b30d62..489a0790779 100644 --- a/sdk/python/test_data/pipelines/cross_loop_after_topology.yaml +++ b/sdk/python/test_data/pipelines/cross_loop_after_topology.yaml @@ -233,7 +233,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-2: container: args: @@ -261,7 +261,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-3: container: args: @@ -289,7 +289,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-4: container: args: @@ -317,7 +317,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-5: container: args: @@ -345,7 +345,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 + image: 
public.ecr.aws/docker/library/python:3.12 exec-print-op-6: container: args: @@ -373,7 +373,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-7: container: args: @@ -401,7 +401,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-8: container: args: @@ -429,7 +429,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/sdk/python/test_data/pipelines/if_elif_else_complex.yaml b/sdk/python/test_data/pipelines/if_elif_else_complex.yaml index df2ff1c76ca..6d608880142 100644 --- a/sdk/python/test_data/pipelines/if_elif_else_complex.yaml +++ b/sdk/python/test_data/pipelines/if_elif_else_complex.yaml @@ -712,7 +712,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef int_0_to_9999() -> int:\n import random\n return random.randint(0,\ \ 9999)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-is-even-or-odd: container: args: @@ -741,7 +741,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef is_even_or_odd(num: int) -> str:\n return 'odd' if num % 2\ \ else 'even'\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-is-even-or-odd-2: container: args: @@ -770,7 +770,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef is_even_or_odd(num: 
int) -> str:\n return 'odd' if num % 2\ \ else 'even'\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-and-return: container: args: @@ -799,7 +799,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-and-return-2: container: args: @@ -828,7 +828,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-and-return-3: container: args: @@ -857,7 +857,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-and-return-4: container: args: @@ -886,7 +886,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-and-return-5: container: args: @@ -915,7 +915,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-and-return-6: container: args: @@ -944,7 +944,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: 
public.ecr.aws/docker/library/python:3.12 exec-print-and-return-7: container: args: @@ -973,7 +973,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-and-return-8: container: args: @@ -1002,7 +1002,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-and-return-9: container: args: @@ -1031,7 +1031,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-ints: container: args: @@ -1059,7 +1059,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_ints(ints: List[int]):\n print(ints)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: lucky-number-pipeline root: diff --git a/sdk/python/test_data/pipelines/if_elif_else_with_oneof_parameters.yaml b/sdk/python/test_data/pipelines/if_elif_else_with_oneof_parameters.yaml index f6414225eda..61c44a3dddd 100644 --- a/sdk/python/test_data/pipelines/if_elif_else_with_oneof_parameters.yaml +++ b/sdk/python/test_data/pipelines/if_elif_else_with_oneof_parameters.yaml @@ -261,7 +261,7 @@ deploymentSpec: \ random.randint(0, 2)\n\n if val == 0:\n return 'heads'\n \ \ elif val == 1:\n return 'tails'\n else:\n return 'draw'\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-and-return: container: args: @@ -290,7 +290,7 @@ deploymentSpec: - "\nimport kfp\nfrom 
kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-and-return-2: container: args: @@ -319,7 +319,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-and-return-3: container: args: @@ -348,7 +348,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-special-print-and-return: container: args: @@ -378,7 +378,7 @@ deploymentSpec: \ *\n\ndef special_print_and_return(text: str, output_key: dsl.OutputPath(str)):\n\ \ print('Got the special state:', text)\n with open(output_key, 'w')\ \ as f:\n f.write(text)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: outer-pipeline root: diff --git a/sdk/python/test_data/pipelines/if_else_with_oneof_artifacts.yaml b/sdk/python/test_data/pipelines/if_else_with_oneof_artifacts.yaml index 7ce1208b87c..4d728d0114a 100644 --- a/sdk/python/test_data/pipelines/if_else_with_oneof_artifacts.yaml +++ b/sdk/python/test_data/pipelines/if_else_with_oneof_artifacts.yaml @@ -232,7 +232,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef flip_coin() -> str:\n import random\n return 'heads' if\ \ random.randint(0, 1) == 0 else 'tails'\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-param-to-artifact: container: args: @@ -261,7 +261,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing 
import\ \ *\n\ndef param_to_artifact(val: str, a: Output[Artifact]):\n with open(a.path,\ \ 'w') as f:\n f.write(val)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-param-to-artifact-2: container: args: @@ -290,7 +290,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef param_to_artifact(val: str, a: Output[Artifact]):\n with open(a.path,\ \ 'w') as f:\n f.write(val)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-artifact: container: args: @@ -319,7 +319,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_artifact(a: Input[Artifact]):\n with open(a.path) as\ \ f:\n print(f.read())\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-artifact-2: container: args: @@ -348,7 +348,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_artifact(a: Input[Artifact]):\n with open(a.path) as\ \ f:\n print(f.read())\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: outer-pipeline root: diff --git a/sdk/python/test_data/pipelines/if_else_with_oneof_parameters.yaml b/sdk/python/test_data/pipelines/if_else_with_oneof_parameters.yaml index aa2c7cf5a9f..e9f3eec56ee 100644 --- a/sdk/python/test_data/pipelines/if_else_with_oneof_parameters.yaml +++ b/sdk/python/test_data/pipelines/if_else_with_oneof_parameters.yaml @@ -171,7 +171,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef flip_coin() -> str:\n import random\n return 'heads' if\ \ random.randint(0, 1) == 0 else 'tails'\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-and-return: container: args: @@ -200,7 +200,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import 
*\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-and-return-2: container: args: @@ -229,7 +229,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-and-return-3: container: args: @@ -258,7 +258,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_and_return(text: str) -> str:\n print(text)\n return\ \ text\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: flip-coin-pipeline root: diff --git a/sdk/python/test_data/pipelines/lightweight_python_functions_pipeline.yaml b/sdk/python/test_data/pipelines/lightweight_python_functions_pipeline.yaml index 52d0037d0ea..597717873f9 100644 --- a/sdk/python/test_data/pipelines/lightweight_python_functions_pipeline.yaml +++ b/sdk/python/test_data/pipelines/lightweight_python_functions_pipeline.yaml @@ -119,7 +119,7 @@ deploymentSpec: \ 'w') as f:\n f.write(json.dumps(input_dict_parameter))\n\n with\ \ open(output_list_parameter_path, 'w') as f:\n f.write(json.dumps(input_list_parameter))\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-train: container: args: @@ -170,7 +170,7 @@ deploymentSpec: \ {i}\\n{line}\\n=====\\n')\n\n # model is an instance of Model artifact,\ \ which has a .metadata dictionary\n # to store arbitrary metadata for\ \ the output artifact.\n model.metadata['accuracy'] = 0.9\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-test-pipeline-beta root: diff --git a/sdk/python/test_data/pipelines/lightweight_python_functions_with_outputs.yaml 
b/sdk/python/test_data/pipelines/lightweight_python_functions_with_outputs.yaml index 63dc4138f21..e476751e3da 100644 --- a/sdk/python/test_data/pipelines/lightweight_python_functions_with_outputs.yaml +++ b/sdk/python/test_data/pipelines/lightweight_python_functions_with_outputs.yaml @@ -95,7 +95,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef add_numbers(first: int, second: int) -> int:\n return first\ \ + second\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-concat-message: container: args: @@ -124,7 +124,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef concat_message(first: str, second: str) -> str:\n return first\ \ + second\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-output-artifact: container: args: @@ -153,7 +153,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef output_artifact(number: int, message: str) -> Dataset:\n result\ \ = [message for _ in range(number)]\n return '\\n'.join(result)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-output-named-tuple: container: args: @@ -189,7 +189,7 @@ deploymentSpec: \ = 'Model contents: ' + artifact_contents\n\n from collections import\ \ namedtuple\n output = namedtuple('Outputs', ['scalar', 'metrics', 'model'])\n\ \ return output(scalar, metrics, model)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: functions-with-outputs root: diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_complex.yaml b/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_complex.yaml index 6994b0a68dc..b0d7a671150 100644 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_complex.yaml +++ 
b/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_complex.yaml @@ -304,7 +304,7 @@ deploymentSpec: \ with open(dataset.path) as f:\n nums.append(int(f.read()))\n\ \ with open(out_dataset.path, 'w') as f:\n f.write(str(sum(nums)))\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-add-2: container: args: @@ -336,7 +336,7 @@ deploymentSpec: \ with open(dataset.path) as f:\n nums.append(int(f.read()))\n\ \ with open(out_dataset.path, 'w') as f:\n f.write(str(sum(nums)))\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-add-two-ints: container: args: @@ -368,7 +368,7 @@ deploymentSpec: \ as f:\n in_dataset1 = int(f.read())\n\n with open(in_dataset2.path)\ \ as f:\n in_dataset2 = int(f.read())\n\n with open(out_dataset.path,\ \ 'w') as f:\n f.write(str(in_dataset1 + in_dataset2))\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-double: container: args: @@ -398,7 +398,7 @@ deploymentSpec: \ *\n\ndef double(\n num: int,\n out_dataset: Output[Dataset],\n):\n\ \ with open(out_dataset.path, 'w') as f:\n f.write(str(2 * num))\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-double-2: container: args: @@ -428,7 +428,7 @@ deploymentSpec: \ *\n\ndef double(\n num: int,\n out_dataset: Output[Dataset],\n):\n\ \ with open(out_dataset.path, 'w') as f:\n f.write(str(2 * num))\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: math-pipeline root: diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_simple.py b/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_simple.py index 04910311de0..8f5946ae5ce 100644 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_simple.py +++ b/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_simple.py @@ -28,7 +28,7 @@ def add(in_datasets: Input[List[Dataset]], out_dataset: Output[Dataset]): def 
add_container(in_datasets: Input[List[Dataset]], out_dataset: Output[Dataset]): return dsl.ContainerSpec( - image='python:3.9', + image='public.ecr.aws/docker/library/python:3.12', command=['python', '-c'], args=[ textwrap.dedent(""" diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_simple.yaml b/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_simple.yaml index e923d788ec2..6ea538886d0 100644 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_simple.yaml +++ b/sdk/python/test_data/pipelines/parallelfor_fan_in/artifacts_simple.yaml @@ -108,7 +108,7 @@ deploymentSpec: \ nums = []\n for dataset in in_datasets:\n with open(dataset.path)\ \ as f:\n nums.append(int(f.read()))\n with open(out_dataset.path,\ \ 'w') as f:\n f.write(str(sum(nums)))\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-add-container: container: args: @@ -125,7 +125,7 @@ deploymentSpec: command: - python - -c - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-double: container: args: @@ -154,7 +154,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef double(num: int, out_dataset: Output[Dataset]):\n with open(out_dataset.path,\ \ 'w') as f:\n f.write(str(2 * num))\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: math-pipeline root: diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/conditional_producer_and_consumers.yaml b/sdk/python/test_data/pipelines/parallelfor_fan_in/conditional_producer_and_consumers.yaml index ac95760cb19..a7fc34ceebd 100644 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/conditional_producer_and_consumers.yaml +++ b/sdk/python/test_data/pipelines/parallelfor_fan_in/conditional_producer_and_consumers.yaml @@ -147,7 +147,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef add(nums: 
List[int]) -> int:\n return sum(nums)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-double: container: args: @@ -175,7 +175,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: math-pipeline root: diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/nested_with_parameters.yaml b/sdk/python/test_data/pipelines/parallelfor_fan_in/nested_with_parameters.yaml index 221ee317b5d..3b9ae6a0f5c 100644 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/nested_with_parameters.yaml +++ b/sdk/python/test_data/pipelines/parallelfor_fan_in/nested_with_parameters.yaml @@ -166,7 +166,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef add(nums: List[List[int]]) -> int:\n import itertools\n \ \ return sum(itertools.chain(*nums))\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-add-two-nums: container: args: @@ -194,7 +194,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef add_two_nums(x: int, y: int) -> int:\n return x + y\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-double: container: args: @@ -222,7 +222,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-double-2: container: args: @@ -250,7 +250,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: math-pipeline root: 
diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_complex.yaml b/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_complex.yaml index 1fb44a7fd7e..9a56e5c7c85 100644 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_complex.yaml +++ b/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_complex.yaml @@ -240,7 +240,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef add_two_numbers(x: List[int], y: List[int]) -> int:\n return\ \ sum(x) + sum(y)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-double: container: args: @@ -268,7 +268,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-double-2: container: args: @@ -296,7 +296,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-nested-add: container: args: @@ -325,7 +325,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef nested_add(nums: List[List[int]]) -> int:\n import itertools\n\ \ return sum(itertools.chain(*nums))\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-nested-add-2: container: args: @@ -354,7 +354,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef nested_add(nums: List[List[int]]) -> int:\n import itertools\n\ \ return sum(itertools.chain(*nums))\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-simple-add: container: args: @@ -382,7 +382,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom 
kfp.dsl import *\nfrom typing import\ \ *\n\ndef simple_add(nums: List[int]) -> int:\n return sum(nums)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-simple-add-2: container: args: @@ -410,7 +410,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef simple_add(nums: List[int]) -> int:\n return sum(nums)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: math-pipeline root: diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_simple.yaml b/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_simple.yaml index 47fb058803f..0df4c83b840 100644 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_simple.yaml +++ b/sdk/python/test_data/pipelines/parallelfor_fan_in/parameters_simple.yaml @@ -90,7 +90,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef add(nums: List[int]) -> int:\n return sum(nums)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-add-container: container: args: @@ -128,7 +128,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: math-pipeline root: diff --git a/sdk/python/test_data/pipelines/parallelfor_fan_in/pipeline_producer_consumer.yaml b/sdk/python/test_data/pipelines/parallelfor_fan_in/pipeline_producer_consumer.yaml index 015d9066115..3d9e5a691df 100644 --- a/sdk/python/test_data/pipelines/parallelfor_fan_in/pipeline_producer_consumer.yaml +++ b/sdk/python/test_data/pipelines/parallelfor_fan_in/pipeline_producer_consumer.yaml @@ -222,7 +222,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef add(nums: List[List[int]]) 
-> int:\n import itertools\n \ \ return sum(itertools.chain(*nums))\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-double: container: args: @@ -250,7 +250,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef double(num: int) -> int:\n return 2 * num\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-echo-and-return: container: args: @@ -279,7 +279,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef echo_and_return(string: str) -> str:\n print(string)\n \ \ return string\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-join-and-print: container: args: @@ -308,7 +308,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef join_and_print(strings: List[str]):\n print(''.join(strings))\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: math-pipeline root: diff --git a/sdk/python/test_data/pipelines/pipeline_as_exit_task.yaml b/sdk/python/test_data/pipelines/pipeline_as_exit_task.yaml index 13bfb5acd14..8891966d2f7 100644 --- a/sdk/python/test_data/pipelines/pipeline_as_exit_task.yaml +++ b/sdk/python/test_data/pipelines/pipeline_as_exit_task.yaml @@ -145,7 +145,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n\ \ print(message)\n sys.exit(1)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-get-run-state: container: args: @@ -174,7 +174,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef get_run_state(status: dict) -> str:\n print('Pipeline status:\ \ ', status)\n return status['state']\n\n" - image: python:3.9 + image: 
public.ecr.aws/docker/library/python:3.12 exec-print-op: container: args: @@ -203,7 +203,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-2: container: args: @@ -232,7 +232,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-task-final-status-conditional root: diff --git a/sdk/python/test_data/pipelines/pipeline_in_pipeline.yaml b/sdk/python/test_data/pipelines/pipeline_in_pipeline.yaml index 45efa979a99..489a97150dd 100644 --- a/sdk/python/test_data/pipelines/pipeline_in_pipeline.yaml +++ b/sdk/python/test_data/pipelines/pipeline_in_pipeline.yaml @@ -90,7 +90,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op1-2: container: args: @@ -119,7 +119,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op2: container: command: diff --git a/sdk/python/test_data/pipelines/pipeline_in_pipeline_complex.yaml b/sdk/python/test_data/pipelines/pipeline_in_pipeline_complex.yaml index 268581c358a..ea17de61da6 100644 --- a/sdk/python/test_data/pipelines/pipeline_in_pipeline_complex.yaml +++ b/sdk/python/test_data/pipelines/pipeline_in_pipeline_complex.yaml @@ -177,7 +177,7 @@ deploymentSpec: - "\nimport 
kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op1-2: container: args: @@ -206,7 +206,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op2: container: command: diff --git a/sdk/python/test_data/pipelines/pipeline_in_pipeline_loaded_from_yaml.yaml b/sdk/python/test_data/pipelines/pipeline_in_pipeline_loaded_from_yaml.yaml index 97977ea3d99..62ed2de12d0 100644 --- a/sdk/python/test_data/pipelines/pipeline_in_pipeline_loaded_from_yaml.yaml +++ b/sdk/python/test_data/pipelines/pipeline_in_pipeline_loaded_from_yaml.yaml @@ -168,7 +168,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op1-2: container: args: @@ -197,7 +197,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op1-3: container: args: @@ -226,7 +226,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op1(data: Input[Artifact]):\n with open(data.path, 'r')\ \ as f:\n print(f.read())\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op2: container: args: diff --git a/sdk/python/test_data/pipelines/pipeline_with_condition.yaml b/sdk/python/test_data/pipelines/pipeline_with_condition.yaml index eb350488970..b253d4e6f2d 100644 --- 
a/sdk/python/test_data/pipelines/pipeline_with_condition.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_condition.yaml @@ -105,7 +105,7 @@ deploymentSpec: \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-flip-coin-op-2: container: args: @@ -135,7 +135,7 @@ deploymentSpec: \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op: container: args: @@ -164,7 +164,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-2: container: args: @@ -193,7 +193,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-3: container: args: @@ -222,7 +222,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: single-condition-pipeline root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml b/sdk/python/test_data/pipelines/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml index 12754cacc2f..562170419ae 100644 --- 
a/sdk/python/test_data/pipelines/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_condition_dynamic_task_output_custom_training_job.yaml @@ -222,7 +222,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef accelerator_count() -> int:\n return 1\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-accelerator-type: container: args: @@ -250,7 +250,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef accelerator_type() -> str:\n return 'NVIDIA_TESLA_P4'\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-custom-training-job: container: args: @@ -307,7 +307,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef flip_biased_coin_op() -> str:\n \"\"\"Flip a coin and output\ \ heads.\"\"\"\n return 'heads'\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-machine-type: container: args: @@ -335,7 +335,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef machine_type() -> str:\n return 'n1-standard-4'\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_dynamic_importer_metadata.yaml b/sdk/python/test_data/pipelines/pipeline_with_dynamic_importer_metadata.yaml index 3e788001c0e..67ebc830777 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_dynamic_importer_metadata.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_dynamic_importer_metadata.yaml @@ -109,7 +109,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef make_name(name: str) -> str:\n return name\n\n" - image: python:3.9 + image: 
public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-importer root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_dynamic_task_output_custom_training_job.yaml b/sdk/python/test_data/pipelines/pipeline_with_dynamic_task_output_custom_training_job.yaml index 3c688b6bab3..5344eb610fa 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_dynamic_task_output_custom_training_job.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_dynamic_task_output_custom_training_job.yaml @@ -159,7 +159,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef accelerator_count() -> int:\n return 1\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-accelerator-type: container: args: @@ -187,7 +187,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef accelerator_type() -> str:\n return 'NVIDIA_TESLA_P4'\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-custom-training-job: container: args: @@ -243,7 +243,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef machine_type() -> str:\n return 'n1-standard-4'\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_env.yaml b/sdk/python/test_data/pipelines/pipeline_with_env.yaml index 9663641b9ee..3da0ed62b11 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_env.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_env.yaml @@ -60,7 +60,7 @@ deploymentSpec: env: - name: ENV1 value: val1 - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-env root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_exit_handler.yaml b/sdk/python/test_data/pipelines/pipeline_with_exit_handler.yaml index 
ca5d65cd408..9b5e1a5ad36 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_exit_handler.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_exit_handler.yaml @@ -81,7 +81,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n\ \ print(message)\n sys.exit(1)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op: container: args: @@ -110,7 +110,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-2: container: args: @@ -139,7 +139,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-exit-handler root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_google_artifact_type.yaml b/sdk/python/test_data/pipelines/pipeline_with_google_artifact_type.yaml index ae54d2aef4d..e409deacca6 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_google_artifact_type.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_google_artifact_type.yaml @@ -79,7 +79,7 @@ deploymentSpec: \ print('Dataset')\n print('artifact.type: ', type(dataset))\n print('artifact.name:\ \ ', dataset.name)\n print('artifact.uri: ', dataset.uri)\n print('artifact.metadata:\ \ ', dataset.metadata)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-model-producer: container: args: @@ -110,7 +110,7 @@ deploymentSpec: \ *\nimport aiplatform\n\ndef model_producer(model: Output[aiplatform.VertexModel]):\n\ \n assert isinstance(model, 
aiplatform.VertexModel), type(model)\n \ \ with open(model.path, 'w') as f:\n f.write('my model')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-google-types root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_importer.yaml b/sdk/python/test_data/pipelines/pipeline_with_importer.yaml index 530a881afa4..8612be42cf4 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_importer.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_importer.yaml @@ -148,7 +148,7 @@ deploymentSpec: \ trained using data: {data}'\n\n from collections import namedtuple\n\ \ output = namedtuple('Outputs', ['scalar', 'model'])\n return output(scalar,\ \ model)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-train-2: container: args: @@ -182,7 +182,7 @@ deploymentSpec: \ trained using data: {data}'\n\n from collections import namedtuple\n\ \ output = namedtuple('Outputs', ['scalar', 'model'])\n return output(scalar,\ \ model)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-importer root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_loops.yaml b/sdk/python/test_data/pipelines/pipeline_with_loops.yaml index df8c9c3fc69..891df7a6120 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_loops.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_loops.yaml @@ -187,7 +187,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef args_generator_op() -> List[Dict[str, str]]:\n return [{'A_a':\ \ '1', 'B_b': '2'}, {'A_a': '10', 'B_b': '20'}]\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-struct: container: args: @@ -215,7 +215,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_struct(struct: Dict):\n print(struct)\n\n" - image: python:3.9 + 
image: public.ecr.aws/docker/library/python:3.12 exec-print-struct-2: container: args: @@ -243,7 +243,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_struct(struct: Dict):\n print(struct)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text: container: args: @@ -271,7 +271,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-2: container: args: @@ -299,7 +299,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-3: container: args: @@ -327,7 +327,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-4: container: args: @@ -355,7 +355,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-5: container: args: @@ -383,7 +383,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-loops root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_loops_and_conditions.yaml b/sdk/python/test_data/pipelines/pipeline_with_loops_and_conditions.yaml index 77ae492d702..0332043892c 100644 --- 
a/sdk/python/test_data/pipelines/pipeline_with_loops_and_conditions.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_loops_and_conditions.yaml @@ -620,7 +620,7 @@ deploymentSpec: \ 'A_a': '1',\n 'B_b': ['2', '20'],\n },\n \ \ {\n 'A_a': '10',\n 'B_b': ['22', '222'],\n \ \ },\n ]\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-args-generator-op-2: container: args: @@ -651,7 +651,7 @@ deploymentSpec: \ 'A_a': '1',\n 'B_b': ['2', '20'],\n },\n \ \ {\n 'A_a': '10',\n 'B_b': ['22', '222'],\n \ \ },\n ]\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-flip-coin-op: container: args: @@ -681,7 +681,7 @@ deploymentSpec: \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-struct: container: args: @@ -709,7 +709,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_struct(struct: dict):\n print(struct)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text: container: args: @@ -738,7 +738,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-2: container: args: @@ -767,7 +767,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-3: container: args: @@ -796,7 +796,7 @@ deploymentSpec: - "\nimport kfp\nfrom 
kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-4: container: args: @@ -825,7 +825,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-5: container: args: @@ -854,7 +854,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-6: container: args: @@ -883,7 +883,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-7: container: args: @@ -912,7 +912,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-8: container: args: @@ -941,7 +941,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-9: container: args: @@ -970,7 +970,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import 
dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-loops-and-conditions-multi-layers root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_metadata_fields.yaml b/sdk/python/test_data/pipelines/pipeline_with_metadata_fields.yaml index b3181c95eda..8cf228c0639 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_metadata_fields.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_metadata_fields.yaml @@ -84,7 +84,7 @@ deploymentSpec: \ as f:\n content_b = f.read()\n\n concatenated_string = content_a\ \ + content_b\n with open(out_dataset.path, 'w') as f:\n f.write(concatenated_string)\n\ \n return concatenated_string\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-str-to-dataset: container: args: @@ -115,7 +115,7 @@ deploymentSpec: \"\"Convert string to dataset.\n\n Args:\n string: The string.\n\ \n Returns:\n dataset: The dataset.\n \"\"\"\n with open(dataset.path,\ \ 'w') as f:\n f.write(string)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: description: A pipeline that joins string to in_dataset. 
displayName: Concatenation pipeline diff --git a/sdk/python/test_data/pipelines/pipeline_with_metrics_outputs.yaml b/sdk/python/test_data/pipelines/pipeline_with_metrics_outputs.yaml index 3d42ec7d9b6..6bc05a6a8db 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_metrics_outputs.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_metrics_outputs.yaml @@ -63,7 +63,7 @@ deploymentSpec: \ that outputs metrics with a random accuracy.\"\"\"\n import random\n\ \ result = random.randint(0, 100)\n metrics.log_metric('accuracy',\ \ result)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-output-metrics-2: container: args: @@ -94,7 +94,7 @@ deploymentSpec: \ that outputs metrics with a random accuracy.\"\"\"\n import random\n\ \ result = random.randint(0, 100)\n metrics.log_metric('accuracy',\ \ result)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-metrics-outputs root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_multiple_exit_handlers.yaml b/sdk/python/test_data/pipelines/pipeline_with_multiple_exit_handlers.yaml index 1e99f2eb8cf..5b98f523bd7 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_multiple_exit_handlers.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_multiple_exit_handlers.yaml @@ -141,7 +141,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n\ \ print(message)\n sys.exit(1)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op: container: args: @@ -170,7 +170,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-2: container: args: @@ -199,7 +199,7 @@ 
deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-3: container: args: @@ -228,7 +228,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-4: container: args: @@ -257,7 +257,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-5: container: args: @@ -286,7 +286,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-6: container: args: @@ -315,7 +315,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-multiple-exit-handlers root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_nested_conditions.yaml b/sdk/python/test_data/pipelines/pipeline_with_nested_conditions.yaml index 62b04e0f2ab..3053efd7abb 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_nested_conditions.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_nested_conditions.yaml @@ -164,7 +164,7 @@ deploymentSpec: \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip 
a coin and output heads\ \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-flip-coin-op-2: container: args: @@ -194,7 +194,7 @@ deploymentSpec: \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-flip-coin-op-3: container: args: @@ -224,7 +224,7 @@ deploymentSpec: \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-flip-coin-op-4: container: args: @@ -254,7 +254,7 @@ deploymentSpec: \ *\n\ndef flip_coin_op() -> str:\n \"\"\"Flip a coin and output heads\ \ or tails randomly.\"\"\"\n import random\n result = 'heads' if random.randint(0,\ \ 1) == 0 else 'tails'\n return result\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op: container: args: @@ -283,7 +283,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-2: container: args: @@ -312,7 +312,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-3: container: args: @@ -341,7 +341,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import 
*\nfrom typing import\ \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-4: container: args: @@ -370,7 +370,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(msg: str):\n \"\"\"Print a message.\"\"\"\n print(msg)\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: nested-conditions-pipeline root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_nested_loops.yaml b/sdk/python/test_data/pipelines/pipeline_with_nested_loops.yaml index 960d091d1ef..ccf28fe5ff3 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_nested_loops.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_nested_loops.yaml @@ -161,7 +161,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-2: container: args: @@ -190,7 +190,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-3: container: args: @@ -219,7 +219,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(msg: str, msg2: Optional[str] = None):\n print(f'msg:\ \ {msg}, msg2: {msg2}')\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-nested-loops root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_outputs.yaml b/sdk/python/test_data/pipelines/pipeline_with_outputs.yaml index 
914937e18f4..c3074d52cdc 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_outputs.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_outputs.yaml @@ -120,7 +120,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op1-2: container: args: @@ -149,7 +149,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op1(msg: str) -> str:\n print(msg)\n return msg\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op2: container: args: diff --git a/sdk/python/test_data/pipelines/pipeline_with_parallelfor_list_artifacts.yaml b/sdk/python/test_data/pipelines/pipeline_with_parallelfor_list_artifacts.yaml index 7df9b8930a9..e30a49fb528 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_parallelfor_list_artifacts.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_parallelfor_list_artifacts.yaml @@ -274,7 +274,7 @@ deploymentSpec: \ *\n\ndef make_artifact(data: str) -> Artifact:\n artifact = Artifact(uri=dsl.get_uri(),\ \ metadata={'length': len(data)})\n with open(artifact.path, 'w') as\ \ f:\n f.write(data)\n return artifact\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-make-dataset: container: args: @@ -304,7 +304,7 @@ deploymentSpec: \ *\n\ndef make_dataset(data: str) -> Dataset:\n dataset = Dataset(uri=dsl.get_uri(),\ \ metadata={'length': len(data)})\n with open(dataset.path, 'w') as f:\n\ \ f.write(data)\n return dataset\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-artifact-name: container: args: @@ -333,7 +333,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_artifact_name(artifact: Artifact) -> str:\n 
print(artifact.name)\n\ \ return artifact.name\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-artifact-name-2: container: args: @@ -362,7 +362,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_artifact_name(artifact: Artifact) -> str:\n print(artifact.name)\n\ \ return artifact.name\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-parallelfor-artifacts root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_parallelfor_parallelism.yaml b/sdk/python/test_data/pipelines/pipeline_with_parallelfor_parallelism.yaml index 0fcf820fc14..d7f8fcad884 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_parallelfor_parallelism.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_parallelfor_parallelism.yaml @@ -388,7 +388,7 @@ deploymentSpec: \ are strings and values are integers. For testing type \n handling during\ \ compilation.\"\"\"\n return [{'a': 1, 'b': 2}, {'a': 2, 'b': 3}, {'a':\ \ 3, 'b': 4}]\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-list-dict-maker-1: container: args: @@ -419,7 +419,7 @@ deploymentSpec: \ dictionary typing (no enforcement of specific key or\n value types).\n\ \n Tests flexibility in type handling.\n \"\"\"\n return [{'a':\ \ 1, 'b': 2}, {'a': 2, 'b': 3}, {'a': 3, 'b': 4}]\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-list-dict-maker-1-2: container: args: @@ -450,7 +450,7 @@ deploymentSpec: \ dictionary typing (no enforcement of specific key or\n value types).\n\ \n Tests flexibility in type handling.\n \"\"\"\n return [{'a':\ \ 1, 'b': 2}, {'a': 2, 'b': 3}, {'a': 3, 'b': 4}]\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-list-dict-maker-2: container: args: @@ -481,7 +481,7 @@ deploymentSpec: \ of dictionaries without type enforcement.\n\n Tests flexibility in\ \ 
type handling.\n \"\"\"\n return [{'a': 1, 'b': 2}, {'a': 2, 'b':\ \ 3}, {'a': 3, 'b': 4}]\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-list-dict-maker-3: container: args: @@ -512,7 +512,7 @@ deploymentSpec: \ (no typing or structure guarantees).\n\n Tests the limits of compiler\ \ type handling.\n \"\"\"\n return [{'a': 1, 'b': 2}, {'a': 2, 'b':\ \ 3}, {'a': 3, 'b': 4}]\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-int: container: args: @@ -540,7 +540,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_int(x: int):\n print(x)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-int-2: container: args: @@ -568,7 +568,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_int(x: int):\n print(x)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-int-3: container: args: @@ -596,7 +596,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_int(x: int):\n print(x)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-int-4: container: args: @@ -624,7 +624,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_int(x: int):\n print(x)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-int-5: container: args: @@ -652,7 +652,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_int(x: int):\n print(x)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-int-6: container: args: @@ -680,7 +680,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ 
*\n\ndef print_int(x: int):\n print(x)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text: container: args: @@ -708,7 +708,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-2: container: args: @@ -736,7 +736,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-3: container: args: @@ -764,7 +764,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-4: container: args: @@ -792,7 +792,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-5: container: args: @@ -820,7 +820,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-text-6: container: args: @@ -848,7 +848,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_text(msg: str):\n print(msg)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-loops root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_params_containing_format.yaml b/sdk/python/test_data/pipelines/pipeline_with_params_containing_format.yaml index 
2ca766c8f3b..8888a176ea1 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_params_containing_format.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_params_containing_format.yaml @@ -90,7 +90,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(text: str) -> str:\n print(text)\n return text\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-2: container: args: @@ -119,7 +119,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(text: str) -> str:\n print(text)\n return text\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op2: container: args: @@ -148,7 +148,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op2(text1: str, text2: str) -> str:\n print(text1 +\ \ text2)\n return text1 + text2\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-pipelineparam-containing-format root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_placeholders.yaml b/sdk/python/test_data/pipelines/pipeline_with_placeholders.yaml index 44f63c95d61..fa7167cfc6e 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_placeholders.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_placeholders.yaml @@ -70,7 +70,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(msg: str, value: str):\n print(msg, value)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-2: container: args: @@ -98,7 +98,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(msg: str, value: str):\n print(msg, value)\n\n" - image: python:3.9 + image: 
public.ecr.aws/docker/library/python:3.12 exec-print-op-3: container: args: @@ -126,7 +126,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(msg: str, value: str):\n print(msg, value)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-4: container: args: @@ -154,7 +154,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(msg: str, value: str):\n print(msg, value)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op-5: container: args: @@ -182,7 +182,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(msg: str, value: str):\n print(msg, value)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-placeholders root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_retry.yaml b/sdk/python/test_data/pipelines/pipeline_with_retry.yaml index e82aefc825b..4947f2404a0 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_retry.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_retry.yaml @@ -45,7 +45,7 @@ deploymentSpec: ' - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef add(a: float, b: float) -> float:\n return a + b\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: test-pipeline root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_task_final_status.yaml b/sdk/python/test_data/pipelines/pipeline_with_task_final_status.yaml index 61c916495cd..c00386c5417 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_task_final_status.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_task_final_status.yaml @@ -88,7 +88,7 @@ deploymentSpec: \ print('Pipeline task name: ', status.pipeline_task_name)\n print('Error\ 
\ code: ', status.error_code)\n print('Error message: ', status.error_message)\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-fail-op: container: args: @@ -117,7 +117,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n\ \ print(message)\n sys.exit(1)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op: container: args: @@ -146,7 +146,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-task-final-status root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_task_final_status_yaml.py b/sdk/python/test_data/pipelines/pipeline_with_task_final_status_yaml.py index caada43bc53..736b2ae0476 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_task_final_status_yaml.py +++ b/sdk/python/test_data/pipelines/pipeline_with_task_final_status_yaml.py @@ -24,7 +24,7 @@ - {name: status, type: PipelineTaskFinalStatus} implementation: container: - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 command: - echo - "user input:" @@ -39,7 +39,7 @@ - {name: message, type: String} implementation: container: - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 command: - echo - {inputValue: message} diff --git a/sdk/python/test_data/pipelines/pipeline_with_task_final_status_yaml.yaml b/sdk/python/test_data/pipelines/pipeline_with_task_final_status_yaml.yaml index fd6ef9e3a85..6a7807cb853 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_task_final_status_yaml.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_task_final_status_yaml.yaml @@ -46,13 +46,13 @@ deploymentSpec: - 
'{{$.inputs.parameters[''user_input'']}}' - 'pipeline status:' - '{{$.inputs.parameters[''status'']}}' - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op: container: command: - echo - '{{$.inputs.parameters[''message'']}}' - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-task-final-status-yaml root: diff --git a/sdk/python/test_data/pipelines/pipeline_with_task_using_ignore_upstream_failure.yaml b/sdk/python/test_data/pipelines/pipeline_with_task_using_ignore_upstream_failure.yaml index ffc0928698a..07b59aa3ae7 100644 --- a/sdk/python/test_data/pipelines/pipeline_with_task_using_ignore_upstream_failure.yaml +++ b/sdk/python/test_data/pipelines/pipeline_with_task_using_ignore_upstream_failure.yaml @@ -51,7 +51,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef fail_op(message: str) -> str:\n \"\"\"Fails.\"\"\"\n import\ \ sys\n print(message)\n sys.exit(1)\n return message\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op: container: args: @@ -80,7 +80,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str = 'default'):\n \"\"\"Prints a message.\"\ \"\"\n print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: my-pipeline root: diff --git a/sdk/python/test_data/pipelines/pythonic_artifact_with_single_return.yaml b/sdk/python/test_data/pipelines/pythonic_artifact_with_single_return.yaml index fa6aa54dae0..01c6fce1eee 100644 --- a/sdk/python/test_data/pipelines/pythonic_artifact_with_single_return.yaml +++ b/sdk/python/test_data/pipelines/pythonic_artifact_with_single_return.yaml @@ -74,7 +74,7 @@ deploymentSpec: \ str) -> str:\n return x\n\n model = Model(\n uri=dsl.get_uri(suffix='model'),\n\ \ metadata={'data': text_dataset.name},\n )\n\n with 
open(model.path,\ \ 'wb') as f:\n dill.dump(dummy_model, f)\n\n return model\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: make-language-model-pipeline root: diff --git a/sdk/python/test_data/pipelines/pythonic_artifacts_with_list_of_artifacts.yaml b/sdk/python/test_data/pipelines/pythonic_artifacts_with_list_of_artifacts.yaml index 985e26f026a..77ff37b7f27 100644 --- a/sdk/python/test_data/pipelines/pythonic_artifacts_with_list_of_artifacts.yaml +++ b/sdk/python/test_data/pipelines/pythonic_artifacts_with_list_of_artifacts.yaml @@ -97,7 +97,7 @@ deploymentSpec: \ = []\n for dataset in datasets:\n with open(dataset.path, 'r')\ \ as f:\n texts.append(f.read())\n\n return ''.join(texts)\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-make-dataset: container: args: @@ -127,7 +127,7 @@ deploymentSpec: \ *\n\ndef make_dataset(text: str) -> Dataset:\n dataset = Dataset(uri=dsl.get_uri(),\ \ metadata={'length': len(text)})\n with open(dataset.path, 'w') as f:\n\ \ f.write(text)\n return dataset\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: make-and-join-datasets root: diff --git a/sdk/python/test_data/pipelines/pythonic_artifacts_with_multiple_returns.yaml b/sdk/python/test_data/pipelines/pythonic_artifacts_with_multiple_returns.yaml index 247484fbe28..13898ce241e 100644 --- a/sdk/python/test_data/pipelines/pythonic_artifacts_with_multiple_returns.yaml +++ b/sdk/python/test_data/pipelines/pythonic_artifacts_with_multiple_returns.yaml @@ -110,7 +110,7 @@ deploymentSpec: \ f.write(out_data2)\n\n outputs = NamedTuple(\n 'outputs',\n\ \ dataset1=Dataset,\n dataset2=Dataset,\n )\n return\ \ outputs(dataset1=dataset1, dataset2=dataset2)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-make-dataset: container: args: @@ -140,7 +140,7 @@ deploymentSpec: \ *\n\ndef make_dataset() -> Artifact:\n artifact = 
Artifact(uri=dsl.get_uri('dataset'))\n\ \ with open(artifact.path, 'w') as f:\n f.write('Hello, world')\n\ \ return artifact\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: split-datasets-and-return-first root: diff --git a/sdk/runtime_tests/test_data/pipeline_with_task_final_status.yaml b/sdk/runtime_tests/test_data/pipeline_with_task_final_status.yaml index a2d61308664..6853dce560e 100644 --- a/sdk/runtime_tests/test_data/pipeline_with_task_final_status.yaml +++ b/sdk/runtime_tests/test_data/pipeline_with_task_final_status.yaml @@ -88,7 +88,7 @@ deploymentSpec: \ print('Pipeline task name: ', status.pipeline_task_name)\n print('Error\ \ code: ', status.error_code)\n print('Error message: ', status.error_message)\n\ \n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-fail-op: container: args: @@ -117,7 +117,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef fail_op(message: str):\n \"\"\"Fails.\"\"\"\n import sys\n\ \ print(message)\n sys.exit(1)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 exec-print-op: container: args: @@ -146,7 +146,7 @@ deploymentSpec: - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ \ *\n\ndef print_op(message: str):\n \"\"\"Prints a message.\"\"\"\n\ \ print(message)\n\n" - image: python:3.9 + image: public.ecr.aws/docker/library/python:3.12 pipelineInfo: name: pipeline-with-task-final-status root: diff --git a/test/kfp-functional-test/README.md b/test/kfp-functional-test/README.md index 2387cc7d775..6658fe06077 100644 --- a/test/kfp-functional-test/README.md +++ b/test/kfp-functional-test/README.md @@ -65,13 +65,13 @@ dependencies. 
To update dependencies: Using Docker: ```bash - docker run -it -v $(pwd):/tmp/src -w /tmp/src python:3.9-slim\ + docker run -it -v $(pwd):/tmp/src -w /tmp/src public.ecr.aws/docker/library/python:3.12-slim\ /tmp/src/test/kfp-functional-test/kfp-functional-test.sh --host "http://localhost:8080" ``` Using Podman: ```bash - podman run -it -v $(pwd):/tmp/src:Z -w /tmp/src python:3.9-slim \ + podman run -it -v $(pwd):/tmp/src:Z -w /tmp/src public.ecr.aws/docker/library/python:3.12-slim \ /tmp/src/test/kfp-functional-test/kfp-functional-test.sh --host "http://localhost:8080" ```