Skip to content

Commit

Permalink
add setup/teardown of prereqs and secret tests
Browse files Browse the repository at this point in the history
Signed-off-by: Humair Khan <HumairAK@users.noreply.github.com>
  • Loading branch information
HumairAK committed Feb 19, 2025
1 parent 66f4881 commit 9e3efbd
Show file tree
Hide file tree
Showing 7 changed files with 144 additions and 16 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/kfp-samples.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,8 @@ jobs:

- name: Run Samples Tests
id: tests
env:
PULL_NUMBER: ${{ github.event.pull_request.number }}
run: |
./backend/src/v2/test/sample-test.sh
continue-on-error: true
Expand All @@ -57,4 +59,3 @@ jobs:
with:
name: kfp-samples-tests-artifacts-k8s-${{ matrix.k8s_version }}
path: /tmp/tmp*/*

1 change: 0 additions & 1 deletion backend/src/v2/test/requirements-sample-test.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1 @@
../../../../sdk/python
kfp[kubernetes]
10 changes: 7 additions & 3 deletions backend/src/v2/test/sample-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,16 @@

set -ex

pushd ./backend/src/v2/test
# Point KFP_PACKAGE_PATH at the SDK under test: when PULL_NUMBER is set
# (exported by the CI workflow), install from the PR's merge ref so the
# samples run against the PR's SDK changes; otherwise fall back to the
# repository's default branch.
if [[ -n "${PULL_NUMBER}" ]]; then
export KFP_PACKAGE_PATH="git+https://github.com/kubeflow/pipelines@refs/pull/${PULL_NUMBER}/merge#egg=kfp&subdirectory=sdk/python"
else
export KFP_PACKAGE_PATH='git+https://github.com/kubeflow/pipelines#egg=kfp&subdirectory=sdk/python'
fi

python3 -m pip install --upgrade pip
python3 -m pip install -r ./requirements-sample-test.txt
python3 -m pip install -e kubernetes_platform/python/
python3 -m pip install -e sdk/python/

popd

# The -u flag makes python output unbuffered, so that we can see real time log.
# Reference: https://stackoverflow.com/a/107717
Expand Down
6 changes: 6 additions & 0 deletions kubernetes_platform/python/kfp/kubernetes/secret.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,12 @@
from kfp.kubernetes import kubernetes_executor_config_pb2 as pb


def use_secret_as_env_parameter(
    task: PipelineTask,
    secret_name: Union[pipeline_channel.PipelineParameterChannel, str],
    secret_key_to_env: Dict[str, str],
) -> None:
    # NOTE(review): stub — the body is `pass`, so calling this currently has
    # no effect on `task`. Presumably a placeholder for a variant of
    # `use_secret_as_env` that accepts a pipeline parameter as the secret
    # name; confirm intent before relying on it.
    pass
def use_secret_as_env(
task: PipelineTask,
secret_name: Union[pipeline_channel.PipelineParameterChannel, str],
Expand Down
37 changes: 27 additions & 10 deletions samples/v2/pipeline_with_secret_as_env.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@
"""A pipeline that passes a secret as an env variable to a container."""
from kfp import dsl
from kfp import kubernetes
from kfp.dsl import OutputPath
import os

# Note: this sample will only work if this secret is pre-created before running this pipeline.
# It is pre-created by default only in the Google Cloud distribution listed here:
Expand All @@ -25,24 +27,39 @@
@dsl.component
def comp():
    """Verify each secret-sourced env var carries the expected value.

    The values asserted here match the stringData of the secrets defined in
    samples/v2/pre-requisites/test-secrets.yaml, which `pipeline_secret_env`
    mounts onto this task via `kubernetes.use_secret_as_env`.
    """
    import os
    # Defaults to "" so a missing variable fails the assert with a readable
    # mismatch instead of a KeyError.
    username = os.getenv("USER_NAME", "")
    psw1 = os.getenv("PASSWORD_VAR1", "")
    psw2 = os.getenv("PASSWORD_VAR2", "")
    assert username == "user1"
    assert psw1 == "psw1"
    assert psw2 == "psw2"


@dsl.component
def generate_secret_name(some_output: OutputPath(str)):
    """Write the name of a pre-created secret to the component's output.

    Downstream tasks consume this output as a dynamic secret name.
    """
    with open(some_output, 'w') as out_file:
        out_file.write("test-secret-3")

# Secrets are referenced from samples/v2/pre-requisites/test-secrets.yaml
@dsl.pipeline
def pipeline_secret_env(secret_parm: str = "test-secret-1"):
    """Mount three secrets as env vars, exercising each secret-name source.

    Args:
        secret_parm: Secret name supplied as a pipeline parameter
            (default "test-secret-1").

    The three `use_secret_as_env` calls cover: a pipeline parameter, a
    hard-coded constant, and an upstream task output. All secrets are
    defined in samples/v2/pre-requisites/test-secrets.yaml.
    """
    task = comp()
    # Secret name from the pipeline parameter.
    kubernetes.use_secret_as_env(
        task,
        secret_name=secret_parm,
        secret_key_to_env={'username': 'USER_NAME'})

    # Secret name hard-coded as a constant.
    kubernetes.use_secret_as_env(
        task,
        secret_name="test-secret-2",
        secret_key_to_env={'password': 'PASSWORD_VAR1'})

    # Secret name produced at runtime by an upstream task.
    task2 = generate_secret_name()
    kubernetes.use_secret_as_env(
        task,
        secret_name=task2.output,
        secret_key_to_env={'password': 'PASSWORD_VAR2'})

if __name__ == '__main__':
from kfp import compiler
Expand Down
23 changes: 23 additions & 0 deletions samples/v2/pre-requisites/test-secrets.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Test fixtures for samples/v2/pipeline_with_secret_as_env.py, applied by
# sample_test.py's setUpClass and deleted in tearDownClass.
# test-secret-1: referenced via a pipeline parameter (key: username).
kind: Secret
apiVersion: v1
metadata:
  name: test-secret-1
stringData:
  username: user1
type: Opaque
---
# test-secret-2: referenced by a hard-coded name (key: password).
kind: Secret
apiVersion: v1
metadata:
  name: test-secret-2
stringData:
  password: psw1
type: Opaque
---
# test-secret-3: referenced via an upstream task's output (key: password).
kind: Secret
apiVersion: v1
metadata:
  name: test-secret-3
stringData:
  password: psw2
type: Opaque
80 changes: 79 additions & 1 deletion samples/v2/sample_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,19 @@
import subdagio
import two_step_pipeline_containerized
import pipeline_with_placeholders
import pipeline_with_secret_as_env
from kubernetes import client, config, utils
import yaml

_MINUTE = 60  # seconds
_DEFAULT_TIMEOUT = 5 * _MINUTE
# samples/ root: two dirname() hops up from this file (samples/v2/...).
SAMPLES_DIR = os.path.realpath(os.path.dirname(os.path.dirname(__file__)))
PRE_REQ_DIR = os.path.join(SAMPLES_DIR, 'v2', 'pre-requisites')
# Manifests applied once in setUpClass and removed in tearDownClass.
PREREQS = [
    os.path.join(PRE_REQ_DIR, 'test-secrets.yaml')
]

# Namespace the prerequisite resources are created in.
_KFP_NAMESPACE = os.getenv('KFP_NAMESPACE', 'kubeflow')


@dataclass
Expand All @@ -41,13 +51,81 @@ class TestCase:
timeout: int = _DEFAULT_TIMEOUT


def deploy_k8s_yaml(namespace: str, yaml_file: str):
    """Create every Kubernetes resource defined in ``yaml_file``.

    Args:
        namespace: Namespace to create the resources in.
        yaml_file: Path to a (possibly multi-document) manifest.

    Raises:
        RuntimeError: if applying the manifest fails for any reason.
    """
    config.load_kube_config()
    api_client = client.ApiClient()
    try:
        utils.create_from_yaml(api_client, yaml_file, namespace=namespace)
        print(f"Resource(s) from {yaml_file} deployed successfully.")
    except Exception as e:
        # Chain the original exception so the root cause is preserved in
        # the traceback instead of being flattened into the message.
        raise RuntimeError(f"Exception when deploying from YAML: {e}") from e


def delete_k8s_yaml(namespace: str, yaml_file: str):
    """Best-effort deletion of the resources defined in ``yaml_file``.

    Args:
        namespace: Namespace the resources were created in.
        yaml_file: Path to a (possibly multi-document) manifest.

    Failures are logged and swallowed on purpose: this runs in test
    teardown, which should not mask the result of the tests themselves.
    """
    config.load_kube_config()
    v1 = client.CoreV1Api()
    apps_v1 = client.AppsV1Api()

    # There's no utils.delete_from_yaml, so as a workaround each supported
    # kind is mapped to its delete call and dispatched per document.
    deleters = {
        "deployment": lambda name: apps_v1.delete_namespaced_deployment(name, namespace),
        "service": lambda name: v1.delete_namespaced_service(name, namespace),
        "configmap": lambda name: v1.delete_namespaced_config_map(name, namespace),
        "pod": lambda name: v1.delete_namespaced_pod(name, namespace),
        "secret": lambda name: v1.delete_namespaced_secret(name, namespace),
        "persistentvolumeclaim": lambda name: v1.delete_namespaced_persistent_volume_claim(name, namespace),
        "namespace": lambda name: v1.delete_namespace(name),
    }

    try:
        with open(yaml_file, "r") as f:
            # Iterate inside the `with`: safe_load_all returns a lazy
            # generator that reads from the (still-open) file.
            for doc in yaml.safe_load_all(f):
                if not doc:
                    continue  # Skip empty documents

                kind = doc.get("kind", "").lower()
                name = doc["metadata"]["name"]

                print(f"Deleting {kind} named {name}...")
                delete = deleters.get(kind)
                if delete is None:
                    print(f"Skipping unsupported resource type: {kind}")
                else:
                    delete(name)

        print(f"Resource(s) from {yaml_file} deleted successfully.")
    except Exception as e:
        # Deliberate best-effort: log and move on (see docstring).
        print(f"Exception when deleting from YAML: {e}")


class SampleTest(unittest.TestCase):
_kfp_host_and_port = os.getenv('KFP_API_HOST_AND_PORT',
'http://localhost:8888')
_kfp_ui_and_port = os.getenv('KFP_UI_HOST_AND_PORT',
'http://localhost:8080')
_client = kfp.Client(host=_kfp_host_and_port, ui_host=_kfp_ui_and_port)

@classmethod
def setUpClass(cls):
    """One-time setup: apply every prerequisite manifest before any test."""
    print("Deploying pre-requisites....")
    for manifest in PREREQS:
        deploy_k8s_yaml(_KFP_NAMESPACE, manifest)
    print("Done deploying pre-requisites.")

@classmethod
def tearDownClass(cls):
    """One-time teardown: remove the prerequisite manifests after all tests."""
    print("Cleaning up resources....")
    for manifest in PREREQS:
        delete_k8s_yaml(_KFP_NAMESPACE, manifest)
    print("Done clean up.")

def test(self):
test_cases: List[TestCase] = [
TestCase(pipeline_func=hello_world.pipeline_hello_world),
Expand All @@ -64,7 +142,7 @@ def test(self):
# TestCase(pipeline_func=pipeline_with_importer.pipeline_with_importer),
# TestCase(pipeline_func=pipeline_with_volume.pipeline_with_volume),
# TestCase(pipeline_func=pipeline_with_secret_as_volume.pipeline_secret_volume),
# TestCase(pipeline_func=pipeline_with_secret_as_env.pipeline_secret_env),
TestCase(pipeline_func=pipeline_with_secret_as_env.pipeline_secret_env),
TestCase(pipeline_func=subdagio.parameter.crust),
TestCase(pipeline_func=subdagio.parameter_cache.crust),
TestCase(pipeline_func=subdagio.mixed_parameters.crust),
Expand Down

0 comments on commit 9e3efbd

Please sign in to comment.