diff --git a/CHANGELOG.md b/CHANGELOG.md index 1d58d19..ed4246d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,37 @@ [//]: # (Features) [//]: # (BREAKING CHANGES) +## May 16th, 2024 + +### Download Application Source Code + +A new script was added to download platform-generated source code: + +* `fetch_apps_source_code.py` + +Use the following parameters to generate more human-readable outputs and facilitate the compilation of the source code: + +* --friendly_package_names: source code packages with user-friendly names. +* --include_all_refs: adds to .csproj file all assemblies in the bin folder as references. +* --remove_resources_files: removes references to embedded resource files from the .csproj file. + +### Solution Download and Deploy + +Added new functions to leverage the recently released/improved APIs to download and deploy OutSystems packages: + +* `fetch_lifetime_solution_from_manifest.py` - downloads a solution file based on manifest data. +* `deploy_package_to_target_env.py` - deploys an OutSystems package (solution or application) to a target environment. +* `deploy_package_to_target_env_with_osptool.py` - deploys an OutSystems package (solution or application) using OSP Tool. + +### Improved OSPTool Operations + +OSP Tool command line calls now have live output callback and catalog mapping support. 
class InvalidOutSystemsPackage(Exception):
    """Raised when a file is not a valid OutSystems package (.osp / .oap)."""
# Returns a human readable string representation of bytes
def bytes_human_readable_size(bytes, units=(' bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB')):
    """Recursively divide `bytes` by 1024 until it fits the current unit label.

    Fix: the default was a mutable list; a tuple avoids the shared-mutable-default
    pitfall while behaving identically under slicing.
    NOTE: the parameter name `bytes` shadows the builtin, but is kept so keyword
    callers are unaffected.
    """
    return str(bytes) + units[0] if bytes < 1024 else bytes_human_readable_size(bytes >> 10, units[1:])


def is_valid_os_package(filename: str):
    """Return True when `filename` ends with an OutSystems package extension (.osp or .oap)."""
    return filename.lower().split('.')[-1] in ("osp", "oap")
# Sends a POST request to LT, with binary content.
def send_binary_post_request(lt_api: str, token: str, api_endpoint: str, dest_env: str, lt_endpont: str, binary_file_path: str):
    """Send a POST to LifeTime with an octet-stream body read from a local file.

    Returns a dict with "http_status" and "response" keys.
    Raises FileNotFoundError if the binary file does not exist (previously the
    function silently fell through and returned None, which crashed callers).
    NOTE: the parameter name 'lt_endpont' (sic) is kept to preserve the public signature.
    """
    # Auth token + content type octet-stream
    headers = {'content-type': 'application/octet-stream',
               'authorization': 'Bearer ' + token}
    # Format the request URL to include the api endpoint
    request_string = "{}/{}/{}/{}".format(lt_api, api_endpoint, dest_env, lt_endpont)

    # Fail fast instead of implicitly returning None when the package file is missing
    if not check_file("", binary_file_path):
        raise FileNotFoundError("Binary file not found: {}".format(binary_file_path))
    with open(binary_file_path, 'rb') as f:
        data = f.read()
    response = requests.post(request_string, data=data, headers=headers, verify=get_configuration_value("LIFETIME_SSL_CERT_VERIFY", LIFETIME_SSL_CERT_VERIFY))
    response_obj = {"http_status": response.status_code, "response": {}}
    # Since LT API POST requests do not reply with native JSON, we have to make it ourselves
    if len(response.text) > 0:
        try:
            response_obj["response"] = response.json()
        except ValueError:
            # Workaround for POST /deployments/ since the response is not JSON, just text.
            # (requests raises a ValueError subclass on undecodable JSON; the previous
            # bare `except:` also swallowed KeyboardInterrupt/SystemExit.)
            response_obj["response"] = json.loads('"{}"'.format(response.text))
    return response_obj


# Sends a GET request to LT, with url_params
def send_download_request(pkg_url: str, token: str):
    """Download raw content from a LifeTime-provided URL.

    Returns a dict with "http_status" and the raw response bytes under "response".
    """
    # Auth token + content type json
    headers = {'content-type': 'application/json',
               'authorization': token}
    response = requests.get(pkg_url, headers=headers, verify=get_configuration_value("LIFETIME_SSL_CERT_VERIFY", LIFETIME_SSL_CERT_VERIFY))
    response_obj = {"http_status": response.status_code, "response": response.content}
    return response_obj
# Creates a deployment to a target environment.
# The input is a binary file.
def send_binary_deployment(artifact_dir: str, endpoint: str, auth_token: str, lt_api_version: int, dest_env: str, binary_path: str):
    """Upload a binary package to the target environment and return the created plan.

    Maps each known HTTP status to its domain exception; anything unexpected
    surfaces as NotImplementedError.
    """
    reply = send_binary_post_request(
        endpoint, auth_token, ENVIRONMENTS_ENDPOINT, dest_env, DEPLOYMENT_ENDPOINT, binary_path)
    http_status = int(reply["http_status"])
    details = reply["response"]

    if http_status == DEPLOYMENT_SUCCESS_CODE:
        return details
    if http_status == DEPLOYMENT_INVALID_CODE:
        raise InvalidParametersError("The request is invalid. Check the body of the request for errors. Details: {}.".format(details))
    if http_status == DEPLOYMENT_NO_PERMISSION_CODE:
        raise NotEnoughPermissionsError(
            "You don't have enough permissions to create the deployment. Details: {}".format(details))
    if http_status == DEPLOYMENT_NO_ENVIRONMENT_CODE:
        raise EnvironmentNotFoundError(
            "Can't find the source or target environment. Details: {}.".format(details))
    if http_status == DEPLOYMENT_FAILED_CODE:
        raise ServerError(
            "Failed to create the deployment. Details: {}".format(details))
    raise NotImplementedError(
        "There was an error. Response from server: {}".format(reply))
# Downloads a binary file from a LifeTime download link
def download_package(file_path: str, auth_token: str, pkg_url: str):
    """Fetch a package from `pkg_url` and write it to `file_path` (spaces replaced by '_').

    Raises the mapped domain exception on any non-success HTTP status.
    """
    # Sends the request
    response = send_download_request(pkg_url, auth_token)
    status_code = int(response["http_status"])

    if status_code == DOWNLOAD_SUCCESS_CODE:
        # Remove the spaces in the filename
        file_path = file_path.replace(" ", "_")
        # Makes sure that, if a directory is in the filename, that directory exists.
        # Fix: os.path.dirname() returns "" for a bare filename and makedirs("")
        # raises FileNotFoundError, so only create the directory when there is one.
        target_dir = os.path.dirname(file_path)
        if target_dir:
            os.makedirs(target_dir, exist_ok=True)
        with open(file_path, "wb") as f:
            f.write(response["response"])
    elif status_code == DOWNLOAD_INVALID_KEY_CODE:
        raise InvalidParametersError("The required type is invalid for given keys (EnvironmentKey; ApplicationKey). Details: {}".format(
            response["response"]))
    elif status_code == DOWNLOAD_NO_PERMISSION_CODE:
        raise NotEnoughPermissionsError("User doesn't have permissions for the given keys (EnvironmentKey; ApplicationKey). Details: {}".format(
            response["response"]))
    elif status_code == DOWNLOAD_NOT_FOUND:
        raise EnvironmentNotFoundError("No environment or application found. Please check that the EnvironmentKey and ApplicationKey exist. Details: {}".format(
            response["response"]))
    elif status_code == DOWNLOAD_FAILED_CODE:
        raise ServerError("Failed to start the operation to package. Details: {}".format(
            response["response"]))
    else:
        raise NotImplementedError(
            "There was an error. Response from server: {}".format(response))
# Returns the package key to download the source code of the specified application in a given environment.
def get_environment_app_source_code(artifact_dir: str, endpoint: str, auth_token: str, **kwargs):
    """Trigger generation of a source-code package; returns the API response (package key)."""
    # Tuple with (AppName, AppKey): app_tuple[0] = AppName; app_tuple[1] = AppKey
    app_tuple = _get_application_info(
        artifact_dir, endpoint, auth_token, **kwargs)
    # Tuple with (EnvName, EnvKey): env_tuple[0] = EnvName; env_tuple[1] = EnvKey
    env_tuple = _get_environment_info(
        artifact_dir, endpoint, auth_token, **kwargs)
    query = _app_source_code_query(env_tuple[1], app_tuple[1])
    # Sends the request
    response = send_post_request(endpoint, auth_token, query, None)
    status_code = int(response["http_status"])
    if status_code == ENVIRONMENT_SOURCECODE_PACKAGE_SUCCESS_CODE:
        return response["response"]
    elif status_code == ENVIRONMENT_SOURCECODE_FAILED_CODE:
        raise ServerError("Failed to access the source code of an application. Details: {}".format(
            response["response"]))
    else:
        raise NotImplementedError(
            "There was an error. Response from server: {}".format(response))


# Returns current status of source code package of the specified application in a given environment.
def get_environment_app_source_code_status(artifact_dir: str, endpoint: str, auth_token: str, **kwargs):
    """Poll the status of a source-code package; stores the payload in the artifacts dir."""
    app_tuple = _get_application_info(
        artifact_dir, endpoint, auth_token, **kwargs)
    env_tuple = _get_environment_info(
        artifact_dir, endpoint, auth_token, **kwargs)
    query = _app_source_code_query(
        env_tuple[1], app_tuple[1], "/{}/status".format(kwargs["pkg_key"]))
    # Sends the request
    response = send_get_request(endpoint, auth_token, query, None)
    status_code = int(response["http_status"])
    if status_code == ENVIRONMENT_SOURCECODE_PACKAGE_SUCCESS_CODE:
        # Stores the result
        filename = "{}{}".format(
            kwargs["pkg_key"], ENVIRONMENT_SOURCECODE_STATUS_FILE)
        filename = os.path.join(ENVIRONMENT_SOURCECODE_FOLDER, filename)
        store_data(artifact_dir, filename, response["response"])
        return response["response"]
    elif status_code == ENVIRONMENT_SOURCECODE_FAILED_CODE:
        raise ServerError("Failed to access the source code package status of an application. Details: {}".format(
            response["response"]))
    else:
        raise NotImplementedError(
            "There was an error. Response from server: {}".format(response))


# Returns download link of source code package of the specified application in a given environment.
def get_environment_app_source_code_link(artifact_dir: str, endpoint: str, auth_token: str, **kwargs):
    """Fetch the download link of a finished source-code package; stores the payload."""
    app_tuple = _get_application_info(
        artifact_dir, endpoint, auth_token, **kwargs)
    env_tuple = _get_environment_info(
        artifact_dir, endpoint, auth_token, **kwargs)
    query = _app_source_code_query(
        env_tuple[1], app_tuple[1], "/{}/download".format(kwargs["pkg_key"]))
    # Sends the request
    response = send_get_request(endpoint, auth_token, query, None)
    status_code = int(response["http_status"])
    if status_code == ENVIRONMENT_SOURCECODE_LINK_SUCCESS_CODE:
        # Stores the result
        filename = "{}{}".format(
            kwargs["pkg_key"], ENVIRONMENT_SOURCECODE_LINK_FILE)
        filename = os.path.join(ENVIRONMENT_SOURCECODE_FOLDER, filename)
        store_data(artifact_dir, filename, response["response"])
        return response["response"]
    elif status_code == ENVIRONMENT_SOURCECODE_FAILED_CODE:
        raise ServerError("Failed to access the source code package link of an application. Details: {}".format(
            response["response"]))
    else:
        raise NotImplementedError(
            "There was an error. Response from server: {}".format(response))


# Builds the ".../environments/{env}/applications/{app}/sourcecode{suffix}" query
# shared by the three public source-code operations (previously triplicated inline).
def _app_source_code_query(env_key: str, app_key: str, suffix: str = ""):
    return "{}/{}/{}/{}/{}{}".format(ENVIRONMENTS_ENDPOINT, env_key,
                                     ENVIRONMENT_APPLICATIONS_ENDPOINT, app_key,
                                     ENVIRONMENT_APPLICATIONS_SOURCECODE_ENDPOINT, suffix)
# Sends a request to create a solution, on a target environment, for a specific set of app keys.
# Returns a solution key.
def create_solution(artifact_dir: str, endpoint: str, auth_token: str, environment_key: str, solution_name: str, app_keys: list, include_refs: bool):
    """Request creation of a solution on the target environment; returns the solution key.

    NOTE: artifact_dir is unused but kept for signature consistency with the
    other solution operations.
    """
    # Builds the API call
    query = "{}/{}/{}".format(ENVIRONMENTS_ENDPOINT, environment_key, ENVIRONMENT_SOLUTION_ENDPOINT)

    # Builds the body for the request
    solution_request = _create_solution_request(solution_name, app_keys, include_refs)
    # Sends the request
    response = send_post_request(
        endpoint, auth_token, query, solution_request)
    status_code = int(response["http_status"])
    if status_code == ENVIRONMENT_SOLUTION_SUCCESS_CODE:
        return response["response"]
    else:
        raise NotImplementedError(
            "There was an error. Response from server: {}".format(response))


# Returns the status of a given solution key
def get_solution_status(artifact_dir: str, endpoint: str, auth_token: str, environment_key: str, solution_key: str):
    """Fetch the current status of the solution identified by `solution_key`.

    NOTE: artifact_dir is kept for interface consistency; status payloads are
    not persisted (the commented-out store_data block was removed as dead code).
    """
    # Builds the API call
    query = "{}/{}/{}/{}".format(ENVIRONMENTS_ENDPOINT, environment_key, ENVIRONMENT_SOLUTION_STATUS_ENDPOINT, solution_key)

    # Sends the request
    response = send_get_request(endpoint, auth_token, query, None)
    status_code = int(response["http_status"])
    if status_code == ENVIRONMENT_SOLUTION_STATUS_SUCCESS_CODE:
        return response["response"]
    elif status_code == ENVIRONMENT_SOLUTION_STATUS_NO_PERMISSION_CODE:
        raise NotEnoughPermissionsError(
            "You don't have enough permissions to see the details of that solution. Details: {}".format(response["response"]))
    elif status_code == ENVIRONMENT_SOLUTION_STATUS_NOT_STATUS_CODE:
        raise NoDeploymentsError("There is no solution with the key {}. Details: {}".format(
            solution_key, response["response"]))
    elif status_code == ENVIRONMENT_SOLUTION_STATUS_FAILED_CODE:
        raise ServerError("Failed to get the status of solution with key {}. Details: {}".format(
            solution_key, response["response"]))
    else:
        raise NotImplementedError(
            "There was an error. Response from server: {}".format(response))


# Returns the download link of the solution package in a given environment.
# (Header comment fixed: it previously described the application source-code link.)
def get_solution_url(artifact_dir: str, endpoint: str, auth_token: str, environment_key: str, solution_key: str):
    """Fetch the download URL for a generated solution package and store the payload."""
    # Builds the API call
    query = "{}/{}/{}/{}".format(ENVIRONMENTS_ENDPOINT, environment_key, ENVIRONMENT_SOLUTION_ENDPOINT, solution_key)

    # Sends the request
    response = send_get_request(endpoint, auth_token, query, None)
    status_code = int(response["http_status"])
    if status_code == ENVIRONMENT_SOLUTION_LINK_SUCCESS_CODE:
        # Stores the result
        filename = "{}{}".format(solution_key, SOLUTIONS_LINK_FILE)
        filename = os.path.join(SOLUTIONS_FOLDER, filename)
        store_data(artifact_dir, filename, response["response"])
        return response["response"]["url"]
    elif status_code == ENVIRONMENT_SOLUTION_LINK_FAILED_CODE:
        raise ServerError("Failed to access the solution package link. Details: {}".format(
            response["response"]))
    else:
        raise NotImplementedError(
            "There was an error. Response from server: {}".format(response))


# ---------------------- PRIVATE METHODS ----------------------
def _create_solution_request(solution_name: str, app_keys: list, include_refs: bool):
    """Serialize the solution-creation payload to JSON.

    Fix: app_keys was annotated as str but create_solution passes a list.
    """
    solution_request = {"SolutionName": solution_name,
                        "ApplicationKeys": app_keys,
                        "IncludeReferences": include_refs}

    return json.dumps(solution_request)
def run_command(command, live_output_callback=None, timeout=None):
    """Run an external command, streaming stdout lines to an optional callback.

    Returns (return_code, execution_log) where execution_log is the full captured stdout.
    Raises OSPToolDeploymentError if the process exceeds `timeout` seconds.

    Fix: live_output_callback defaults to None but was called unconditionally,
    crashing when omitted; it is now a no-op when not provided. The reader
    thread is also joined on the timeout path so it does not linger.
    NOTE: stderr is piped but never drained — a very chatty stderr could block
    the child; acceptable for OSP Tool output, but worth confirming.
    """
    process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)

    # Guard against a missing callback so callers may omit it
    if live_output_callback is None:
        def live_output_callback(_line):
            pass

    def read_output(pipe, callback, output_list):
        # Drain the pipe line by line, forwarding each line and keeping a copy
        with pipe:
            for line in iter(pipe.readline, ''):
                callback(line.strip())
                output_list.append(line)

    # List to capture the live output
    live_output = []

    # Create a thread for reading and displaying live output
    live_output_thread = threading.Thread(target=read_output, args=(process.stdout, live_output_callback, live_output))
    live_output_thread.start()

    # Wait for the process to finish and get the return code
    try:
        return_code = process.wait(timeout=timeout)
    except subprocess.TimeoutExpired:
        # Process has exceeded the timeout
        process.terminate()
        live_output_thread.join()
        raise OSPToolDeploymentError("OSPTool Deployment timed out.")

    # Wait for the live output thread to finish
    live_output_thread.join()

    # Combine the live output into a single string (execution log)
    execution_log = ''.join(live_output)

    return return_code, execution_log


def call_osptool(osp_tool_path: str, package_file_path: str, env_hostname: str, credentials: str, catalogmappings_path: str = None):
    """Deploy an OutSystems package via OSP Tool; returns (return_code, execution_log).

    Improvement: catalogmappings_path now defaults to None so callers that
    predate catalog-mapping support keep working unchanged.
    """
    if catalogmappings_path:
        # Construct the command using a formatted string
        command = '"{}" "{}" "{}" {} /catalogmappings "{}"'.format(osp_tool_path, package_file_path, env_hostname, credentials, catalogmappings_path)
    else:
        # Construct the command using a formatted string
        command = '"{}" "{}" "{}" {}'.format(osp_tool_path, package_file_path, env_hostname, credentials)

    # Define a callback function for live output
    def live_output_callback(output_line):
        print(output_line)

    # Run the command and get the return code and execution log
    return_code, execution_log = run_command(command, live_output_callback, timeout=get_configuration_value("SOLUTION_TIMEOUT_IN_SECS", SOLUTION_TIMEOUT_IN_SECS))

    return return_code, execution_log
# ############################################################# SCRIPT ##############################################################
def main(artifact_dir: str, lt_http_proto: str, lt_url: str, lt_api_endpoint: str, lt_api_version: int, lt_token: str, dest_env_label: str, force_two_step_deployment: bool, package_path: str):
    """Deploy a binary OutSystems package (.osp/.oap) to a target environment via LifeTime.

    Flow: wait for LifeTime to be idle -> validate the package extension ->
    upload it as a deployment plan -> abort (or continue) on conflicts ->
    start the plan -> poll until it finishes, times out, or pauses for a
    2-step confirmation. Exits the process with 0 on success / first-step
    success, 1 on timeout, conflict-abort, or deployment error.

    Args:
        artifact_dir: folder where result/conflict artifacts are written.
        lt_http_proto / lt_url / lt_api_endpoint / lt_api_version: pieces used to build the LifeTime API endpoint.
        lt_token: LifeTime API authentication token.
        dest_env_label: label of the destination environment.
        force_two_step_deployment: when True, automatically resumes a plan paused on 2-step confirmation.
        package_path: path to the .osp/.oap file to deploy.
    """
    # Builds the LifeTime endpoint
    lt_endpoint = build_lt_endpoint(lt_http_proto, lt_url, lt_api_endpoint, lt_api_version)

    # Gets the environment key for the destination environment
    dest_env_key = get_environment_key(artifact_dir, lt_endpoint, lt_token, dest_env_label)

    # Wait (up to QUEUE_TIMEOUT_IN_SECS) for any running deployment to finish,
    # since LifeTime only runs one deployment at a time per environment.
    wait_counter = 0
    deployments = get_running_deployment(artifact_dir, lt_endpoint, lt_token, dest_env_key)
    while len(deployments) > 0:
        if wait_counter >= get_configuration_value("QUEUE_TIMEOUT_IN_SECS", QUEUE_TIMEOUT_IN_SECS):
            print("Timeout occurred while waiting for LifeTime to be free, to create the new deployment plan.", flush=True)
            sys.exit(1)
        sleep_value = get_configuration_value("SLEEP_PERIOD_IN_SECS", SLEEP_PERIOD_IN_SECS)
        sleep(sleep_value)
        wait_counter += sleep_value
        print("Waiting for LifeTime to be free. Elapsed time: {} seconds...".format(wait_counter), flush=True)
        deployments = get_running_deployment(artifact_dir, lt_endpoint, lt_token, dest_env_key)

    # LT is free to deploy
    # Validate if file has OutSystems package extension
    if not is_valid_os_package(package_path):
        raise InvalidOutSystemsPackage("Binary file is not an OutSystems package. Expected 'osp' or 'oap' as file extension.")

    # Send the deployment plan and grab the key
    dep_plan_key = send_binary_deployment(artifact_dir, lt_endpoint, lt_token, lt_api_version, dest_env_key, package_path)
    print("Deployment plan {} created successfully.".format(dep_plan_key), flush=True)

    # Check if created deployment plan has conflicts
    dep_details = get_deployment_info(artifact_dir, lt_endpoint, lt_token, dep_plan_key)
    has_conflicts = len(dep_details["ApplicationConflicts"]) > 0
    if has_conflicts:
        store_data(artifact_dir, CONFLICTS_FILE, dep_details["ApplicationConflicts"])
        # v1 of the API cannot continue with errors, so conflicts always abort there
        if not get_configuration_value("ALLOW_CONTINUE_WITH_ERRORS", ALLOW_CONTINUE_WITH_ERRORS) or lt_api_version == 1:
            print("Deployment plan {} has conflicts and will be aborted. Check {} artifact for more details.".format(dep_plan_key, CONFLICTS_FILE), flush=True)
            # Abort previously created deployment plan to target environment
            delete_deployment(lt_endpoint, lt_token, dep_plan_key)
            print("Deployment plan {} was deleted successfully.".format(dep_plan_key), flush=True)
            sys.exit(1)
        else:
            print("Deployment plan {} has conflicts but will continue with errors. Check {} artifact for more details.".format(dep_plan_key, CONFLICTS_FILE), flush=True)

    # Check if outdated consumer applications (outside of deployment plan) should be redeployed and start the deployment plan execution
    if lt_api_version == 1:  # LT for OS version < 11
        start_deployment(lt_endpoint, lt_token, dep_plan_key)
    elif lt_api_version == 2:  # LT for OS v11
        if has_conflicts:
            start_deployment(lt_endpoint, lt_token, dep_plan_key, redeploy_outdated=False, continue_with_errors=True)
        else:
            start_deployment(lt_endpoint, lt_token, dep_plan_key, redeploy_outdated=get_configuration_value("REDEPLOY_OUTDATED_APPS", REDEPLOY_OUTDATED_APPS))
    else:
        raise NotImplementedError("Please make sure the API version is compatible with the module.")
    print("Deployment plan {} started being executed.".format(dep_plan_key), flush=True)

    # Flag to only alert the user once
    alert_user = False
    # Sleep thread until deployment has finished
    wait_counter = 0
    while wait_counter < get_configuration_value("DEPLOYMENT_TIMEOUT_IN_SECS", DEPLOYMENT_TIMEOUT_IN_SECS):
        # Check Deployment Plan status.
        dep_status = get_deployment_status(
            artifact_dir, lt_endpoint, lt_token, dep_plan_key)
        if dep_status["DeploymentStatus"] != DEPLOYMENT_RUNNING_STATUS:
            # Check deployment status is pending approval.
            if dep_status["DeploymentStatus"] == DEPLOYMENT_WAITING_STATUS:
                # Check if deployment waiting status is due to 2-Step
                if check_deployment_two_step_deploy_status(dep_status):
                    # Force it to continue in case of force_two_step_deployment parameter
                    if force_two_step_deployment:
                        continue_deployment(lt_endpoint, lt_token, dep_plan_key)
                        print("Deployment plan {} resumed execution.".format(dep_plan_key), flush=True)
                    else:
                        # Exit the script to continue with the pipeline execution
                        print("Deployment plan {} first step finished successfully.".format(dep_plan_key), flush=True)
                        sys.exit(0)
                # Send notification to alert deployment manual intervention.
                elif not alert_user:
                    alert_user = True
                    print("A manual intervention is required to continue the execution of the deployment plan {}.".format(dep_plan_key), flush=True)
            elif dep_status["DeploymentStatus"] in DEPLOYMENT_ERROR_STATUS_LIST:
                print("Deployment plan finished with status {}.".format(dep_status["DeploymentStatus"]), flush=True)
                store_data(artifact_dir, DEPLOY_ERROR_FILE, dep_status)
                sys.exit(1)
            else:
                # If it reaches here, it means the deployment was successful
                print("Deployment plan finished with status {}.".format(dep_status["DeploymentStatus"]), flush=True)
                # Exit the script to continue with the pipeline
                sys.exit(0)
        # Deployment status is still running. Go back to sleep.
        sleep_value = get_configuration_value("SLEEP_PERIOD_IN_SECS", SLEEP_PERIOD_IN_SECS)
        sleep(sleep_value)
        wait_counter += sleep_value
        print("{} secs have passed since the deployment started...".format(wait_counter), flush=True)

    # Deployment timeout reached. Exit script with error
    print("Timeout occurred while deployment plan is still in {} status.".format(DEPLOYMENT_RUNNING_STATUS), flush=True)
    sys.exit(1)


# End of main()
Default: 2") + parser.add_argument("-e", "--lt_endpoint", type=str, default=LIFETIME_API_ENDPOINT, + help="(Optional) Used to set the API endpoint for LifeTime, without the version. Default: \"lifetimeapi/rest\"") + parser.add_argument("-d", "--destination_env_label", type=str, required=True, + help="Label, as configured in the manifest, of the destination environment where you want to deploy the apps.") + parser.add_argument("-p", "--package_path", type=str, required=True, + help="Package file path") + parser.add_argument("-c", "--force_two_step_deployment", action='store_true', + help="Force the execution of the 2-Step deployment.") + parser.add_argument("-cf", "--config_file", type=str, + help="Config file path. Contains configuration values to override the default ones.") + + args = parser.parse_args() + + # Load config file if exists + if args.config_file: + load_configuration_file(args.config_file) + # Parse the artifact directory + artifact_dir = args.artifacts + # Parse the API endpoint + lt_api_endpoint = args.lt_endpoint + # Parse the LT Url and split the LT hostname from the HTTP protocol + # Assumes the default HTTP protocol = https + lt_http_proto = LIFETIME_HTTP_PROTO + lt_url = args.lt_url + if lt_url.startswith("http://"): + lt_http_proto = "http" + lt_url = lt_url.replace("http://", "") + else: + lt_url = lt_url.replace("https://", "") + if lt_url.endswith("/"): + lt_url = lt_url[:-1] + # Parte LT API Version + lt_version = args.lt_api_version + # Parse the LT Token + lt_token = args.lt_token + # Parse Destination Environment + dest_env_label = args.destination_env_label + # Parse the package directory + package_path = args.package_path + # Parse Force Two-step Deployment flag + force_two_step_deployment = args.force_two_step_deployment + + # Calls the main script + main(artifact_dir, lt_http_proto, lt_url, lt_api_endpoint, lt_version, lt_token, dest_env_label, force_two_step_deployment, package_path) diff --git 
# Python Modules
import sys
import os
import argparse

# Workaround for Jenkins:
# Set the path to include the outsystems module
# Jenkins exposes the workspace directory through env.
if "WORKSPACE" in os.environ:
    sys.path.append(os.environ['WORKSPACE'])
else:  # Else just add the project dir
    sys.path.append(os.getcwd())

# Custom Modules
# Variables
from outsystems.vars.file_vars import ARTIFACT_FOLDER, SOLUTIONS_FOLDER, SOLUTIONS_DEPLOY_FILE
# Functions
from outsystems.osp_tool.osp_base import call_osptool
from outsystems.vars.vars_base import load_configuration_file
from outsystems.file_helpers.file import store_data
# Exceptions
from outsystems.exceptions.osptool_error import OSPToolDeploymentError


# ############################################################# SCRIPT ##############################################################
def main(artifact_dir: str, dest_env: str, package_path: str, catalogmappings_path: str, osp_tool_path: str, credentials: str):
    """Deploy an OutSystems package to a target environment using the OSP Tool.

    Runs the OSP Tool, stores its execution log as a pipeline artifact and
    scans the log for known deployment error markers.

    Parameters:
        artifact_dir: folder where the execution log artifact is stored.
        dest_env: target environment name (or hostname in Airgap mode).
        package_path: path of the package (.osp/.oap) to deploy.
        catalogmappings_path: optional catalog mappings file path.
        osp_tool_path: path to the OSP Tool executable.
        credentials: "<user> <password>" pair passed to the OSP Tool.

    Raises:
        OSPToolDeploymentError: if any known error marker is found in the log.
    """

    # Get solution file name from path
    solution_file = os.path.split(package_path)[1]

    print("Starting deployment of '{}' into '{}' environment...".format(solution_file, dest_env), flush=True)

    # Call OSP Tool
    return_code, execution_log = call_osptool(osp_tool_path, package_path, dest_env, credentials, catalogmappings_path)

    # Split the output into lines
    execution_log = execution_log.splitlines()

    # Stores the execution log
    filename = "{}{}".format(solution_file, SOLUTIONS_DEPLOY_FILE)
    filename = os.path.join(SOLUTIONS_FOLDER, filename)
    store_data(artifact_dir, filename, execution_log)

    # Known markers that indicate a failed/incomplete deployment in the OSP Tool log
    error_validation_list = ['Incompatible Dependency', 'Execution Plan Abort', 'Outdated Consumer', 'Missing Configuration']

    # Validate the presence of each error validation
    deploy_error_flag = False
    for error_validation in error_validation_list:
        existing_error_list = [s for s in execution_log if error_validation in s]
        if existing_error_list:
            deploy_error_flag = True
            print(f'\nFound "{error_validation}" validation:')
            for error in existing_error_list:
                print(f'  - {error}')

    if deploy_error_flag:
        # Exit script with error
        raise OSPToolDeploymentError(
            "OSP Tool Deployment finished with errors. Please check the logs for further details.")


if __name__ == "__main__":
    # Argument menu / parsing
    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--artifacts", type=str, default=ARTIFACT_FOLDER,
                        help="Name of the artifacts folder. Default: \"Artifacts\"")
    parser.add_argument("-d", "--destination_env", type=str, required=True,
                        help="Name, as displayed in LifeTime, of the destination environment where you want to deploy the apps. (if in Airgap mode should be the hostname of the destination environment where you want to deploy the apps)")
    parser.add_argument("-p", "--package_path", type=str, required=True,
                        help="Package file path")
    parser.add_argument("-c", "--catalogmappings_path", type=str,
                        help="(Optional) Catalog mappings file path")
    parser.add_argument("-o", "--osp_tool_path", type=str, required=True,
                        help="OSP Tool file path")
    parser.add_argument("-user", "--osptool_user", type=str, required=True,
                        help="Username with privileges to deploy applications on target environment")
    parser.add_argument("-pwd", "--osptool_pwd", type=str, required=True,
                        help="Password of the Username with privileges to deploy applications on target environment")
    parser.add_argument("-cf", "--config_file", type=str,
                        help="Config file path. Contains configuration values to override the default ones.")

    args = parser.parse_args()

    # Load config file if exists
    if args.config_file:
        load_configuration_file(args.config_file)
    # Parse the artifact directory
    artifact_dir = args.artifacts
    # Parse the package path
    package_path = args.package_path
    # Parse the Catalog Mapping path
    catalogmappings_path = args.catalogmappings_path
    # Parse Destination Environment
    dest_env = args.destination_env
    # Parse OSP Tool path
    osp_tool_path = args.osp_tool_path
    # Parse Credentials for OSP Tool
    credentials = args.osptool_user + " " + args.osptool_pwd

    # Calls the main script
    main(artifact_dir, dest_env, package_path, catalogmappings_path, osp_tool_path, credentials)
+if "WORKSPACE" in os.environ: + sys.path.append(os.environ['WORKSPACE']) +else: # Else just add the project dir + sys.path.append(os.getcwd()) + +# Custom Modules +# Variables +from outsystems.vars.file_vars import ARTIFACT_FOLDER, ENVIRONMENT_SOURCECODE_FOLDER, ENVIRONMENT_SOURCECODE_DOWNLOAD_FILE +from outsystems.vars.lifetime_vars import LIFETIME_HTTP_PROTO, LIFETIME_API_ENDPOINT, LIFETIME_API_VERSION +from outsystems.vars.manifest_vars import MANIFEST_APPLICATION_VERSIONS, MANIFEST_FLAG_IS_TEST_APPLICATION, \ + MANIFEST_APPLICATION_NAME +from outsystems.vars.pipeline_vars import SOURCECODE_SLEEP_PERIOD_IN_SECS, SOURCECODE_TIMEOUT_IN_SECS, SOURCECODE_ONGOING_STATUS, \ + SOURCECODE_FINISHED_STATUS +from outsystems.vars.dotnet_vars import MS_BUILD_NAMESPACE, ASSEMBLY_BLACKLIST + +# Functions +from outsystems.lifetime.lifetime_base import build_lt_endpoint +from outsystems.lifetime.lifetime_environments import get_environment_app_source_code, get_environment_app_source_code_status, \ + get_environment_app_source_code_link, get_environment_key +from outsystems.lifetime.lifetime_applications import get_running_app_version +from outsystems.lifetime.lifetime_downloads import download_package +from outsystems.file_helpers.file import load_data +from outsystems.vars.vars_base import get_configuration_value, load_configuration_file + +# ############################################################# SCRIPT ############################################################## + + +# Extract content of downloaded source code package (one folder per application module) +def extract_package_content(file_path: str, include_all_refs: bool, remove_resources_files: bool): + module_count = 0 + with ZipFile(file_path, 'r') as zf: + # Iterate through the content of the source code package + for archive_name in zf.namelist(): + match = re.search(r'(.*)\.v\d+.zip$', archive_name) + # Each package will have one .zip file per module + if match: + module_name = match.group(1) + module_folder = 
os.path.join(os.path.dirname(file_path), "modules", module_name) + file_data = BytesIO(zf.read(archive_name)) + with ZipFile(file_data) as zf2: + # Extract generated source code of each module to a subfolder + zf2.extractall(path=module_folder) + + # Check if any post-processing action is needed over the extracted resources + if (include_all_refs or remove_resources_files): + process_csproj_files(module_name, module_folder, include_all_refs, remove_resources_files) + + # Update package modules count + module_count += 1 + + # Return number of modules inside the source code package + return module_count + + +# Return the list of .csproj relative paths referenced in the module .sln file +def find_csproj_files(module_name: str, module_folder: str): + + # Builds the solution full path + sln_file = os.path.join(module_folder, "{}.sln".format(module_name)) + + # Final list of csprojs relative path found in the solution file + csprojs = [] + + # Read module solution file + with open(sln_file, 'rb') as f: + line = f.readline() + while line: + line = f.readline().decode('utf-8') + if line.startswith("Project"): + # Gets line's second object (i.e: csproj relative path) + # Trim spaces and double quotes + csproj = line.split(",")[1].strip().strip('\"') + # Check if module's main csproj + is_main = csproj == "{}.csproj".format(module_name) + # Check if module's referencesProxy csproj + is_referencesProxy = csproj.startswith("referencesProxy") + # Append csproj details + csprojs.append({"RelativePath": csproj, "IsMain": is_main, "IsReferencesProxy": is_referencesProxy}) + + return csprojs + + +# Post-processing of .csproj files according to provided flags: +# --include_all_refs: Ensures that references proxy .csproj file reference all assemblies (.dll) available in the module bin folder +# --remove_resources_files: Removes embedded .resources files from main .csproj file (if existing) +def process_csproj_files(module_name: str, module_folder: str, include_all_refs: bool, 
remove_resources_files: bool): + + # Find .csproj files available in the provided module folder + csprojs = find_csproj_files(module_name, module_folder) + + # Build bin directory full path + bin_folder = os.path.join(module_folder, "bin") + + # Iterate through all csproj files for the current module + for csproj in csprojs: + # Build csproj file full path + csproj_file = os.path.join(module_folder, csproj["RelativePath"]) + + # Check if csproj is for the module's references proxy assembly + if csproj["IsReferencesProxy"] and include_all_refs: + # Read csproj file and identify first ItemGroup element + ET.register_namespace('', MS_BUILD_NAMESPACE) + tree = ET.parse(csproj_file) + itemgroup_elem = tree.find("./{val}ItemGroup".format(val='{' + MS_BUILD_NAMESPACE + '}')) + + # Iterate through all dlls found in the bin directory + for file in os.listdir(bin_folder): + if file.endswith(".dll"): + dll_name = os.path.splitext(file)[0] + # TODO: Use os.path.join instead + dll_relpath = os.path.join(os.path.relpath(module_folder, os.path.dirname(csproj_file)), 'bin', file) + + # Validate if dll already exists in the csproj + dll_exists = tree.find("./{val}ItemGroup/{val}Reference/{val}HintPath[.='{dll}']".format(val='{' + MS_BUILD_NAMESPACE + '}', dll=dll_relpath)) is not None + + # Continue if dll is csproj target assembly + # Continue if dll already exists in csproj + # Continue if dll exists in blacklist + if dll_name == os.path.splitext(os.path.basename(csproj_file))[0] or dll_exists or dll_name in ASSEMBLY_BLACKLIST: + continue + + # Create Reference structure + ref = ET.Element("Reference") + ref.set("Include", dll_name) + + # Create Name structure + ref_name = ET.Element("Name") + ref_name.text = dll_name + ref.append(ref_name) + + # Create HintPath structure + ref_hintpath = ET.Element("HintPath") + # Identify the relative path between the module's full dir and csproj file full dir + # Adds to the element text the dll relative path + ref_hintpath.text = dll_relpath 
+ ref.append(ref_hintpath) + + # Create Private structure + ref_private = ET.Element("Private") + ref_private.text = "False" + ref.append(ref_private) + + # Append new element to ItemGroup element + itemgroup_elem.append(ref) + + # Save newly added references to csproj file + tree.write(csproj_file) + + # Check if csproj is for the module's main assembly + elif csproj["IsMain"] and remove_resources_files: + # Read csproj file and find all ItemGroup elements + ET.register_namespace('', MS_BUILD_NAMESPACE) + tree = ET.parse(csproj_file) + itemgroup_elems = tree.findall("./{val}ItemGroup".format(val='{' + MS_BUILD_NAMESPACE + '}')) + + # Iterate through all ItemGroups to find out which has embedded resource files + for itemgroup in itemgroup_elems: + emb_resource_elems = itemgroup.findall("./{val}EmbeddedResource".format(val='{' + MS_BUILD_NAMESPACE + '}')) + if emb_resource_elems: + for emb_resource in emb_resource_elems: + # Remove every embedded resource element found + itemgroup.remove(emb_resource) + break + # Save changes to csproj file + tree.write(csproj_file) + + +def main(artifact_dir: str, lt_http_proto: str, lt_url: str, lt_api_endpoint: str, lt_api_version: int, lt_token: str, target_env: str, apps: list, trigger_manifest: dict, include_test_apps: bool, friendly_package_names: bool, include_all_refs: bool, remove_resources_files: bool): + + # Builds the LifeTime endpoint + lt_endpoint = build_lt_endpoint(lt_http_proto, lt_url, lt_api_endpoint, lt_api_version) + + # List of application names to fetch the source code from target environment + app_list = [] + + # Extract names from manifest file (when available) + if trigger_manifest: + for app in trigger_manifest[MANIFEST_APPLICATION_VERSIONS]: + if include_test_apps or not app[MANIFEST_FLAG_IS_TEST_APPLICATION]: + app_list.append(app[MANIFEST_APPLICATION_NAME]) + else: + app_list = apps + + for app_name in app_list: + # Request source code package creation + pkg_details = 
get_environment_app_source_code(artifact_dir, lt_endpoint, lt_token, env_name=target_env, app_name=app_name) + pkg_key = pkg_details["PackageKey"] + print("Source code package {} started being created for application {} deployed in {} environment.".format(pkg_key, app_name, target_env), flush=True) + + # Wait for package creation to finish + wait_counter = 0 + link_available = False + while wait_counter < get_configuration_value("SOURCECODE_TIMEOUT_IN_SECS", SOURCECODE_TIMEOUT_IN_SECS): + # Check current package status + pkg_status = get_environment_app_source_code_status(artifact_dir, lt_endpoint, lt_token, + env_name=target_env, app_name=app_name, pkg_key=pkg_key) + if pkg_status["Status"] == SOURCECODE_FINISHED_STATUS: + # Package was created successfully + link_available = True + break + elif pkg_status["Status"] == SOURCECODE_ONGOING_STATUS: + # Package is still being created. Go back to sleep. + sleep_value = get_configuration_value("SOURCECODE_SLEEP_PERIOD_IN_SECS", SOURCECODE_SLEEP_PERIOD_IN_SECS) + sleep(sleep_value) + wait_counter += sleep_value + print("{} secs have passed while source code package is being created...".format(wait_counter), flush=True) + else: + raise NotImplementedError("Unknown source code package status: {}.".format(pkg_status["Status"])) + + # When the package is created, download it using the provided key + if link_available: + print("Source code package {} created successfully.".format(pkg_key), flush=True) + pkg_link = get_environment_app_source_code_link(artifact_dir, lt_endpoint, lt_token, + env_name=target_env, app_name=app_name, pkg_key=pkg_key) + if friendly_package_names: + target_env_key = get_environment_key(artifact_dir, lt_endpoint, lt_token, target_env) + running_version = get_running_app_version(artifact_dir, lt_endpoint, lt_token, target_env_key, app_name=app_name) + file_name = "{}_v{}".format(app_name.replace(" ", "_"), running_version["Version"].replace(".", "_")) + if running_version["IsModified"]: + file_name += 
"+" + else: + file_name = pkg_key + file_name += ENVIRONMENT_SOURCECODE_DOWNLOAD_FILE + file_path = os.path.join(artifact_dir, ENVIRONMENT_SOURCECODE_FOLDER, file_name) + + download_package(file_path, lt_token, pkg_link["url"]) + print("Source code package {} downloaded successfully.".format(pkg_key), flush=True) + + # Extract source code for each module from downloaded package, applying post-processing actions (if requested) + module_count = extract_package_content(file_path, include_all_refs, remove_resources_files) + print("{} application modules processed successfully.".format(module_count), flush=True) + else: + print("Timeout expired while generating source code package {}. Unable to download source code for application {}.".format(pkg_key, app_name), flush=True) + + +# End of main() + + +if __name__ == "__main__": + # Argument menu / parsing + parser = argparse.ArgumentParser() + parser.add_argument("-a", "--artifacts", type=str, default=ARTIFACT_FOLDER, + help="Name of the artifacts folder. Default: \"Artifacts\"") + parser.add_argument("-lu", "--lt_url", type=str, required=True, + help="URL for LifeTime environment, without the API endpoint. Example: \"https://\"") + parser.add_argument("-lt", "--lt_token", type=str, required=True, + help="Token for LifeTime API calls.") + parser.add_argument("-e", "--lt_endpoint", type=str, default=LIFETIME_API_ENDPOINT, + help="(optional) Used to set the API endpoint for LifeTime, without the version. Default: \"lifetimeapi/rest\"") + parser.add_argument("-t", "--target_env", type=str, required=True, + help="Name, as displayed in LifeTime, of the target environment where to fetch the source code from.") + parser.add_argument("-l", "--app_list", type=str, + help="Comma separated list of apps you want to fetch. Example: \"App1,App2 With Spaces,App3_With_Underscores\"") + parser.add_argument("-f", "--manifest_file", type=str, + help="Manifest file (with JSON format). 
Contains required data used throughout the pipeline execution.") + parser.add_argument("-i", "--include_test_apps", action='store_true', + help="Flag that indicates if applications marked as \"Test Application\" in the manifest are fetched as well.") + parser.add_argument("-n", "--friendly_package_names", action='store_true', + help="Flag that indicates if downloaded source code packages should have a user-friendly name. Example: \"_v1_2_1\"") + parser.add_argument("-ref", "--include_all_refs", action='store_true', + help="Flag that indicates if all assemblies in the \"bin\" folder should be added as references in the .csproj file.") + parser.add_argument("-res", "--remove_resources_files", action='store_true', + help="Flag that indicates if embedded resources files should be removed from the .csproj file.") + parser.add_argument("-cf", "--config_file", type=str, + help="Config file path. Contains configuration values to override the default ones.") + + args = parser.parse_args() + + # Load config file if exists + if args.config_file: + load_configuration_file(args.config_file) + # Parse the artifact directory + artifact_dir = args.artifacts + # Parse the API endpoint + lt_api_endpoint = args.lt_endpoint + # Parse the LT Url and split the LT hostname from the HTTP protocol + # Assumes the default HTTP protocol = https + lt_http_proto = LIFETIME_HTTP_PROTO + lt_url = args.lt_url + if lt_url.startswith("http://"): + lt_http_proto = "http" + lt_url = lt_url.replace("http://", "") + else: + lt_url = lt_url.replace("https://", "") + if lt_url.endswith("/"): + lt_url = lt_url[:-1] + # Parte LT API Version + lt_version = LIFETIME_API_VERSION + # Parse the LT Token + lt_token = args.lt_token + # Parse Target Environment + target_env = args.target_env + # Check if either an app list or a manifest file is being provided + if not args.app_list and not args.manifest_file: + parser.error("either --app_list or --manifest_file must be provided as arguments") + # Use Trigger 
Manifest (if available) + if args.manifest_file: + # Parse Trigger Manifest artifact + trigger_manifest = load_data("", args.manifest_file) + apps = None + else: + trigger_manifest = None + # Parse App list + _apps = args.app_list + apps = _apps.split(',') + # Parse Include Test Apps flag + include_test_apps = args.include_test_apps + # Parse Friendly Package Names flag + friendly_package_names = args.friendly_package_names + # Parse Include All References flag + include_all_refs = args.include_all_refs + # Parse Remove Resources Files flag + remove_resources_files = args.remove_resources_files + + # Calls the main script + main(artifact_dir, lt_http_proto, lt_url, lt_api_endpoint, lt_version, lt_token, target_env, apps, trigger_manifest, include_test_apps, friendly_package_names, include_all_refs, remove_resources_files) # type: ignore diff --git a/outsystems/pipeline/fetch_lifetime_solution_from_manifest.py b/outsystems/pipeline/fetch_lifetime_solution_from_manifest.py new file mode 100644 index 0000000..3d8e911 --- /dev/null +++ b/outsystems/pipeline/fetch_lifetime_solution_from_manifest.py @@ -0,0 +1,197 @@ +# Python Modules +import sys +import os +import argparse +from time import sleep +import json + +# Workaround for Jenkins: +# Set the path to include the outsystems module +# Jenkins exposes the workspace directory through env. 
+if "WORKSPACE" in os.environ: + sys.path.append(os.environ['WORKSPACE']) +else: # Else just add the project dir + sys.path.append(os.getcwd()) + +# Custom Modules +# Variables +from outsystems.vars.file_vars import ARTIFACT_FOLDER, SOLUTIONS_OSP_FILE, SOLUTIONS_FOLDER +from outsystems.vars.lifetime_vars import LIFETIME_HTTP_PROTO, LIFETIME_API_ENDPOINT, LIFETIME_API_VERSION +from outsystems.vars.manifest_vars import MANIFEST_APPLICATION_VERSIONS, MANIFEST_APPLICATION_KEY, MANIFEST_FLAG_IS_TEST_APPLICATION, MANIFEST_APPLICATION_NAME +from outsystems.vars.pipeline_vars import SOLUTION_TIMEOUT_IN_SECS, SOLUTION_SLEEP_PERIOD_IN_SECS, SOLUTION_CREATED_STATUS, \ + SOLUTION_READY_STATUS, SOLUTION_GATHERING_DEPENDENCIES_STATUS, SOLUTION_GETTING_BINARIES_STATUS, SOLUTION_GENERATING_META_MODEL_STATUS, \ + SOLUTION_GENERATING_SOLUTION_STATUS, SOLUTION_COMPLETED_STATUS, SOLUTION_ABORTED_STATUS + +# Functions +from outsystems.file_helpers.file import load_data, bytes_human_readable_size +from outsystems.lifetime.lifetime_solutions import create_solution, get_solution_status, get_solution_url +from outsystems.lifetime.lifetime_base import build_lt_endpoint +from outsystems.lifetime.lifetime_downloads import download_package +from outsystems.manifest.manifest_base import get_environment_details +from outsystems.vars.vars_base import get_configuration_value, load_configuration_file +# Exceptions +from outsystems.exceptions.manifest_does_not_exist import ManifestDoesNotExistError + + +# ############################################################# SCRIPT ############################################################## +# Get a formatted status message for the different statuses of a solution generation process. 
# Get a formatted status message for the different statuses of a solution generation process.
def get_status_message(status: str):
    """Return a human-readable description for a solution generation status.

    Unknown statuses produce "Unknown status: <status>".
    """
    status_messages = {
        SOLUTION_CREATED_STATUS: "An empty solution was created in the system - it does not contain any associated application.",
        SOLUTION_READY_STATUS: "All included applications were added to the database and the solution is ready to be processed.",
        SOLUTION_GATHERING_DEPENDENCIES_STATUS: "Calculating all the dependencies for the solution.",
        SOLUTION_GETTING_BINARIES_STATUS: "Getting the binaries for each module included in the solution.",
        SOLUTION_GENERATING_META_MODEL_STATUS: "Building the solution package manifest.",
        SOLUTION_GENERATING_SOLUTION_STATUS: "Creating the solution file."
    }

    return status_messages.get(status, "Unknown status: " + status)


def main(artifact_dir: str, lt_http_proto: str, lt_url: str, lt_api_endpoint: str, lt_api_version: int, lt_token: str, source_env_label: str, include_test_apps: bool, solution_name: str, include_refs: bool, trigger_manifest: dict):
    """Create and download a solution package from a source environment, based on manifest data.

    Requests a solution containing the applications listed in the trigger
    manifest, polls the generation status until completion/abort/timeout and
    downloads the resulting package to the artifacts folder.
    Exits the process with status 1 when the solution generation is aborted.
    """

    # Builds the LifeTime endpoint
    lt_endpoint = build_lt_endpoint(lt_http_proto, lt_url, lt_api_endpoint, lt_api_version)

    # Tuple with (EnvName, EnvKey): src_env_tuple[0] = EnvName; src_env_tuple[1] = EnvKey
    env_tuple = get_environment_details(trigger_manifest, source_env_label)

    # Retrieve the app keys from the manifest content
    application_keys = [app[MANIFEST_APPLICATION_KEY] for app in trigger_manifest.get(MANIFEST_APPLICATION_VERSIONS, []) if include_test_apps or not app.get(MANIFEST_FLAG_IS_TEST_APPLICATION)]

    # Send request to create a solution with the given app keys
    solution_key = create_solution(artifact_dir, lt_endpoint, lt_token, env_tuple[1], solution_name, application_keys, include_refs)

    # Wait for solution package creation to finish
    wait_counter = 0
    package_url_available = False
    check_status = None
    IN_PROGRESS_STATUS = [SOLUTION_CREATED_STATUS, SOLUTION_READY_STATUS, SOLUTION_GATHERING_DEPENDENCIES_STATUS,
                          SOLUTION_GETTING_BINARIES_STATUS, SOLUTION_GENERATING_META_MODEL_STATUS,
                          SOLUTION_GENERATING_SOLUTION_STATUS]

    # Retrieve the app names from the manifest content
    application_names = [app[MANIFEST_APPLICATION_NAME] for app in trigger_manifest.get(MANIFEST_APPLICATION_VERSIONS, []) if include_test_apps or not app.get(MANIFEST_FLAG_IS_TEST_APPLICATION)]

    # Print information about the solution package
    print("A solution package will be created from '{}', containing the latest version of each module from the following applications:".format(env_tuple[0]), flush=True)

    for app_name in application_names:
        print(" - {} ".format(app_name), flush=True)

    # Print additional information if include_refs is True
    if include_refs:
        print("Producer modules will also be included in the solution package", flush=True)

    print("Start creation of '{}' package:".format(solution_name), flush=True)
    while wait_counter < get_configuration_value("SOLUTION_TIMEOUT_IN_SECS", SOLUTION_TIMEOUT_IN_SECS):
        # Check current package status
        solution_status = get_solution_status(artifact_dir, lt_endpoint, lt_token, env_tuple[1], solution_key)
        if solution_status["Status"] == SOLUTION_COMPLETED_STATUS:
            # Package was created successfully
            package_url_available = True
            break
        elif solution_status["Status"] == SOLUTION_ABORTED_STATUS:
            print(" - {}. Reason: {}".format(solution_status["Status"], solution_status["StatusReason"]), flush=True)
            sys.exit(1)
        elif solution_status["Status"] in IN_PROGRESS_STATUS:
            # Solution package is still being created. Go back to sleep.
            sleep_value = get_configuration_value("SOLUTION_SLEEP_PERIOD_IN_SECS", SOLUTION_SLEEP_PERIOD_IN_SECS)
            sleep(sleep_value)
            wait_counter += sleep_value
            # Only print a status line when the status changes, to keep the log readable
            if check_status != solution_status["Status"]:
                print(" - {} - {}".format(solution_status["Status"], get_status_message(solution_status["Status"])), flush=True)
                check_status = solution_status["Status"]
        else:
            raise NotImplementedError("Unknown solution code status: {}.".format(solution_status["Status"]))

    # When the package is created, download it using the provided key
    if package_url_available:
        print("Solution package {} created successfully.".format(solution_key), flush=True)
        solution_url = get_solution_url(artifact_dir, lt_endpoint, lt_token, env_tuple[1], solution_key)

        file_name = solution_name + SOLUTIONS_OSP_FILE
        file_path = os.path.join(artifact_dir, SOLUTIONS_FOLDER, file_name)
        download_package(file_path, lt_token, solution_url)

        print("Solution package successfully downloaded as '{}' ({}).".format(file_name, bytes_human_readable_size(os.path.getsize(file_path))), flush=True)
    else:
        print("Timeout expired while generating solution package {}.".format(solution_key), flush=True)

# End of main()
Default: 2") + parser.add_argument("-e", "--lt_endpoint", type=str, default=LIFETIME_API_ENDPOINT, + help="(Optional) Used to set the API endpoint for LifeTime, without the version. Default: \"lifetimeapi/rest\"") + parser.add_argument("-s", "--source_env_label", type=str, required=True, + help="Label, as configured in the manifest, of the source environment where the apps are.") + parser.add_argument("-i", "--include_test_apps", action='store_true', + help="Flag that indicates if applications marked as \"Test Application\" in the manifest are included in the solution.") + parser.add_argument("-m", "--trigger_manifest", type=str, + help="Manifest artifact (in JSON format) received when the pipeline is triggered. Contains required data used throughout the pipeline execution.") + parser.add_argument("-f", "--manifest_file", type=str, + help="Manifest file (with JSON format). Contains required data used throughout the pipeline execution.") + parser.add_argument("-sn", "--solution_name", type=str, required=True, + help="Name of the solution package that will be created.") + parser.add_argument("-r", "--include_refs", action='store_true', + help="Flag that indicates whether to include producer modules in the solution.") + parser.add_argument("-cf", "--config_file", type=str, + help="Config file path. 
Contains configuration values to override the default ones.") + + args = parser.parse_args() + + # Load config file if exists + if args.config_file: + load_configuration_file(args.config_file) + # Parse the artifact directory + artifact_dir = args.artifacts + # Parse the API endpoint + lt_api_endpoint = args.lt_endpoint + # Parse the LT Url and split the LT hostname from the HTTP protocol + # Assumes the default HTTP protocol = https + lt_http_proto = LIFETIME_HTTP_PROTO + lt_url = args.lt_url + if lt_url.startswith("http://"): + lt_http_proto = "http" + lt_url = lt_url.replace("http://", "") + else: + lt_url = lt_url.replace("https://", "") + if lt_url.endswith("/"): + lt_url = lt_url[:-1] + # Parse LT API Version + lt_version = args.lt_api_version + # Parse the LT Token + lt_token = args.lt_token + # Parse Source Environment + source_env_label = args.source_env_label + # Parse Include Test Apps flag + include_test_apps = args.include_test_apps + # Parse Solution Name + solution_name = args.solution_name + # Parse Include References flag + include_refs = args.include_refs + + # Validate Manifest is being passed either as JSON or as file + if not args.trigger_manifest and not args.manifest_file: + raise ManifestDoesNotExistError("The manifest was not provided as JSON or as a file. 
Aborting!") + + # Parse Trigger Manifest artifact + if args.manifest_file: + trigger_manifest_path = os.path.split(args.manifest_file) + trigger_manifest = load_data(trigger_manifest_path[0], trigger_manifest_path[1]) + else: + trigger_manifest = json.loads(args.trigger_manifest) + + # Calls the main script + main(artifact_dir, lt_http_proto, lt_url, lt_api_endpoint, lt_version, lt_token, source_env_label, include_test_apps, solution_name, include_refs, trigger_manifest) diff --git a/outsystems/vars/dotnet_vars.py b/outsystems/vars/dotnet_vars.py new file mode 100644 index 0000000..2f88c95 --- /dev/null +++ b/outsystems/vars/dotnet_vars.py @@ -0,0 +1,3 @@ +# DotNet specific +MS_BUILD_NAMESPACE = 'http://schemas.microsoft.com/developer/msbuild/2003' +ASSEMBLY_BLACKLIST = ["System.ComponentModel.Annotations"] diff --git a/outsystems/vars/file_vars.py b/outsystems/vars/file_vars.py index d9db837..9faf1b8 100644 --- a/outsystems/vars/file_vars.py +++ b/outsystems/vars/file_vars.py @@ -25,8 +25,12 @@ ENVIRONMENT_VERSIONS_FILE = ".versions.cache" ENVIRONMENT_DEPLOYMENT_ZONES_FILE = ".deploymentzones.cache" ENVIRONMENT_FOLDER = "environment_data" +ENVIRONMENT_SOURCECODE_FOLDER = "sourcecode_data" +ENVIRONMENT_SOURCECODE_STATUS_FILE = ".status.cache" +ENVIRONMENT_SOURCECODE_LINK_FILE = ".link.cache" +ENVIRONMENT_SOURCECODE_DOWNLOAD_FILE = ".source.zip" -# Environments vars +# Deployments vars DEPLOYMENTS_FILE = "deployments.cache" DEPLOYMENT_FILE = ".cache" DEPLOYMENT_PLAN_FILE = ".plan.cache" @@ -57,3 +61,10 @@ # AirGap vars DEPLOYMENT_ORDER_FILE = "sorted_oap.list" + +# Solutions vars +SOLUTIONS_OSP_FILE = ".osp" +SOLUTIONS_LINK_FILE = ".link.cache" +SOLUTIONS_STATUS_FILE = ".status.cache" +SOLUTIONS_DEPLOY_FILE = ".deploy.cache" +SOLUTIONS_FOLDER = "solution_data" diff --git a/outsystems/vars/lifetime_vars.py b/outsystems/vars/lifetime_vars.py index 28223cc..da421f0 100644 --- a/outsystems/vars/lifetime_vars.py +++ b/outsystems/vars/lifetime_vars.py @@ -34,6 +34,7 @@ # 
Deployments Endpoint Variables # Deployment list specific +DEPLOYMENT_ENDPOINT = "deployment" DEPLOYMENTS_ENDPOINT = "deployments" DEPLOYMENTS_SUCCESS_CODE = 200 DEPLOYMENTS_EMPTY_CODE = 204 @@ -96,3 +97,33 @@ ENVIRONMENT_ZONES_NO_PERMISSION_CODE = 403 ENVIRONMENT_ZONES_NOT_FOUND = 404 ENVIRONMENT_ZONES_FAILED_CODE = 500 +# Environment application source code specific +ENVIRONMENT_APPLICATIONS_SOURCECODE_ENDPOINT = "sourcecodeaccess" +ENVIRONMENT_SOURCECODE_LINK_SUCCESS_CODE = 200 +ENVIRONMENT_SOURCECODE_PACKAGE_SUCCESS_CODE = 201 +ENVIRONMENT_SOURCECODE_FAILED_CODE = 500 +# Solutions specific +ENVIRONMENT_SOLUTION_ENDPOINT = "solution" +ENVIRONMENT_SOLUTION_SUCCESS_CODE = 200 +ENVIRONMENT_SOLUTION_NOT_STATUS_CODE = 400 +ENVIRONMENT_SOLUTION_NO_PERMISSION_CODE = 403 +ENVIRONMENT_SOLUTION_NOT_FOUND = 404 +ENVIRONMENT_SOLUTION_FAILED_CODE = 500 +# Solutions status specific +ENVIRONMENT_SOLUTION_STATUS_ENDPOINT = "solutionstatus" +ENVIRONMENT_SOLUTION_STATUS_SUCCESS_CODE = 200 +ENVIRONMENT_SOLUTION_STATUS_NOT_STATUS_CODE = 400 +ENVIRONMENT_SOLUTION_STATUS_NO_PERMISSION_CODE = 403 +ENVIRONMENT_SOLUTION_STATUS_NOT_FOUND = 404 +ENVIRONMENT_SOLUTION_STATUS_FAILED_CODE = 500 +# Solutions link specific +ENVIRONMENT_SOLUTION_LINK_SUCCESS_CODE = 200 +ENVIRONMENT_SOLUTION_LINK_FAILED_CODE = 400 + +# Downloads Endpoint Variables +DOWNLOADS_ENDPOINT = "downloads" +DOWNLOAD_SUCCESS_CODE = 200 +DOWNLOAD_INVALID_KEY_CODE = 400 +DOWNLOAD_NO_PERMISSION_CODE = 403 +DOWNLOAD_NOT_FOUND = 404 +DOWNLOAD_FAILED_CODE = 500 diff --git a/outsystems/vars/pipeline_vars.py b/outsystems/vars/pipeline_vars.py index 5755fec..468db47 100644 --- a/outsystems/vars/pipeline_vars.py +++ b/outsystems/vars/pipeline_vars.py @@ -17,3 +17,21 @@ # Application specific variables MAX_VERSIONS_TO_RETURN = 10 TAG_APP_MAX_RETRIES = 5 + +# Environment specific variables +SOURCECODE_TIMEOUT_IN_SECS = 3600 +SOURCECODE_SLEEP_PERIOD_IN_SECS = 10 +SOURCECODE_ONGOING_STATUS = "InProgress" +SOURCECODE_FINISHED_STATUS = 
"Done" + +# Solutions specific variables +SOLUTION_TIMEOUT_IN_SECS = 3600 +SOLUTION_SLEEP_PERIOD_IN_SECS = 2 +SOLUTION_CREATED_STATUS = "Created" +SOLUTION_READY_STATUS = "Ready" +SOLUTION_GATHERING_DEPENDENCIES_STATUS = "Gathering Dependencies" +SOLUTION_GETTING_BINARIES_STATUS = "Getting Binaries" +SOLUTION_GENERATING_META_MODEL_STATUS = "Generating Meta Model" +SOLUTION_GENERATING_SOLUTION_STATUS = "Generating Solution" +SOLUTION_COMPLETED_STATUS = "Completed" +SOLUTION_ABORTED_STATUS = "Aborted" diff --git a/setup.py b/setup.py index 2d73eec..d1fdc76 100644 --- a/setup.py +++ b/setup.py @@ -11,32 +11,34 @@ What's new ========== -**Config File Support** - Load configuration values from a custom file to override default values. To use this feature, use the new `--config_file` parameter to specify the configuration file path. - This enhancement is available in the following scripts: - - * `apply_configuration_values_to_target_env.py` - * `continue_deployment_to_target_env.py` - * `deploy_apps_to_target_env_with_airgap.py` - * `deploy_latest_tags_to_target_env.py` - * `deploy_tags_to_target_env_with_manifest.py` - * `evaluate_test_results.py` - * `fetch_apps_packages.py` - * `fetch_lifetime_data.py` - * `scan_test_endpoints.py` - * `start_deployment_to_target_env.py` - * `tag_apps_based_on_manifest_data.py` - * `tag_modified_apps.py` - -**SSL Certificate Verification** - The Python `requests` module verifies SSL certificates for HTTPS requests. - Now there's a flag to enable (default value) or disable SSL certificate verification. - -**Fetch Technical Debt** - Enhanced the `fetch_tech_debt` script to prevent failures when all modules of an app are marked as 'ignored' in AI Mentor Studio and when an app has no security findings. 
- -**Tag Modified Applications** - Updated `tag_modified_apps` script to tag applications based on a app_list parameter or from the trigger_manifest artifact +**Download Application Source Code** + + A new script was added to download platform-generated source code: + + * `fetch_apps_source_code.py` + + Use the following parameters to generate more human-readable outputs and facilitate the compilation of the source code: + + * --friendly_package_names: source code packages with user-friendly names. + * --include_all_refs: adds to .csproj file all assemblies in the bin folder as references. + * --remove_resources_files: removes references to embedded resources files from the .csproj file. + +**Solution Download and Deploy** + + Added new functions to leverage the recently released/improved APIs to download and deploy outsystems packages: + + * `fetch_lifetime_solution_from_manifest.py` - downloads a solution file based on manifest data. + * `deploy_package_to_target_env.py` - deploys an outsystems package (solution or application) to a target environment. + * `deploy_package_to_target_env_with_osptool.py` - deploys an outsystems package (solution or application) using OSP Tool. + +**Improved OSPTool Operations** + + OSPTool command line calls now have live output callback and catalog mapping support. + +**Updated Package Dependencies** + + * Updated python-dateutil dependency to version 2.9.0.post0 + * Updated python-dotenv dependency to version 1.0.1 Installing and upgrading ======================== @@ -72,12 +74,12 @@ ] REQUIREMENTS = [ - 'python-dateutil==2.8.2', + 'python-dateutil==2.9.0.post0', 'requests==2.31.0', 'unittest-xml-reporting==3.2.0', 'xunitparser==1.3.4', 'toposort==1.10', - 'python-dotenv==1.0.0' + 'python-dotenv==1.0.1' ] PACKAGES = [