From 69765f24b6eb9a9e12a5ae1d6637e657688f6eb8 Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sat, 7 Dec 2024 20:22:18 -0600 Subject: [PATCH 01/18] Remove cloning scripts for address and security profile groups. This commit deletes the scripts responsible for cloning address groups and security profile groups, along with their helper functions. The related code in `clone_address_groups.py`, `clone_security_profile_groups.py`, and `helpers.py` has been removed. Additionally, updates to `poetry.lock` reflect changes in dependencies, ensuring compatibility with existing packages. --- poetry.lock | 346 ++++++++++++------ pyproject.toml | 2 +- scm_config_clone/commands/__init__.py | 3 - .../commands/clone_address_groups.py | 81 ---- .../commands/clone_address_objects.py | 82 ----- .../commands/clone_security_profile_groups.py | 82 ----- .../commands/create_secrets_file.py | 64 +++- scm_config_clone/commands/objects/__init__.py | 3 + scm_config_clone/commands/objects/address.py | 169 +++++++++ .../commands/security_services/__init__.py | 0 scm_config_clone/main.py | 24 +- scm_config_clone/utilities/__init__.py | 3 + scm_config_clone/utilities/helpers.py | 218 ----------- .../{config => utilities}/settings.py | 0 14 files changed, 477 insertions(+), 600 deletions(-) delete mode 100644 scm_config_clone/commands/clone_address_groups.py delete mode 100644 scm_config_clone/commands/clone_address_objects.py delete mode 100644 scm_config_clone/commands/clone_security_profile_groups.py create mode 100644 scm_config_clone/commands/objects/__init__.py create mode 100644 scm_config_clone/commands/objects/address.py create mode 100644 scm_config_clone/commands/security_services/__init__.py delete mode 100644 scm_config_clone/utilities/helpers.py rename scm_config_clone/{config => utilities}/settings.py (100%) diff --git a/poetry.lock b/poetry.lock index cacff00..6f5e3f3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,15 @@ -# This file is automatically @generated by 
Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] [[package]] name = "babel" @@ -73,75 +84,78 @@ files = [ [[package]] name = "cffi" -version = "1.15.1" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = 
"cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = 
"cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = 
"cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = 
"cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file 
= "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file 
= "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = 
"cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -651,13 +665,13 @@ files = [ [[package]] name = "oauthlib" -version = "3.2.0" +version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" optional = false python-versions = ">=3.6" files = [ - {file = "oauthlib-3.2.0-py3-none-any.whl", hash = "sha256:6db33440354787f9b7f3a6dbd4febf5d0f93758354060e802f6c06cb493022fe"}, - {file = "oauthlib-3.2.0.tar.gz", hash = "sha256:23a8208d75b902797ea29fd31fa80a15ed9dc2c6c16fe73f5d346f83f6fa27a2"}, + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, ] [package.extras] 
@@ -692,29 +706,23 @@ dev = ["pytest", "tox"] lint = ["black"] [[package]] -name = "panapi" -version = "0.0.1" -description = "A lightweight SDK for the Panorama Cloud API" +name = "pan-scm-sdk" +version = "0.3.4" +description = "Python SDK for Palo Alto Networks Strata Cloud Manager." optional = false -python-versions = ">=3.6" +python-versions = "<4.0,>=3.10" files = [ - {file = "panapi-0.0.1-py3-none-any.whl", hash = "sha256:75ca74eefc75dd81756ebf9b8268150660714ff865c381d7cc52a60bd2bb02aa"}, - {file = "panapi-0.0.1.tar.gz", hash = "sha256:9cfe7d18bff4e3fba75d068d42c2392a7576690e4b53a825c3fe874a081d04c7"}, + {file = "pan_scm_sdk-0.3.4-py3-none-any.whl", hash = "sha256:22265b8817fdf3c56ba34706c4fa9f0c3b32b65b2edad149ece75ccf7dc88e0d"}, + {file = "pan_scm_sdk-0.3.4.tar.gz", hash = "sha256:de417b6c785c87cac63e9d775d171c9d4860fa3e2326cbda991e18a4c9b6dfa4"}, ] [package.dependencies] -certifi = "2022.6.15" -cffi = "1.15.1" -charset-normalizer = "2.1.1" -cryptography = "37.0.4" -idna = "3.3" -oauthlib = "3.2.0" -pycparser = "2.21" -PyJWT = "2.4.0" -PyYAML = "6.0" -requests = "2.28.1" -requests-oauthlib = "1.3.1" -urllib3 = "1.26.12" +cryptography = ">=37.0.4,<38.0.0" +oauthlib = ">=3.2.2,<4.0.0" +pydantic = ">=2.9.2,<3.0.0" +pyjwt = ">=2.9.0,<3.0.0" +requests-oauthlib = ">=2.0.0,<3.0.0" +setuptools = ">=75.1.0,<76.0.0" [[package]] name = "pathspec" @@ -771,15 +779,147 @@ files = [ [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "2.10.3" +description = "Data validation using Python type hints" +optional = false 
+python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, + {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, ] +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.27.1" +typing-extensions = ">=4.12.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.27.1" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, + {file = 
"pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, + {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, + {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, + {file = 
"pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, + {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, + {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, + {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, + {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, + {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, + {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, + {file = 
"pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, + {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, + {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, + {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, + {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, + {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, + 
{file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, + {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, + {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = 
"sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, + {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pyflakes" version = "3.2.0" @@ -807,18 +947,18 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyjwt" -version = "2.4.0" +version = "2.10.1" description = "JSON Web Token implementation in Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" files = [ - {file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"}, - {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"}, + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = 
"sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, ] [package.extras] -crypto = ["cryptography (>=3.3.1)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.3.1)", "mypy", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] @@ -1065,13 +1205,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-oauthlib" -version = "1.3.1" +version = "2.0.0" description = "OAuthlib authentication support for Requests." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.4" files = [ - {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, - {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, ] [package.dependencies] @@ -1259,4 +1399,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "90e25c58cf27db2813c4bd7864a38f78bbf2b5876c9787fbd63afbbe3286d270" +content-hash = "4395d5a7621483475d98da6d3d3022bd3379432f80c68b0cb8a78a8ac676a648" diff --git a/pyproject.toml b/pyproject.toml index a9b0177..7437759 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,10 +8,10 @@ readme = "README.md" [tool.poetry.dependencies] python = "^3.10" -panapi = "^0.0.1" dynaconf = 
"^3.2.6" typer = "^0.12.5" setuptools = "^75.1.0" +pan-scm-sdk = "^0.3.4" [tool.poetry.group.dev.dependencies] black = "^24.10.0" diff --git a/scm_config_clone/commands/__init__.py b/scm_config_clone/commands/__init__.py index fa64ba5..90b98af 100644 --- a/scm_config_clone/commands/__init__.py +++ b/scm_config_clone/commands/__init__.py @@ -6,7 +6,4 @@ This package contains the commands used in the SCM Config Clone CLI application. """ -from .clone_address_objects import clone_address_objects -from .clone_address_groups import clone_address_groups -from .clone_security_profile_groups import clone_security_profile_groups from .create_secrets_file import create_secrets_file diff --git a/scm_config_clone/commands/clone_address_groups.py b/scm_config_clone/commands/clone_address_groups.py deleted file mode 100644 index 7edf2a4..0000000 --- a/scm_config_clone/commands/clone_address_groups.py +++ /dev/null @@ -1,81 +0,0 @@ -# scm_config_clone/commands/clone_address_groups.py - -import logging -import typer -from panapi.config.objects import AddressGroup - -from scm_config_clone.config.settings import load_settings -from scm_config_clone.utilities.helpers import ( - authenticate_scm, - create_scm_address_groups, -) - -logger = logging.getLogger(__name__) - -def clone_address_groups( - settings_file: str = typer.Option( - ".secrets.yaml", - "--settings-file", - "-s", - help="Path to the settings YAML file.", - ), -): - """ - Clone address groups from the source to the destination SCM tenant. - - Authenticates with both source and destination tenants, retrieves address groups from the source, - and creates them in the destination tenant. - - Args: - settings_file (str): Path to the YAML settings file. - - Error: - typer.Exit: Exits the application if an error occurs during the process. 
- - Return: - None - """ - typer.echo("Starting address groups migration...") - - # Load settings - settings = load_settings(settings_file) - - # Authenticate with source tenant - try: - source_session = authenticate_scm(settings["source_scm"]) - except Exception as e: - logger.error(f"Error authenticating with source tenant: {e}") - raise typer.Exit(code=1) - - # Retrieve address groups from source - try: - folder = {"folder": settings["source_scm"]["folder"]} - source_address_group = AddressGroup(**folder) - address_groups = source_address_group.list(source_session) - logger.info(f"Retrieved {len(address_groups)} address groups from source.") - except Exception as e: - logger.error(f"Error retrieving address groups from source: {e}") - raise typer.Exit(code=1) - - # Authenticate with destination tenant - try: - destination_session = authenticate_scm(settings["destination_scm"]) - except Exception as e: - logger.error(f"Error authenticating with destination tenant: {e}") - raise typer.Exit(code=1) - - # Create address groups in destination - try: - created_groups = create_scm_address_groups( - address_groups=address_groups, - folder=settings["destination_scm"]["folder"], - session=destination_session, - ) - logger.info( - f"Successfully created {len(created_groups)} address groups in destination." 
- ) - except Exception as e: - logger.error(f"Error creating address groups in destination: {e}") - raise typer.Exit(code=1) - - typer.echo("Address groups migration completed successfully.") \ No newline at end of file diff --git a/scm_config_clone/commands/clone_address_objects.py b/scm_config_clone/commands/clone_address_objects.py deleted file mode 100644 index b749930..0000000 --- a/scm_config_clone/commands/clone_address_objects.py +++ /dev/null @@ -1,82 +0,0 @@ -# scm_config_clone/commands/clone_address_objects.py - -import typer -import logging - -from scm_config_clone.utilities.helpers import ( - authenticate_scm, - create_scm_address_objects, -) -from scm_config_clone.config.settings import load_settings -from panapi.config.objects import Address - -logger = logging.getLogger(__name__) - - -def clone_address_objects( - settings_file: str = typer.Option( - ".secrets.yaml", - "--settings-file", - "-s", - help="Path to the settings YAML file.", - ), -): - """ - Clone address objects from the source to the destination SCM tenant. - - Authenticates with both source and destination tenants, retrieves address objects from the source, - and creates them in the destination tenant. - - Args: - settings_file (str): Path to the YAML settings file. - - Error: - typer.Exit: Exits the application if an error occurs during the process. 
- - Return: - None - """ - typer.echo("Starting address objects migration...") - - # Load settings - settings = load_settings(settings_file) - - # Authenticate with source tenant - try: - source_session = authenticate_scm(settings["source_scm"]) - except Exception as e: - logger.error(f"Error authenticating with source tenant: {e}") - raise typer.Exit(code=1) - - # Retrieve address objects from source - try: - folder = {"folder": settings["source_scm"]["folder"]} - source_address = Address(**folder) - address_objects = source_address.list(source_session) - logger.info(f"Retrieved {len(address_objects)} address objects from source.") - except Exception as e: - logger.error(f"Error retrieving address objects from source: {e}") - raise typer.Exit(code=1) - - # Authenticate with destination tenant - try: - destination_session = authenticate_scm(settings["destination_scm"]) - except Exception as e: - logger.error(f"Error authenticating with destination tenant: {e}") - raise typer.Exit(code=1) - - # Create address objects in destination - try: - created_objects = create_scm_address_objects( - address_objects=address_objects, - folder=settings["destination_scm"]["folder"], - session=destination_session, - ) - logger.info( - f"Successfully created {len(created_objects)} address objects in destination." 
- ) - except Exception as e: - logger.error(f"Error creating address objects in destination: {e}") - raise typer.Exit(code=1) - - typer.echo("Address objects migration completed successfully.") diff --git a/scm_config_clone/commands/clone_security_profile_groups.py b/scm_config_clone/commands/clone_security_profile_groups.py deleted file mode 100644 index 517e463..0000000 --- a/scm_config_clone/commands/clone_security_profile_groups.py +++ /dev/null @@ -1,82 +0,0 @@ -# scm_config_clone/commands/clone_security_profile_groups.py - -import typer -import logging - -from scm_config_clone.utilities.helpers import ( - authenticate_scm, - create_scm_security_profile_groups, -) -from scm_config_clone.config.settings import load_settings -from panapi.config.security import ProfileGroup - -logger = logging.getLogger(__name__) - - -def clone_security_profile_groups( - settings_file: str = typer.Option( - ".secrets.yaml", - "--settings-file", - "-s", - help="Path to the settings YAML file.", - ), -): - """ - Clone security profile groups from the source to the destination SCM tenant. - - Authenticates with both source and destination tenants, retrieves security profile groups from the source, - and creates them in the destination tenant. - - Args: - settings_file (str): Path to the YAML settings file. - - Error: - typer.Exit: Exits the application if an error occurs during the process. 
- - Return: - None - """ - typer.echo("Starting security profile groups migration...") - - # Load settings - settings = load_settings(settings_file) - - # Authenticate with source tenant - try: - source_session = authenticate_scm(settings["source_scm"]) - except Exception as e: - logger.error(f"Error authenticating with source tenant: {e}") - raise typer.Exit(code=1) - - # Retrieve security profile groups from source - try: - folder = {"folder": settings["source_scm"]["folder"]} - source_profile_group = ProfileGroup(**folder) - profile_groups = source_profile_group.list(source_session) - logger.info(f"Retrieved {len(profile_groups)} security profile groups from source.") - except Exception as e: - logger.error(f"Error retrieving security profile groups from source: {e}") - raise typer.Exit(code=1) - - # Authenticate with destination tenant - try: - destination_session = authenticate_scm(settings["destination_scm"]) - except Exception as e: - logger.error(f"Error authenticating with destination tenant: {e}") - raise typer.Exit(code=1) - - # Create security profile groups in destination - try: - created_profile_groups = create_scm_security_profile_groups( - profile_groups=profile_groups, - folder=settings["destination_scm"]["folder"], - session=destination_session, - ) - logger.info( - f"Successfully created {len(created_profile_groups)} security profile groups in destination." 
- ) - except Exception as e: - logger.error(f"Error creating security profile groups in destination: {e}") - raise typer.Exit(code=1) - - typer.echo("Security profile groups migration completed successfully.") diff --git a/scm_config_clone/commands/create_secrets_file.py b/scm_config_clone/commands/create_secrets_file.py index a1515ee..7bb6f1d 100644 --- a/scm_config_clone/commands/create_secrets_file.py +++ b/scm_config_clone/commands/create_secrets_file.py @@ -29,30 +29,58 @@ def create_secrets_file( Return: None """ - typer.echo("Creating authentication file...") + typer.echo("*" * 79 + "\nCreating authentication file called .secrets.yaml in the current directory\n") # Prompt user for credentials - typer.echo("Enter source Strata Cloud Manager credentials:") - source_client_id = typer.prompt("Source Client ID") - source_client_secret = typer.prompt("Source Client Secret", hide_input=True) - source_tsg = typer.prompt("Source Tenant TSG") - source_folder = typer.prompt("Source Folder", default="Prisma Access") - - typer.echo("Enter destination Strata Cloud Manager credentials:") - dest_client_id = typer.prompt("Destination Client ID") - dest_client_secret = typer.prompt("Destination Client Secret", hide_input=True) - dest_tsg = typer.prompt("Destination Tenant TSG") - dest_folder = typer.prompt("Destination Folder", default="Prisma Access") + typer.echo("-" * 79 + "\n\tEnter source SCM credentials (where are you cloning from?)\n" + "-" * 79) + source_client_id = typer.prompt( + default="example@1234567890.iam.panserviceaccount.com", + text="Source SCM Client ID\n", + show_default=True, + ) + source_client_secret = typer.prompt( + default="12345678-1234-1234-1234-123456789012", + hide_input=True, + show_default=True, + text="Source SCM Client Secret (input hidden)\n", + ) + source_tsg = typer.prompt( + default="1234567890", + show_default=True, + text="Source SCM Tenant TSG ID\n", + ) + source_folder = typer.prompt( + default="Prisma Access", + show_default=True, + 
text="Source Configuration Folder\n", + ) - token_url = typer.prompt( - "Token URL", - default="https://auth.apps.paloaltonetworks.com/oauth2/access_token", + typer.echo("\n" + "-" * 79 + "\n\tEnter destination SCM credentials (where are you cloning to?)\n" + "-" * 79) + dest_client_id = typer.prompt( + default="example@0987654321.iam.panserviceaccount.com", + text="Destination SCM Client ID\n", + show_default=True, + ) + dest_client_secret = typer.prompt( + default="87654321-4321-4321-4321-120987654321", + hide_input=True, + show_default=True, + text="Destination SCM Client Secret (input hidden)\n", + ) + dest_tsg = typer.prompt( + default="0987654321", + show_default=True, + text="Destination SCM Tenant TSG ID\n", + ) + dest_folder = typer.prompt( + default="Prisma Access", + show_default=True, + text="Destination Configuration Folder\n", ) # Build data dictionary data = { "oauth": { - "token_url": token_url, "source": { "client_id": source_client_id, "client_secret": source_client_secret, @@ -72,9 +100,9 @@ def create_secrets_file( try: with open(output_file, "w") as f: yaml.dump(data, f) - logger.info(f"Authentication file written to {output_file}") except Exception as e: logger.error(f"Error writing authentication file: {e}") raise typer.Exit(code=1) - typer.echo("Authentication file created successfully.") + typer.echo("\n" + "-" * 79 + f"\n\tAuthentication file created successfully `{output_file}`\n" + "-" * 79 + "\n") + typer.echo("*" * 79 ) diff --git a/scm_config_clone/commands/objects/__init__.py b/scm_config_clone/commands/objects/__init__.py new file mode 100644 index 0000000..86a0a1c --- /dev/null +++ b/scm_config_clone/commands/objects/__init__.py @@ -0,0 +1,3 @@ +# scm_config_clone/commands/objects/__init__.py + +from .address import clone_address_objects diff --git a/scm_config_clone/commands/objects/address.py b/scm_config_clone/commands/objects/address.py new file mode 100644 index 0000000..e184f86 --- /dev/null +++ 
b/scm_config_clone/commands/objects/address.py @@ -0,0 +1,169 @@ +# scm_config_clone/commands/objects/address.py + +import typer +import logging +from typing import List + +from scm.client import Scm +from scm.config.objects import Address +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm_config_clone.utilities.settings import load_settings + +logger = logging.getLogger(__name__) + + +def clone_address_objects( + settings_file: str = typer.Option( + ".secrets.yaml", + "--settings-file", + "-s", + help="Path to the settings YAML file.", + ), +): + """ + Clone address objects from the source to the destination SCM tenant using the pan-scm-sdk. + + Steps: + 1. Load SCM settings from a YAML file. + 2. Authenticate with both source and destination tenants using provided credentials. + 3. Retrieve all address objects from the source tenant. + 4. Create these address objects in the destination tenant. + 5. Commit the changes on the destination tenant. + + Args: + settings_file (str): Path to the YAML settings file. + + Errors: + typer.Exit: Exits the CLI if authentication, retrieval, creation, or commit fails. 
+ """ + typer.echo("Starting address objects migration...") + + # Load settings + settings = load_settings(settings_file) + + # --- Authenticate with source tenant --- + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tsg_id"], + log_level="INFO", + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tsg_id']}") + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # --- Retrieve address objects from source --- + try: + source_addresses = Address(source_client) + source_folder = source_creds["folder"] + address_objects = source_addresses.list(folder=source_folder) + logger.info( + f"Retrieved {len(address_objects)} address objects from source tenant." 
+ ) + except Exception as e: + logger.error(f"Error retrieving address objects from source: {e}") + raise typer.Exit(code=1) + + # --- Authenticate with destination tenant --- + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tsg_id"], + log_level="INFO", + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tsg_id']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # --- Create address objects in the destination --- + destination_addresses = Address(destination_client) + destination_folder = dest_creds["folder"] + + created_objects: List[str] = [] + for src_obj in address_objects: + # Build create parameters based on source object's attributes + create_params = { + "name": src_obj.name, + "folder": destination_folder, + "description": getattr(src_obj, "description", None), + "tag": getattr(src_obj, "tag", []), + } + + # Determine the address type and assign the appropriate field + if getattr(src_obj, "ip_netmask", None): + create_params["ip_netmask"] = src_obj.ip_netmask + elif getattr(src_obj, "fqdn", None): + create_params["fqdn"] = src_obj.fqdn + elif getattr(src_obj, "ip_range", None): + create_params["ip_range"] = src_obj.ip_range + elif getattr(src_obj, "ip_wildcard", None): + create_params["ip_wildcard"] = src_obj.ip_wildcard + else: + # If no recognizable address type is found, skip this object + logger.warning( + f"Skipping {src_obj.name}: No valid address type (ip_netmask, fqdn, ip_range, ip_wildcard)." 
+ ) + continue + + # Create the address object + try: + new_obj = destination_addresses.create(create_params) + created_objects.append(new_obj.name) + logger.info(f"Created address object in destination: {new_obj.name}") + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + logger.error( + f"Error creating address object {src_obj.name} in destination: {e}" + ) + # Decide whether to continue or exit; continuing for now + continue + except Exception as e: + logger.error( + f"Unexpected error creating address object {src_obj.name} in destination: {e}" + ) + continue + + # --- Commit changes on destination --- + if created_objects: + try: + commit_params = { + "folders": [destination_folder], + "description": "Cloned address objects", + "sync": True, + } + result = destination_addresses.commit(**commit_params) + job_status = destination_addresses.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error(f"Error committing address objects in destination: {e}") + raise typer.Exit(code=1) + else: + logger.info("No new address objects were created, skipping commit.") + + typer.echo("Address objects migration completed successfully.") diff --git a/scm_config_clone/commands/security_services/__init__.py b/scm_config_clone/commands/security_services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scm_config_clone/main.py b/scm_config_clone/main.py index bf3db56..eef66f2 100644 --- a/scm_config_clone/main.py +++ b/scm_config_clone/main.py @@ -7,7 +7,6 @@ Commands: - `clone-address-objects`: Clone address objects. -- `clone-address-groups`: Clone address groups. - `create-secrets-file`: Create authentication file. 
Usage: @@ -17,12 +16,8 @@ import typer import logging -from scm_config_clone.commands import ( - clone_address_objects, - clone_address_groups, - clone_security_profile_groups, - create_secrets_file, -) +from scm_config_clone.commands.create_secrets_file import create_secrets_file +from scm_config_clone.commands.objects import clone_address_objects # Initialize Typer app app = typer.Typer( @@ -34,11 +29,16 @@ logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) -# Register commands -app.command()(clone_address_objects) -app.command()(clone_address_groups) -app.command()(clone_security_profile_groups) -app.command()(create_secrets_file) +# Register commands with clearer parameters +app.command( + name="create-secrets-file", + help="Create a YAML file containing SCM authentication details.", +)(create_secrets_file) + +app.command( + name="clone-address-objects", + help="Clone address objects from the source SCM tenant to the destination SCM tenant.", +)(clone_address_objects) if __name__ == "__main__": app() diff --git a/scm_config_clone/utilities/__init__.py b/scm_config_clone/utilities/__init__.py index e69de29..9b719e8 100644 --- a/scm_config_clone/utilities/__init__.py +++ b/scm_config_clone/utilities/__init__.py @@ -0,0 +1,3 @@ +# scm_config_clone/utilities/__init__.py + +from .settings import load_settings diff --git a/scm_config_clone/utilities/helpers.py b/scm_config_clone/utilities/helpers.py deleted file mode 100644 index 52c469f..0000000 --- a/scm_config_clone/utilities/helpers.py +++ /dev/null @@ -1,218 +0,0 @@ -# scm_config_clone/utilities/helpers.py - -import logging -from typing import Dict, List - -from panapi import PanApiSession -from panapi.config.objects import Address, AddressGroup -from panapi.config.security import ProfileGroup - -logger = logging.getLogger(__name__) - - -def authenticate_scm(scm_info: Dict[str, str]) -> PanApiSession: - """ - Authenticate with a Strata Cloud Manager tenant and return an API session. 
- - Args: - scm_info (Dict[str, str]): SCM authentication details. - - Error: - Exception: Raises an exception if authentication fails. - - Return: - PanApiSession: An authenticated API session object. - """ - session = PanApiSession() - try: - session.authenticate( - client_id=scm_info["client_id"], - client_secret=scm_info["client_secret"], - scope=f"profile tsg_id:{scm_info['tenant']} email", - token_url=scm_info["token_url"], - ) - logger.info( - f"Authenticated with Strata Cloud Manager tenant {scm_info['tenant']}" - ) - return session - except Exception as e: - logger.error(f"Error with Prisma authentication: {e}") - raise - - -def create_scm_address_objects( - address_objects: List[Address], - folder: str, - session: PanApiSession, -) -> List[Dict[str, str]]: - """ - Create address objects in the destination SCM tenant. - - Iterates over address objects and creates them in the specified folder of the destination tenant. - - Args: - address_objects (List[Address]): List of address objects to create. - folder (str): Folder name in the destination tenant. - session (PanApiSession): Authenticated API session for the destination tenant. - - Error: - Exception: Raises an exception if creation fails. - - Return: - List[Dict[str, str]]: List of created address objects data. 
- """ - scm_address_objects = [] - - for address_object in address_objects: - # Extract attributes - scm_address_data = { - "name": address_object.name, - "folder": folder, - "limit": 5000, - } - - # Optional fields - if getattr(address_object, "description", None): - scm_address_data["description"] = address_object.description - - # Determine address type - if getattr(address_object, "ip_netmask", None): - scm_address_data["ip_netmask"] = address_object.ip_netmask - elif getattr(address_object, "fqdn", None): - scm_address_data["fqdn"] = address_object.fqdn - elif getattr(address_object, "ip_range", None): - scm_address_data["ip_range"] = address_object.ip_range - else: - logger.warning( - f"Address object {address_object.name} has no valid address type." - ) - continue - - logger.debug(f"Processing scm_address_data: {scm_address_data}.") - - # Create address object - try: - scm_address = Address(**scm_address_data) - scm_address.create(session) - scm_address_objects.append(scm_address_data) - logger.info(f"Created address object {address_object.name}") - except Exception as e: - logger.error(f"Error creating address object {address_object.name}: {e}") - raise - - return scm_address_objects - - -def create_scm_address_groups( - address_groups: List[AddressGroup], - folder: str, - session: PanApiSession, -) -> List[Dict[str, str]]: - """ - Create address groups in the destination SCM tenant. - - Iterates over address groups and creates them in the specified folder of the destination tenant. - - Args: - address_groups (List[AddressGroup]): List of address groups to create. - folder (str): Folder name in the destination tenant. - session (PanApiSession): Authenticated API session for the destination tenant. - - Error: - Exception: Raises an exception if creation fails. - - Return: - List[Dict[str, str]]: List of created address groups data. 
- """ - scm_address_groups = [] - - for address_group in address_groups: - logger.debug(f"Processing address group: {address_group.name}") - - scm_address_group_data = { - "folder": folder, - "name": address_group.name, - "limit": 5000, - } - - # Optional fields - if getattr(address_group, "description", None): - scm_address_group_data["description"] = address_group.description - - # Handle static and dynamic groups - if getattr(address_group, "static", None): - scm_address_group_data["static"] = list(address_group.static) - elif getattr(address_group, "dynamic", None): - scm_address_group_data["dynamic"] = { - "filter": address_group.dynamic["filter"] - } - else: - logger.warning( - f"Address group {address_group.name} has no valid type (static or dynamic)." - ) - continue - - # Create address group - try: - scm_address_group = AddressGroup(**scm_address_group_data) - scm_address_group.create(session) - scm_address_groups.append(scm_address_group_data) - logger.info(f"Created address group {address_group.name}") - except Exception as e: - logger.error(f"Error creating address group {address_group.name}: {e}") - raise - - return scm_address_groups - - -def create_scm_security_profile_groups( - profile_groups: List[ProfileGroup], - folder: str, - session: PanApiSession, -) -> List[Dict[str, str]]: - """ - Create security profile groups in the destination SCM tenant. - - Iterates over list of security profile groups and creates them in the specified folder of the destination tenant. - - Args: - profile_groups (List[ProfileGroup]): List of security profile groups to create. - folder (str): Folder name in the destination tenant. - session (PanApiSession): Authenticated API session for the destination tenant. - - Error: - Exception: Raises an exception if creation fails. - - Return: - List[Dict[str, str]]: List of created security profile group data. 
- """ - scm_profile_groups = [] - - for profile_group in profile_groups: - # Extract attributes - scm_profile_group_data = { - "name": profile_group.name, - "folder": folder, - "limit": 5000, - } - - # Optional fields - if getattr(profile_group, "description", None): - scm_profile_group_data["description"] = profile_group.description - - - logger.debug(f"Processing scm_profile_group_data: {scm_profile_group_data}.") - - # Create address object - try: - scm_address = Address(**scm_profile_group_data) - scm_address.create(session) - scm_profile_groups.append(scm_profile_group_data) - logger.info(f"Created address object {profile_group.name}") - except Exception as e: - logger.error(f"Error creating address object {profile_group.name}: {e}") - raise - - return scm_profile_groups - - diff --git a/scm_config_clone/config/settings.py b/scm_config_clone/utilities/settings.py similarity index 100% rename from scm_config_clone/config/settings.py rename to scm_config_clone/utilities/settings.py From da870ff8a4f233a7ee082daf76ee1d3009a105c8 Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sat, 7 Dec 2024 20:22:51 -0600 Subject: [PATCH 02/18] Update .gitignore to exclude IntelliJ IDEA config files Add entries to .gitignore for IntelliJ IDEA-specific configuration files to prevent them from being tracked in the repository. This ensures that environment-specific settings remain local and do not affect team members collaborating on the same project. 
--- .gitignore | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 1e7d766..3e55497 100644 --- a/.gitignore +++ b/.gitignore @@ -234,4 +234,10 @@ atlassian-ide-plugin.xml # Exclude secrets.yaml files .secrets.yaml -secrets.yaml \ No newline at end of file +secrets.yaml +/.idea/misc.xml +/.idea/modules.xml +/.idea/inspectionProfiles/profiles_settings.xml +/.idea/inspectionProfiles/Project_Default.xml +/.idea/scm-config-clone.iml +/.idea/vcs.xml From 41fa7a4422aa18ec79fad2b0aff5800abf58fc06 Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sat, 7 Dec 2024 20:25:49 -0600 Subject: [PATCH 03/18] Improve output file naming and remove redundant token URLs Updated the authentication file creation message to dynamically reflect the name of the output file, enhancing clarity for the user. Removed redundant `token_url` entries from the settings utility as they were not utilized in the destination and source SCM configurations, thereby simplifying the code. 
--- scm_config_clone/commands/create_secrets_file.py | 2 +- scm_config_clone/utilities/settings.py | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/scm_config_clone/commands/create_secrets_file.py b/scm_config_clone/commands/create_secrets_file.py index 7bb6f1d..6cc3ecd 100644 --- a/scm_config_clone/commands/create_secrets_file.py +++ b/scm_config_clone/commands/create_secrets_file.py @@ -29,7 +29,7 @@ def create_secrets_file( Return: None """ - typer.echo("*" * 79 + "\nCreating authentication file called .secrets.yaml in the current directory\n") + typer.echo("*" * 79 + f"\nCreating authentication file called {output_file} in the current directory\n") # Prompt user for credentials typer.echo("-" * 79 + "\n\tEnter source SCM credentials (where are you cloning from?)\n" + "-" * 79) diff --git a/scm_config_clone/utilities/settings.py b/scm_config_clone/utilities/settings.py index e2a0945..76ef111 100644 --- a/scm_config_clone/utilities/settings.py +++ b/scm_config_clone/utilities/settings.py @@ -28,14 +28,12 @@ def load_settings(settings_file: str) -> Dict[str, Dict[str, str]]: "client_id": settings.oauth.source.client_id, "client_secret": settings.oauth.source.client_secret, "tenant": settings.oauth.source.tsg, - "token_url": settings.oauth.token_url, "folder": settings.oauth.source.folder, } destination_scm = { "client_id": settings.oauth.destination.client_id, "client_secret": settings.oauth.destination.client_secret, "tenant": settings.oauth.destination.tsg, - "token_url": settings.oauth.token_url, "folder": settings.oauth.destination.folder, } return { From dbb85c1f41d47dc2f67b160a880b1000a226d315 Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sun, 8 Dec 2024 09:48:42 -0600 Subject: [PATCH 04/18] Improve logging and update credential keys for authentication Enhanced logging by displaying detailed debug information during source authentication and corrected the credential key from 'tsg_id' to 'tenant' for better clarity. 
This change ensures more accurate logging and aligns the code with the actual structure of the credential data. --- scm_config_clone/commands/objects/address.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/scm_config_clone/commands/objects/address.py b/scm_config_clone/commands/objects/address.py index e184f86..c203b30 100644 --- a/scm_config_clone/commands/objects/address.py +++ b/scm_config_clone/commands/objects/address.py @@ -46,6 +46,7 @@ def clone_address_objects( # Load settings settings = load_settings(settings_file) + logger.info(f"Loaded settings: {settings}") # --- Authenticate with source tenant --- try: @@ -53,10 +54,10 @@ def clone_address_objects( source_client = Scm( client_id=source_creds["client_id"], client_secret=source_creds["client_secret"], - tsg_id=source_creds["tsg_id"], - log_level="INFO", + tsg_id=source_creds["tenant"], + log_level="debug", ) - logger.info(f"Authenticated with source SCM tenant: {source_creds['tsg_id']}") + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") except (AuthenticationError, KeyError) as e: logger.error(f"Error authenticating with source tenant: {e}") raise typer.Exit(code=1) @@ -82,11 +83,11 @@ def clone_address_objects( destination_client = Scm( client_id=dest_creds["client_id"], client_secret=dest_creds["client_secret"], - tsg_id=dest_creds["tsg_id"], + tsg_id=dest_creds["tenant"], log_level="INFO", ) logger.info( - f"Authenticated with destination SCM tenant: {dest_creds['tsg_id']}" + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" ) except (AuthenticationError, KeyError) as e: logger.error(f"Error authenticating with destination tenant: {e}") From 7cdea2fa20bbdd04eec8341a2e6bb61586f79219 Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sun, 15 Dec 2024 06:25:44 -0600 Subject: [PATCH 05/18] complete refactor --- poetry.lock | 941 ++++++++++++------ pyproject.toml | 4 +- scm_config_clone/__init__.py | 5 + 
scm_config_clone/commands/__init__.py | 9 - .../commands/create_secrets_file.py | 108 -- .../commands/create_settings_file.py | 200 ++++ scm_config_clone/commands/objects/__init__.py | 3 - scm_config_clone/commands/objects/address.py | 405 ++++++-- scm_config_clone/commands/objects/tag.py | 334 +++++++ .../commands/security_services/__init__.py | 0 scm_config_clone/main.py | 32 +- scm_config_clone/utilities/__init__.py | 3 - scm_config_clone/utilities/settings.py | 98 +- 13 files changed, 1608 insertions(+), 534 deletions(-) delete mode 100644 scm_config_clone/commands/__init__.py delete mode 100644 scm_config_clone/commands/create_secrets_file.py create mode 100644 scm_config_clone/commands/create_settings_file.py delete mode 100644 scm_config_clone/commands/objects/__init__.py create mode 100644 scm_config_clone/commands/objects/tag.py delete mode 100644 scm_config_clone/commands/security_services/__init__.py delete mode 100644 scm_config_clone/utilities/__init__.py diff --git a/poetry.lock b/poetry.lock index 6f5e3f3..dc36bc7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -73,13 +73,13 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2022.6.15" +version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, - {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] @@ -163,18 +163,118 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "2.1.1" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false -python-versions = ">=3.6.0" +python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + 
{file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = 
"charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = 
"sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = 
"charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = 
"charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = 
"charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = 
"sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] -[package.extras] -unicode-backport = ["unicodedata2"] - [[package]] name = "click" version = "8.1.7" @@ -297,13 +397,13 @@ doc = ["Sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"] [[package]] name = "faker" -version = "30.3.0" +version = "33.1.0" description = "Faker is a Python package that generates fake data for you." 
optional = false python-versions = ">=3.8" files = [ - {file = "Faker-30.3.0-py3-none-any.whl", hash = "sha256:e8a15fd1b0f72992b008f5ea94c70d3baa0cb51b0d5a0e899c17b1d1b23d2771"}, - {file = "faker-30.3.0.tar.gz", hash = "sha256:8760fbb34564fbb2f394345eef24aec5b8f6506b6cfcefe8195ed66dd1032bdb"}, + {file = "Faker-33.1.0-py3-none-any.whl", hash = "sha256:d30c5f0e2796b8970de68978365247657486eb0311c5abe88d0b895b68dff05d"}, + {file = "faker-33.1.0.tar.gz", hash = "sha256:1c925fc0e86a51fc46648b504078c88d0cd48da1da2595c4e712841cab43a1e4"}, ] [package.dependencies] @@ -345,15 +445,18 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "idna" -version = "3.3" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -423,72 +526,72 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "3.0.0" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" files = [ - {file = "MarkupSafe-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:380faf314c3c84c1682ca672e6280c6c59e92d0bc13dc71758ffa2de3cd4e252"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1ee9790be6f62121c4c58bbced387b0965ab7bffeecb4e17cc42ef290784e363"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ddf5cb8e9c00d9bf8b0c75949fb3ff9ea2096ba531693e2e87336d197fdb908"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b36473a2d3e882d1873ea906ce54408b9588dc2c65989664e6e7f5a2de353d7"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dba0f83119b9514bc37272ad012f0cc03f0805cc6a2bea7244e19250ac8ff29f"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:409535e0521c4630d5b5a1bf284e9d3c76d2fc2f153ebb12cf3827797798cc99"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64a7c7856c3a409011139b17d137c2924df4318dab91ee0530800819617c4381"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4deea1d9169578917d1f35cdb581bc7bab56a7e8c5be2633bd1b9549c3c22a01"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-win32.whl", hash = "sha256:3cd0bba31d484fe9b9d77698ddb67c978704603dc10cdc905512af308cfcca6b"}, - {file = "MarkupSafe-3.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:4ca04c60006867610a06575b46941ae616b19da0adc85b9f8f3d9cbd7a3da385"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e64b390a306f9e849ee809f92af6a52cda41741c914358e0e9f8499d03741526"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c524203207f5b569df06c96dafdc337228921ee8c3cc5f6e891d024c6595352"}, - {file = 
"MarkupSafe-3.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c409691696bec2b5e5c9efd9593c99025bf2f317380bf0d993ee0213516d908a"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64f7d04410be600aa5ec0626d73d43e68a51c86500ce12917e10fd013e258df5"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:105ada43a61af22acb8774514c51900dc820c481cc5ba53f17c09d294d9c07ca"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a5fd5500d4e4f7cc88d8c0f2e45126c4307ed31e08f8ec521474f2fd99d35ac3"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:25396abd52b16900932e05b7104bcdc640a4d96c914f39c3b984e5a17b01fba0"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3efde9a8c56c3b6e5f3fa4baea828f8184970c7c78480fedb620d804b1c31e5c"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-win32.whl", hash = "sha256:12ddac720b8965332d36196f6f83477c6351ba6a25d4aff91e30708c729350d7"}, - {file = "MarkupSafe-3.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:658fdf6022740896c403d45148bf0c36978c6b48c9ef8b1f8d0c7a11b6cdea86"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d261ec38b8a99a39b62e0119ed47fe3b62f7691c500bc1e815265adc016438c1"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e363440c8534bf2f2ef1b8fdc02037eb5fff8fce2a558519b22d6a3a38b3ec5e"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7835de4c56066e096407a1852e5561f6033786dd987fa90dc384e45b9bd21295"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6cc46a27d904c9be5732029769acf4b0af69345172ed1ef6d4db0c023ff603b"}, - {file = 
"MarkupSafe-3.0.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0411641d31aa6f7f0cc13f0f18b63b8dc08da5f3a7505972a42ab059f479ba3"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b2a7afd24d408b907672015555bc10be2382e6c5f62a488e2d452da670bbd389"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c8ab7efeff1884c5da8e18f743b667215300e09043820d11723718de0b7db934"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8219e2207f6c188d15614ea043636c2b36d2d79bf853639c124a179412325a13"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-win32.whl", hash = "sha256:59420b5a9a5d3fee483a32adb56d7369ae0d630798da056001be1e9f674f3aa6"}, - {file = "MarkupSafe-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:7ed789d0f7f11fcf118cf0acb378743dfdd4215d7f7d18837c88171405c9a452"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:27d6a73682b99568916c54a4bfced40e7d871ba685b580ea04bbd2e405dfd4c5"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:494a64efc535e147fcc713dba58eecfce3a79f1e93ebe81995b387f5cd9bc2e1"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5243044a927e8a6bb28517838662a019cd7f73d7f106bbb37ab5e7fa8451a92"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63dae84964a9a3d2610808cee038f435d9a111620c37ccf872c2fcaeca6865b3"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcbee57fedc9b2182c54ffc1c5eed316c3da8bbfeda8009e1b5d7220199d15da"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f846fd7c241e5bd4161e2a483663eb66e4d8e12130fcdc052f310f388f1d61c6"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:678fbceb202382aae42c1f0cd9f56b776bc20a58ae5b553ee1fe6b802983a1d6"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bd9b8e458e2bab52f9ad3ab5dc8b689a3c84b12b2a2f64cd9a0dfe209fb6b42f"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-win32.whl", hash = "sha256:1fd02f47596e00a372f5b4af2b4c45f528bade65c66dfcbc6e1ea1bfda758e98"}, - {file = "MarkupSafe-3.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:b94bec9eda10111ec7102ef909eca4f3c2df979643924bfe58375f560713a7d1"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:509c424069dd037d078925b6815fc56b7271f3aaec471e55e6fa513b0a80d2aa"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:81be2c0084d8c69e97e3c5d73ce9e2a6e523556f2a19c4e195c09d499be2f808"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b43ac1eb9f91e0c14aac1d2ef0f76bc7b9ceea51de47536f61268191adf52ad7"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b231255770723f1e125d63c14269bcd8b8136ecfb620b9a18c0297e046d0736"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c182d45600556917f811aa019d834a89fe4b6f6255da2fd0bdcf80e970f95918"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f91c90f8f3bf436f81c12eeb4d79f9ddd263c71125e6ad71341906832a34386"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a7171d2b869e9be238ea318c196baf58fbf272704e9c1cd4be8c380eea963342"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cb244adf2499aa37d5dc43431990c7f0b632d841af66a51d22bd89c437b60264"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-win32.whl", hash = "sha256:96e3ed550600185d34429477f1176cedea8293fa40e47fe37a05751bcb64c997"}, - {file = "MarkupSafe-3.0.0-cp313-cp313t-win_amd64.whl", 
hash = "sha256:1d151b9cf3307e259b749125a5a08c030ba15a8f1d567ca5bfb0e92f35e761f5"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:23efb2be7221105c8eb0e905433414d2439cb0a8c5d5ca081c1c72acef0f5613"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81ee9c967956b9ea39b3a5270b7cb1740928d205b0dc72629164ce621b4debf9"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5509a8373fed30b978557890a226c3d30569746c565b9daba69df80c160365a5"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1c13c6c908811f867a8e9e66efb2d6c03d1cdd83e92788fe97f693c457dc44f"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d7e63d1977d3806ce0a1a3e0099b089f61abdede5238ca6a3f3bf8877b46d095"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d2c099be5274847d606574234e494f23a359e829ba337ea9037c3a72b0851942"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e042ccf8fe5bf8b6a4b38b3f7d618eb10ea20402b0c9f4add9293408de447974"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:98fb3a2bf525ad66db96745707b93ba0f78928b7a1cb2f1cb4b143bc7e2ba3b3"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-win32.whl", hash = "sha256:a80c6740e1bfbe50cea7cbf74f48823bb57bd59d914ee22ff8a81963b08e62d2"}, - {file = "MarkupSafe-3.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:5d207ff5cceef77796f8aacd44263266248cf1fbc601441524d7835613f8abec"}, - {file = "markupsafe-3.0.0.tar.gz", hash = "sha256:03ff62dea2fef3eadf2f1853bc6332bcb0458d9608b11dfb1cd5aeda1c178ea6"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + 
{file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = 
"MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] @@ -588,13 +691,13 @@ pyyaml = ">=5.1" [[package]] name = "mkdocs-material" -version = "9.5.39" +version = "9.5.48" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.39-py3-none-any.whl", hash = "sha256:0f2f68c8db89523cb4a59705cd01b4acd62b2f71218ccb67e1e004e560410d2b"}, - {file = "mkdocs_material-9.5.39.tar.gz", hash = "sha256:25faa06142afa38549d2b781d475a86fb61de93189f532b88e69bf11e5e5c3be"}, + {file = "mkdocs_material-9.5.48-py3-none-any.whl", hash = "sha256:b695c998f4b939ce748adbc0d3bff73fa886a670ece948cf27818fa115dc16f8"}, + 
{file = "mkdocs_material-9.5.48.tar.gz", hash = "sha256:a582531e8b34f4c7ed38c29d5c44763053832cf2a32f7409567e0c74749a47db"}, ] [package.dependencies] @@ -628,13 +731,13 @@ files = [ [[package]] name = "mkdocstrings" -version = "0.26.1" +version = "0.26.2" description = "Automatic documentation from sources, for MkDocs." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "mkdocstrings-0.26.1-py3-none-any.whl", hash = "sha256:29738bfb72b4608e8e55cc50fb8a54f325dc7ebd2014e4e3881a49892d5983cf"}, - {file = "mkdocstrings-0.26.1.tar.gz", hash = "sha256:bb8b8854d6713d5348ad05b069a09f3b79edbc6a0f33a34c6821141adb03fe33"}, + {file = "mkdocstrings-0.26.2-py3-none-any.whl", hash = "sha256:1248f3228464f3b8d1a15bd91249ce1701fe3104ac517a5f167a0e01ca850ba5"}, + {file = "mkdocstrings-0.26.2.tar.gz", hash = "sha256:34a8b50f1e6cfd29546c6c09fbe02154adfb0b361bb758834bf56aa284ba876e"}, ] [package.dependencies] @@ -663,6 +766,70 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "numpy" +version = "2.2.0" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.10" +files = [ + {file = "numpy-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e25507d85da11ff5066269d0bd25d06e0a0f2e908415534f3e603d2a78e4ffa"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a62eb442011776e4036af5c8b1a00b706c5bc02dc15eb5344b0c750428c94219"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:b606b1aaf802e6468c2608c65ff7ece53eae1a6874b3765f69b8ceb20c5fa78e"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:36b2b43146f646642b425dd2027730f99bac962618ec2052932157e213a040e9"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7fe8f3583e0607ad4e43a954e35c1748b553bfe9fdac8635c02058023277d1b3"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122fd2fcfafdefc889c64ad99c228d5a1f9692c3a83f56c292618a59aa60ae83"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3f2f5cddeaa4424a0a118924b988746db6ffa8565e5829b1841a8a3bd73eb59a"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fe4bb0695fe986a9e4deec3b6857003b4cfe5c5e4aac0b95f6a658c14635e31"}, + {file = "numpy-2.2.0-cp310-cp310-win32.whl", hash = "sha256:b30042fe92dbd79f1ba7f6898fada10bdaad1847c44f2dff9a16147e00a93661"}, + {file = "numpy-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dc1d6d66f8d37843ed281773c7174f03bf7ad826523f73435deb88ba60d2d4"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9874bc2ff574c40ab7a5cbb7464bf9b045d617e36754a7bc93f933d52bd9ffc6"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0da8495970f6b101ddd0c38ace92edea30e7e12b9a926b57f5fabb1ecc25bb90"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0557eebc699c1c34cccdd8c3778c9294e8196df27d713706895edc6f57d29608"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3579eaeb5e07f3ded59298ce22b65f877a86ba8e9fe701f5576c99bb17c283da"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40deb10198bbaa531509aad0cd2f9fadb26c8b94070831e2208e7df543562b74"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2aed8fcf8abc3020d6a9ccb31dbc9e7d7819c56a348cc88fd44be269b37427e"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a222d764352c773aa5ebde02dd84dba3279c81c6db2e482d62a3fa54e5ece69b"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4e58666988605e251d42c2818c7d3d8991555381be26399303053b58a5bbf30d"}, + 
{file = "numpy-2.2.0-cp311-cp311-win32.whl", hash = "sha256:4723a50e1523e1de4fccd1b9a6dcea750c2102461e9a02b2ac55ffeae09a4410"}, + {file = "numpy-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:16757cf28621e43e252c560d25b15f18a2f11da94fea344bf26c599b9cf54b73"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cff210198bb4cae3f3c100444c5eaa573a823f05c253e7188e1362a5555235b3"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58b92a5828bd4d9aa0952492b7de803135038de47343b2aa3cc23f3b71a3dc4e"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:ebe5e59545401fbb1b24da76f006ab19734ae71e703cdb4a8b347e84a0cece67"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e2b8cd48a9942ed3f85b95ca4105c45758438c7ed28fff1e4ce3e57c3b589d8e"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57fcc997ffc0bef234b8875a54d4058afa92b0b0c4223fc1f62f24b3b5e86038"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ad7d11b309bd132d74397fcf2920933c9d1dc865487128f5c03d580f2c3d03"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cb24cca1968b21355cc6f3da1a20cd1cebd8a023e3c5b09b432444617949085a"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0798b138c291d792f8ea40fe3768610f3c7dd2574389e37c3f26573757c8f7ef"}, + {file = "numpy-2.2.0-cp312-cp312-win32.whl", hash = "sha256:afe8fb968743d40435c3827632fd36c5fbde633b0423da7692e426529b1759b1"}, + {file = "numpy-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:3a4199f519e57d517ebd48cb76b36c82da0360781c6a0353e64c0cac30ecaad3"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742"}, + {file = "numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e"}, + {file = "numpy-2.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529"}, + 
{file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72"}, + {file = "numpy-2.2.0-cp313-cp313t-win32.whl", hash = "sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066"}, + {file = "numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e12c6c1ce84628c52d6367863773f7c8c8241be554e8b79686e91a43f1733773"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:b6207dc8fb3c8cb5668e885cef9ec7f70189bec4e276f0ff70d5aa078d32c88e"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a50aeff71d0f97b6450d33940c7181b08be1441c6c193e678211bff11aa725e7"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:df12a1f99b99f569a7c2ae59aa2d31724e8d835fc7f33e14f4792e3071d11221"}, + {file = "numpy-2.2.0.tar.gz", hash = "sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0"}, +] + [[package]] name = "oauthlib" version = "3.2.2" @@ -681,13 +848,13 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "packaging" -version = "24.1" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = 
"sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -707,13 +874,13 @@ lint = ["black"] [[package]] name = "pan-scm-sdk" -version = "0.3.4" +version = "0.3.6" description = "Python SDK for Palo Alto Networks Strata Cloud Manager." optional = false python-versions = "<4.0,>=3.10" files = [ - {file = "pan_scm_sdk-0.3.4-py3-none-any.whl", hash = "sha256:22265b8817fdf3c56ba34706c4fa9f0c3b32b65b2edad149ece75ccf7dc88e0d"}, - {file = "pan_scm_sdk-0.3.4.tar.gz", hash = "sha256:de417b6c785c87cac63e9d775d171c9d4860fa3e2326cbda991e18a4c9b6dfa4"}, + {file = "pan_scm_sdk-0.3.6-py3-none-any.whl", hash = "sha256:ca0486755fd2bb7191ea0e0a142e3a3eaea31b2edbf90c65bd4d828ec43d1d9c"}, + {file = "pan_scm_sdk-0.3.6.tar.gz", hash = "sha256:25b1f08d3ae377c68d87a3bb9a41b52f7f05bb8e2141f3da3cfbcba5d428ace2"}, ] [package.dependencies] @@ -724,6 +891,92 @@ pyjwt = ">=2.9.0,<3.0.0" requests-oauthlib = ">=2.0.0,<3.0.0" setuptools = ">=75.1.0,<76.0.0" +[[package]] +name = "pandas" +version = "2.2.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", 
markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", 
"adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + [[package]] name = "pathspec" version = "0.12.1" @@ -964,13 +1217,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pymdown-extensions" -version = "10.11.2" +version = "10.12" description = "Extension pack for Python Markdown." optional = false python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-10.11.2-py3-none-any.whl", hash = "sha256:41cdde0a77290e480cf53892f5c5e50921a7ee3e5cd60ba91bf19837b33badcf"}, - {file = "pymdown_extensions-10.11.2.tar.gz", hash = "sha256:bc8847ecc9e784a098efd35e20cba772bc5a1b529dfcef9dc1972db9021a1049"}, + {file = "pymdown_extensions-10.12-py3-none-any.whl", hash = "sha256:49f81412242d3527b8b4967b990df395c89563043bc51a3d2d7d500e52123b77"}, + {file = "pymdown_extensions-10.12.tar.gz", hash = "sha256:b0ee1e0b2bef1071a47891ab17003bfe5bf824a398e13f49f8ed653b699369a7"}, ] [package.dependencies] @@ -982,13 +1235,13 @@ extra = ["pygments (>=2.12)"] [[package]] name = "pytest" -version = "8.3.3" +version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, - {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, + {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, + {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, ] [package.dependencies] @@ -1016,53 +1269,77 @@ files = [ 
[package.dependencies] six = ">=1.5" +[[package]] +name = "pytz" +version = "2024.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, +] + [[package]] name = "pyyaml" -version = "6.0" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = 
"PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] @@ -1081,123 +1358,123 @@ pyyaml = "*" [[package]] name = "regex" -version = "2024.9.11" +version = "2024.11.6" description = "Alternative regular expression module, to replace re." 
optional = false python-versions = ">=3.8" files = [ - {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1494fa8725c285a81d01dc8c06b55287a1ee5e0e382d8413adc0a9197aac6408"}, - {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e12c481ad92d129c78f13a2a3662317e46ee7ef96c94fd332e1c29131875b7d"}, - {file = "regex-2024.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16e13a7929791ac1216afde26f712802e3df7bf0360b32e4914dca3ab8baeea5"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46989629904bad940bbec2106528140a218b4a36bb3042d8406980be1941429c"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a906ed5e47a0ce5f04b2c981af1c9acf9e8696066900bf03b9d7879a6f679fc8"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a091b0550b3b0207784a7d6d0f1a00d1d1c8a11699c1a4d93db3fbefc3ad35"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ddcd9a179c0a6fa8add279a4444015acddcd7f232a49071ae57fa6e278f1f71"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b41e1adc61fa347662b09398e31ad446afadff932a24807d3ceb955ed865cc8"}, - {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ced479f601cd2f8ca1fd7b23925a7e0ad512a56d6e9476f79b8f381d9d37090a"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:635a1d96665f84b292e401c3d62775851aedc31d4f8784117b3c68c4fcd4118d"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c0256beda696edcf7d97ef16b2a33a8e5a875affd6fa6567b54f7c577b30a137"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:3ce4f1185db3fbde8ed8aa223fc9620f276c58de8b0d4f8cc86fd1360829edb6"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:09d77559e80dcc9d24570da3745ab859a9cf91953062e4ab126ba9d5993688ca"}, - {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a22ccefd4db3f12b526eccb129390942fe874a3a9fdbdd24cf55773a1faab1a"}, - {file = "regex-2024.9.11-cp310-cp310-win32.whl", hash = "sha256:f745ec09bc1b0bd15cfc73df6fa4f726dcc26bb16c23a03f9e3367d357eeedd0"}, - {file = "regex-2024.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:01c2acb51f8a7d6494c8c5eafe3d8e06d76563d8a8a4643b37e9b2dd8a2ff623"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2cce2449e5927a0bf084d346da6cd5eb016b2beca10d0013ab50e3c226ffc0df"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b37fa423beefa44919e009745ccbf353d8c981516e807995b2bd11c2c77d268"}, - {file = "regex-2024.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64ce2799bd75039b480cc0360907c4fb2f50022f030bf9e7a8705b636e408fad"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4cc92bb6db56ab0c1cbd17294e14f5e9224f0cc6521167ef388332604e92679"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d05ac6fa06959c4172eccd99a222e1fbf17b5670c4d596cb1e5cde99600674c4"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:040562757795eeea356394a7fb13076ad4f99d3c62ab0f8bdfb21f99a1f85664"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6113c008a7780792efc80f9dfe10ba0cd043cbf8dc9a76ef757850f51b4edc50"}, - {file = "regex-2024.9.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e5fb5f77c8745a60105403a774fe2c1759b71d3e7b4ca237a5e67ad066c7199"}, - {file = 
"regex-2024.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:54d9ff35d4515debf14bc27f1e3b38bfc453eff3220f5bce159642fa762fe5d4"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:df5cbb1fbc74a8305b6065d4ade43b993be03dbe0f8b30032cced0d7740994bd"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7fb89ee5d106e4a7a51bce305ac4efb981536301895f7bdcf93ec92ae0d91c7f"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a738b937d512b30bf75995c0159c0ddf9eec0775c9d72ac0202076c72f24aa96"}, - {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e28f9faeb14b6f23ac55bfbbfd3643f5c7c18ede093977f1df249f73fd22c7b1"}, - {file = "regex-2024.9.11-cp311-cp311-win32.whl", hash = "sha256:18e707ce6c92d7282dfce370cd205098384b8ee21544e7cb29b8aab955b66fa9"}, - {file = "regex-2024.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:313ea15e5ff2a8cbbad96ccef6be638393041b0a7863183c2d31e0c6116688cf"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b0d0a6c64fcc4ef9c69bd5b3b3626cc3776520a1637d8abaa62b9edc147a58f7"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:49b0e06786ea663f933f3710a51e9385ce0cba0ea56b67107fd841a55d56a231"}, - {file = "regex-2024.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5b513b6997a0b2f10e4fd3a1313568e373926e8c252bd76c960f96fd039cd28d"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee439691d8c23e76f9802c42a95cfeebf9d47cf4ffd06f18489122dbb0a7ad64"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8f877c89719d759e52783f7fe6e1c67121076b87b40542966c02de5503ace42"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23b30c62d0f16827f2ae9f2bb87619bc4fba2044911e2e6c2eb1af0161cdb766"}, - {file = 
"regex-2024.9.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ab7824093d8f10d44330fe1e6493f756f252d145323dd17ab6b48733ff6c0a"}, - {file = "regex-2024.9.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8dee5b4810a89447151999428fe096977346cf2f29f4d5e29609d2e19e0199c9"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98eeee2f2e63edae2181c886d7911ce502e1292794f4c5ee71e60e23e8d26b5d"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:57fdd2e0b2694ce6fc2e5ccf189789c3e2962916fb38779d3e3521ff8fe7a822"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d552c78411f60b1fdaafd117a1fca2f02e562e309223b9d44b7de8be451ec5e0"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a0b2b80321c2ed3fcf0385ec9e51a12253c50f146fddb2abbb10f033fe3d049a"}, - {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:18406efb2f5a0e57e3a5881cd9354c1512d3bb4f5c45d96d110a66114d84d23a"}, - {file = "regex-2024.9.11-cp312-cp312-win32.whl", hash = "sha256:e464b467f1588e2c42d26814231edecbcfe77f5ac414d92cbf4e7b55b2c2a776"}, - {file = "regex-2024.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:9e8719792ca63c6b8340380352c24dcb8cd7ec49dae36e963742a275dfae6009"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c157bb447303070f256e084668b702073db99bbb61d44f85d811025fcf38f784"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4db21ece84dfeefc5d8a3863f101995de646c6cb0536952c321a2650aa202c36"}, - {file = "regex-2024.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:220e92a30b426daf23bb67a7962900ed4613589bab80382be09b48896d211e92"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1ae19e64c14c7ec1995f40bd932448713d3c73509e82d8cd7744dc00e29e86"}, - 
{file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47cd43a5bfa48f86925fe26fbdd0a488ff15b62468abb5d2a1e092a4fb10e85"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d4a76b96f398697fe01117093613166e6aa8195d63f1b4ec3f21ab637632963"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ea51dcc0835eea2ea31d66456210a4e01a076d820e9039b04ae8d17ac11dee6"}, - {file = "regex-2024.9.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7aaa315101c6567a9a45d2839322c51c8d6e81f67683d529512f5bcfb99c802"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c57d08ad67aba97af57a7263c2d9006d5c404d721c5f7542f077f109ec2a4a29"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8404bf61298bb6f8224bb9176c1424548ee1181130818fcd2cbffddc768bed8"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dd4490a33eb909ef5078ab20f5f000087afa2a4daa27b4c072ccb3cb3050ad84"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:eee9130eaad130649fd73e5cd92f60e55708952260ede70da64de420cdcad554"}, - {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a2644a93da36c784e546de579ec1806bfd2763ef47babc1b03d765fe560c9f8"}, - {file = "regex-2024.9.11-cp313-cp313-win32.whl", hash = "sha256:e997fd30430c57138adc06bba4c7c2968fb13d101e57dd5bb9355bf8ce3fa7e8"}, - {file = "regex-2024.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:042c55879cfeb21a8adacc84ea347721d3d83a159da6acdf1116859e2427c43f"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:35f4a6f96aa6cb3f2f7247027b07b15a374f0d5b912c0001418d1d55024d5cb4"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:55b96e7ce3a69a8449a66984c268062fbaa0d8ae437b285428e12797baefce7e"}, - {file = "regex-2024.9.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb130fccd1a37ed894824b8c046321540263013da72745d755f2d35114b81a60"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:323c1f04be6b2968944d730e5c2091c8c89767903ecaa135203eec4565ed2b2b"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be1c8ed48c4c4065ecb19d882a0ce1afe0745dfad8ce48c49586b90a55f02366"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5b029322e6e7b94fff16cd120ab35a253236a5f99a79fb04fda7ae71ca20ae8"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6fff13ef6b5f29221d6904aa816c34701462956aa72a77f1f151a8ec4f56aeb"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d4af3979376652010e400accc30404e6c16b7df574048ab1f581af82065e4"}, - {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:079400a8269544b955ffa9e31f186f01d96829110a3bf79dc338e9910f794fca"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f9268774428ec173654985ce55fc6caf4c6d11ade0f6f914d48ef4719eb05ebb"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:23f9985c8784e544d53fc2930fc1ac1a7319f5d5332d228437acc9f418f2f168"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2941333154baff9838e88aa71c1d84f4438189ecc6021a12c7573728b5838e"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e93f1c331ca8e86fe877a48ad64e77882c0c4da0097f2212873a69bbfea95d0c"}, - {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:846bc79ee753acf93aef4184c040d709940c9d001029ceb7b7a52747b80ed2dd"}, - {file = "regex-2024.9.11-cp38-cp38-win32.whl", hash = "sha256:c94bb0a9f1db10a1d16c00880bdebd5f9faf267273b8f5bd1878126e0fbde771"}, - {file = "regex-2024.9.11-cp38-cp38-win_amd64.whl", hash = "sha256:2b08fce89fbd45664d3df6ad93e554b6c16933ffa9d55cb7e01182baaf971508"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:07f45f287469039ffc2c53caf6803cd506eb5f5f637f1d4acb37a738f71dd066"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4838e24ee015101d9f901988001038f7f0d90dc0c3b115541a1365fb439add62"}, - {file = "regex-2024.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6edd623bae6a737f10ce853ea076f56f507fd7726bee96a41ee3d68d347e4d16"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c69ada171c2d0e97a4b5aa78fbb835e0ffbb6b13fc5da968c09811346564f0d3"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02087ea0a03b4af1ed6ebab2c54d7118127fee8d71b26398e8e4b05b78963199"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69dee6a020693d12a3cf892aba4808fe168d2a4cef368eb9bf74f5398bfd4ee8"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297f54910247508e6e5cae669f2bc308985c60540a4edd1c77203ef19bfa63ca"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecea58b43a67b1b79805f1a0255730edaf5191ecef84dbc4cc85eb30bc8b63b9"}, - {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eab4bb380f15e189d1313195b062a6aa908f5bd687a0ceccd47c8211e9cf0d4a"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:0cbff728659ce4bbf4c30b2a1be040faafaa9eca6ecde40aaff86f7889f4ab39"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:54c4a097b8bc5bb0dfc83ae498061d53ad7b5762e00f4adaa23bee22b012e6ba"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:73d6d2f64f4d894c96626a75578b0bf7d9e56dcda8c3d037a2118fdfe9b1c664"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:e53b5fbab5d675aec9f0c501274c467c0f9a5d23696cfc94247e1fb56501ed89"}, - {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ffbcf9221e04502fc35e54d1ce9567541979c3fdfb93d2c554f0ca583a19b35"}, - {file = "regex-2024.9.11-cp39-cp39-win32.whl", hash = "sha256:e4c22e1ac1f1ec1e09f72e6c44d8f2244173db7eb9629cc3a346a8d7ccc31142"}, - {file = "regex-2024.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:faa3c142464efec496967359ca99696c896c591c56c53506bac1ad465f66e919"}, - {file = "regex-2024.9.11.tar.gz", hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, + {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, + {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, + {file = 
"regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, + {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, + {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = 
"sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, + {file = "regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, + {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, + {file = 
"regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, + {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, + {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"}, + {file = 
"regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"}, + {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"}, + {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, + {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, + {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, + 
{file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, ] [[package]] name = "requests" -version = "2.28.1" +version = "2.32.3" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.8" files = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] @@ -1223,13 +1500,13 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] name = "rich" -version = "13.9.2" +version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" files = [ - {file = "rich-13.9.2-py3-none-any.whl", hash = "sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1"}, - {file = "rich-13.9.2.tar.gz", hash = "sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c"}, + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, ] [package.dependencies] @@ -1242,23 +1519,23 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "setuptools" -version = "75.1.0" +version = "75.6.0" description = 
"Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, - {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, + {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, + {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks 
(!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] [[package]] name = "shellingham" @@ -1273,41 +1550,85 @@ files = [ [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = 
"sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, ] +[package.extras] +widechars = ["wcwidth"] + [[package]] name = "termynal" -version = "0.12.1" +version = "0.12.2" description = "A lightweight and modern animated terminal window" optional = false -python-versions = ">=3.8.1,<4.0.0" +python-versions = ">=3.9" files = [ - {file = "termynal-0.12.1-py3-none-any.whl", hash = "sha256:8420fc7ddc9daa5733c5aa370e7d2567a0e9299caa6e47fe8b5efd228fdffc47"}, - {file = "termynal-0.12.1.tar.gz", hash = "sha256:48842b5066eee25d5efef570b776bbc7e2875fca547c38c39e73fec1bb562fa9"}, + {file = "termynal-0.12.2-py3-none-any.whl", hash = "sha256:62314dac6e77f1b7b64a251c2c90702eb6e6910f72ea9c6e5ef68d715b272709"}, + {file = "termynal-0.12.2.tar.gz", hash = "sha256:cc2356bbf8650c16abd0558786251fabdde6b25e1125c1f091607ed3e422f7f2"}, ] [package.dependencies] -markdown = "*" +markdown = ">=3" [package.extras] -mkdocs = ["mkdocs (>=1.4,<2.0)"] +mkdocs = ["mkdocs (>=1.4)"] [[package]] name = "tomli" -version = "2.0.2" +version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ - {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, - {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + 
{file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] @@ -1338,59 +1659,71 @@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "tzdata" +version = "2024.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, +] + [[package]] name = "urllib3" -version = "1.26.12" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" +python-versions = ">=3.8" files = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "watchdog" -version = "5.0.3" +version = "6.0.0" description = "Filesystem events monitoring" optional = false python-versions = ">=3.9" files = [ - {file = "watchdog-5.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:85527b882f3facda0579bce9d743ff7f10c3e1e0db0a0d0e28170a7d0e5ce2ea"}, - {file = "watchdog-5.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:53adf73dcdc0ef04f7735066b4a57a4cd3e49ef135daae41d77395f0b5b692cb"}, - {file = "watchdog-5.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e25adddab85f674acac303cf1f5835951345a56c5f7f582987d266679979c75b"}, - {file = "watchdog-5.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f01f4a3565a387080dc49bdd1fefe4ecc77f894991b88ef927edbfa45eb10818"}, - {file = "watchdog-5.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91b522adc25614cdeaf91f7897800b82c13b4b8ac68a42ca959f992f6990c490"}, - {file = 
"watchdog-5.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d52db5beb5e476e6853da2e2d24dbbbed6797b449c8bf7ea118a4ee0d2c9040e"}, - {file = "watchdog-5.0.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:94d11b07c64f63f49876e0ab8042ae034674c8653bfcdaa8c4b32e71cfff87e8"}, - {file = "watchdog-5.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:349c9488e1d85d0a58e8cb14222d2c51cbc801ce11ac3936ab4c3af986536926"}, - {file = "watchdog-5.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:53a3f10b62c2d569e260f96e8d966463dec1a50fa4f1b22aec69e3f91025060e"}, - {file = "watchdog-5.0.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:950f531ec6e03696a2414b6308f5c6ff9dab7821a768c9d5788b1314e9a46ca7"}, - {file = "watchdog-5.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ae6deb336cba5d71476caa029ceb6e88047fc1dc74b62b7c4012639c0b563906"}, - {file = "watchdog-5.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1021223c08ba8d2d38d71ec1704496471ffd7be42cfb26b87cd5059323a389a1"}, - {file = "watchdog-5.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:752fb40efc7cc8d88ebc332b8f4bcbe2b5cc7e881bccfeb8e25054c00c994ee3"}, - {file = "watchdog-5.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2e8f3f955d68471fa37b0e3add18500790d129cc7efe89971b8a4cc6fdeb0b2"}, - {file = "watchdog-5.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b8ca4d854adcf480bdfd80f46fdd6fb49f91dd020ae11c89b3a79e19454ec627"}, - {file = "watchdog-5.0.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:90a67d7857adb1d985aca232cc9905dd5bc4803ed85cfcdcfcf707e52049eda7"}, - {file = "watchdog-5.0.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:720ef9d3a4f9ca575a780af283c8fd3a0674b307651c1976714745090da5a9e8"}, - {file = "watchdog-5.0.3-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:223160bb359281bb8e31c8f1068bf71a6b16a8ad3d9524ca6f523ac666bb6a1e"}, - {file = "watchdog-5.0.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:560135542c91eaa74247a2e8430cf83c4342b29e8ad4f520ae14f0c8a19cfb5b"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dd021efa85970bd4824acacbb922066159d0f9e546389a4743d56919b6758b91"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_armv7l.whl", hash = "sha256:78864cc8f23dbee55be34cc1494632a7ba30263951b5b2e8fc8286b95845f82c"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_i686.whl", hash = "sha256:1e9679245e3ea6498494b3028b90c7b25dbb2abe65c7d07423ecfc2d6218ff7c"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_ppc64.whl", hash = "sha256:9413384f26b5d050b6978e6fcd0c1e7f0539be7a4f1a885061473c5deaa57221"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:294b7a598974b8e2c6123d19ef15de9abcd282b0fbbdbc4d23dfa812959a9e05"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_s390x.whl", hash = "sha256:26dd201857d702bdf9d78c273cafcab5871dd29343748524695cecffa44a8d97"}, - {file = "watchdog-5.0.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:0f9332243355643d567697c3e3fa07330a1d1abf981611654a1f2bf2175612b7"}, - {file = "watchdog-5.0.3-py3-none-win32.whl", hash = "sha256:c66f80ee5b602a9c7ab66e3c9f36026590a0902db3aea414d59a2f55188c1f49"}, - {file = "watchdog-5.0.3-py3-none-win_amd64.whl", hash = "sha256:f00b4cf737f568be9665563347a910f8bdc76f88c2970121c86243c8cfdf90e9"}, - {file = "watchdog-5.0.3-py3-none-win_ia64.whl", hash = "sha256:49f4d36cb315c25ea0d946e018c01bb028048023b9e103d3d3943f58e109dd45"}, - {file = "watchdog-5.0.3.tar.gz", hash = "sha256:108f42a7f0345042a854d4d0ad0834b741d421330d5f575b81cb27b883500176"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e6f0e77c9417e7cd62af82529b10563db3423625c5fce018430b249bf977f9e8"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90c8e78f3b94014f7aaae121e6b909674df5b46ec24d6bebc45c44c56729af2a"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7631a77ffb1f7d2eefa4445ebbee491c720a5661ddf6df3498ebecae5ed375c"}, + {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881"}, + {file 
= "watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11"}, + {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7a0e56874cfbc4b9b05c60c8a1926fedf56324bb08cfbc188969777940aef3aa"}, + {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6439e374fc012255b4ec786ae3c4bc838cd7309a540e5fe0952d03687d8804e"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2"}, + {file = "watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a"}, + {file = "watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680"}, + {file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"}, + {file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"}, ] [package.extras] @@ 
-1399,4 +1732,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "4395d5a7621483475d98da6d3d3022bd3379432f80c68b0cb8a78a8ac676a648" +content-hash = "3e857905978a0858db689969f078cde7aba1ba315af047bd8009731b1976b65e" diff --git a/pyproject.toml b/pyproject.toml index 7437759..f4fb595 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "scm-config-clone" -version = "0.1.1" +version = "0.2.0" description = "A command-line tool to clone configuration objects between Palo Alto Networks Strata Cloud Manager (SCM) tenants." authors = ["Calvin Remsburg "] license = "Apache 2.0" @@ -12,6 +12,8 @@ dynaconf = "^3.2.6" typer = "^0.12.5" setuptools = "^75.1.0" pan-scm-sdk = "^0.3.4" +tabulate = "^0.9.0" +pandas = "^2.2.3" [tool.poetry.group.dev.dependencies] black = "^24.10.0" diff --git a/scm_config_clone/__init__.py b/scm_config_clone/__init__.py index e69de29..aa8eb19 100644 --- a/scm_config_clone/__init__.py +++ b/scm_config_clone/__init__.py @@ -0,0 +1,5 @@ +# scm_config_clone/__init__.py + +from .commands.create_settings_file import create_settings +from .commands.objects.address import addresses +from .commands.objects.tag import tags diff --git a/scm_config_clone/commands/__init__.py b/scm_config_clone/commands/__init__.py deleted file mode 100644 index 90b98af..0000000 --- a/scm_config_clone/commands/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# scm_config_clone/commands/__init__.py - -""" -Commands package for SCM Config Clone. - -This package contains the commands used in the SCM Config Clone CLI application. 
-""" - -from .create_secrets_file import create_secrets_file diff --git a/scm_config_clone/commands/create_secrets_file.py b/scm_config_clone/commands/create_secrets_file.py deleted file mode 100644 index 6cc3ecd..0000000 --- a/scm_config_clone/commands/create_secrets_file.py +++ /dev/null @@ -1,108 +0,0 @@ -# scm_config_clone/commands/create_secrets_file.py - -import typer -import logging -import yaml - -logger = logging.getLogger(__name__) - - -def create_secrets_file( - output_file: str = typer.Option( - ".secrets.yaml", - "--output-file", - "-o", - help="Path to the output settings file.", - ), -): - """ - Create an authentication file (.secrets.yaml) with SCM credentials. - - Prompts the user for source and destination SCM credentials and writes them to a YAML file. - - Args: - output_file (str): Path to the output settings YAML file. - - Error: - typer.Exit: Exits the application if an error occurs during file writing. - - Return: - None - """ - typer.echo("*" * 79 + f"\nCreating authentication file called {output_file} in the current directory\n") - - # Prompt user for credentials - typer.echo("-" * 79 + "\n\tEnter source SCM credentials (where are you cloning from?)\n" + "-" * 79) - source_client_id = typer.prompt( - default="example@1234567890.iam.panserviceaccount.com", - text="Source SCM Client ID\n", - show_default=True, - ) - source_client_secret = typer.prompt( - default="12345678-1234-1234-1234-123456789012", - hide_input=True, - show_default=True, - text="Source SCM Client Secret (input hidden)\n", - ) - source_tsg = typer.prompt( - default="1234567890", - show_default=True, - text="Source SCM Tenant TSG ID\n", - ) - source_folder = typer.prompt( - default="Prisma Access", - show_default=True, - text="Source Configuration Folder\n", - ) - - typer.echo("\n" + "-" * 79 + "\n\tEnter destination SCM credentials (where are you cloning to?)\n" + "-" * 79) - dest_client_id = typer.prompt( - default="example@0987654321.iam.panserviceaccount.com", - 
text="Destination SCM Client ID\n", - show_default=True, - ) - dest_client_secret = typer.prompt( - default="87654321-4321-4321-4321-120987654321", - hide_input=True, - show_default=True, - text="Destination SCM Client Secret (input hidden)\n", - ) - dest_tsg = typer.prompt( - default="0987654321", - show_default=True, - text="Destination SCM Tenant TSG ID\n", - ) - dest_folder = typer.prompt( - default="Prisma Access", - show_default=True, - text="Destination Configuration Folder\n", - ) - - # Build data dictionary - data = { - "oauth": { - "source": { - "client_id": source_client_id, - "client_secret": source_client_secret, - "tsg": source_tsg, - "folder": source_folder, - }, - "destination": { - "client_id": dest_client_id, - "client_secret": dest_client_secret, - "tsg": dest_tsg, - "folder": dest_folder, - }, - } - } - - # Write to YAML file - try: - with open(output_file, "w") as f: - yaml.dump(data, f) - except Exception as e: - logger.error(f"Error writing authentication file: {e}") - raise typer.Exit(code=1) - - typer.echo("\n" + "-" * 79 + f"\n\tAuthentication file created successfully `{output_file}`\n" + "-" * 79 + "\n") - typer.echo("*" * 79 ) diff --git a/scm_config_clone/commands/create_settings_file.py b/scm_config_clone/commands/create_settings_file.py new file mode 100644 index 0000000..c73ce0a --- /dev/null +++ b/scm_config_clone/commands/create_settings_file.py @@ -0,0 +1,200 @@ +# scm_config_clone/commands/create_settings_file.py + +# standard library +import logging + +# third party library +import pandas as pd +import typer +import yaml +from tabulate import tabulate + +logger = logging.getLogger(__name__) + + +def create_settings( + output_file: str = typer.Option( + "settings.yaml", + "--output-file", + "-o", + help="Path to the output YAML settings file where credentials and configuration will be stored.", + ), +): + """ + Interactively create a .settings.yaml file containing SCM credentials, logging configuration, + and additional runtime 
options. + + This Typer CLI command prompts the user for: + - Source and destination SCM credentials (Client ID, Client Secret, TSG ID) + - Logging level preference (DEBUG, INFO, etc.) + - Additional boolean options (auto_approve, create_report, dry_run, quiet) + + It then writes these values to a YAML file, enabling subsequent commands to load these + settings automatically. + + Workflow: + 1. Prompt user for source SCM credentials. + 2. Prompt user for destination SCM credentials. + 3. Prompt user for logging level. + 4. Prompt user for additional boolean options (auto_approve, create_report, dry_run, quiet). + 5. Write the collected configuration to the specified YAML settings file. + 6. Display a summary table with masked secrets. + + Args: + output_file (str): The file path where the generated .settings.yaml will be written. + Defaults to ".settings.yaml". + + Raises: + typer.Exit: If an error occurs while writing the YAML file, the command exits + with a non-zero code after logging the error. + + Example: + Running the command without arguments: + ``` + scm-clone create-secrets-file + ``` + This will prompt the user interactively and create `.settings.yaml` in the current directory. 
+ """ + typer.echo("🚀 " + ("*" * 79)) + typer.echo(f"Creating settings file called {output_file} in the current directory") + typer.echo() + + # Prompt user for source SCM credentials + typer.echo("-" * 79) + typer.echo("🔑 Enter source SCM credentials (used as the configuration source)") + typer.echo("-" * 79) + source_client_id = typer.prompt( + default="example@1234567890.iam.panserviceaccount.com", + text="Source SCM Client ID\n", + show_default=True, + ) + source_client_secret = typer.prompt( + default="12345678-1234-1234-1234-123456789012", + hide_input=True, + show_default=True, + text="Source SCM Client Secret (input hidden)\n", + ) + source_tsg = typer.prompt( + default="1234567890", + show_default=True, + text="Source SCM Tenant TSG ID\n", + ) + + # Prompt user for destination SCM credentials + typer.echo() + typer.echo("-" * 79) + typer.echo("🔑 Enter destination SCM credentials (target of configuration cloning)") + typer.echo("-" * 79) + dest_client_id = typer.prompt( + default="example@0987654321.iam.panserviceaccount.com", + text="Destination SCM Client ID\n", + show_default=True, + ) + dest_client_secret = typer.prompt( + default="87654321-4321-4321-4321-120987654321", + hide_input=True, + show_default=True, + text="Destination SCM Client Secret (input hidden)\n", + ) + dest_tsg = typer.prompt( + default="0987654321", + show_default=True, + text="Destination SCM Tenant TSG ID\n", + ) + + # Prompt user for logging level + typer.echo() + typer.echo("-" * 79) + typer.echo( + "🪵 Specify desired logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)" + ) + typer.echo("-" * 79) + logging_level = typer.prompt( + default="INFO", + show_default=True, + text="Logging Level\n", + ) + + # Additional questions + typer.echo() + typer.echo("-" * 79) + typer.echo("⚙️ Additional Configuration Options:") + typer.echo("-" * 79) + + auto_approve = typer.confirm( + "Would you like to auto-approve changes without review?", + default=False, + ) + create_report = 
typer.confirm( + "Would you like to create a .csv file reporting the job?", + default=False, + ) + dry_run = typer.confirm( + "Would you like to perform a dry run (no changes applied)?", + default=False, + ) + quiet = typer.confirm( + "Would you like to hide all console output (except log messages)?", + default=False, + ) + + # Build data dictionary to write to YAML + data = { + "oauth": { + "source": { + "client_id": source_client_id, + "client_secret": source_client_secret, + "tsg": source_tsg, + }, + "destination": { + "client_id": dest_client_id, + "client_secret": dest_client_secret, + "tsg": dest_tsg, + }, + }, + "logging": logging_level, + "auto_approve": auto_approve, + "create_report": create_report, + "dry_run": dry_run, + "quiet": quiet, + } + + # Write data to the specified YAML file + try: + with open(output_file, "w") as f: + yaml.dump(data, f) + except Exception as e: + logger.error(f"Error writing settings file: {e}") + raise typer.Exit(code=1) + + # Mask client secrets for display + masked_source_secret = source_client_secret[:4] + "****" + masked_dest_secret = dest_client_secret[:4] + "****" + + # Prepare data for tabular display + display_data = { + "Source Client ID": source_client_id, + "Source Client Secret": masked_source_secret, + "Source TSG": source_tsg, + "Destination Client ID": dest_client_id, + "Destination Client Secret": masked_dest_secret, + "Destination TSG": dest_tsg, + "Logging Level": logging_level, + "Auto Approve": auto_approve, + "Create Report": create_report, + "Dry Run": dry_run, + "Quiet Mode": quiet, + } + + df = pd.DataFrame( + list(display_data.items()), columns=["Configuration Key", "Value"] + ) + + typer.echo() + typer.echo("-" * 79) + typer.echo( + "✅ Settings file created successfully and the following configuration was saved:\n" + ) + typer.echo(tabulate(df, headers="keys", tablefmt="fancy_grid", showindex=False)) + typer.echo("-" * 79) + typer.echo("🎉 Setup complete! 
🎉") diff --git a/scm_config_clone/commands/objects/__init__.py b/scm_config_clone/commands/objects/__init__.py deleted file mode 100644 index 86a0a1c..0000000 --- a/scm_config_clone/commands/objects/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# scm_config_clone/commands/objects/__init__.py - -from .address import clone_address_objects diff --git a/scm_config_clone/commands/objects/address.py b/scm_config_clone/commands/objects/address.py index c203b30..6b5fd69 100644 --- a/scm_config_clone/commands/objects/address.py +++ b/scm_config_clone/commands/objects/address.py @@ -1,9 +1,9 @@ # scm_config_clone/commands/objects/address.py -import typer import logging -from typing import List +from typing import List, Optional, Any, Dict +import typer from scm.client import Scm from scm.config.objects import Address from scm.exceptions import ( @@ -13,49 +13,219 @@ NameNotUniqueError, ObjectNotPresentError, ) +from scm.models.objects.address import AddressCreateModel, AddressResponseModel +from tabulate import tabulate + from scm_config_clone.utilities.settings import load_settings -logger = logging.getLogger(__name__) + +def parse_csv_option(value: Optional[str]) -> Optional[List[str]]: + """ + Parse a comma-separated string into a list of stripped strings. + + This utility function converts an option like "val1,val2,val3" + into ["val1", "val2", "val3"]. If the input string is None + or empty, it returns None. + + Args: + value: The raw input string from a CLI option, possibly + containing comma-separated values. + + Returns: + A list of strings if values are present, or None if the input + is empty or None. + """ + if not value: + return None + return [v.strip() for v in value.split(",") if v.strip()] + + +def build_create_params(src_obj: AddressResponseModel, folder: str) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new address object. 
+ + Given an existing AddressResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new address in the destination tenant. It identifies the address type + (e.g., ip_netmask, fqdn) and uses `model_dump` on a Pydantic model to ensure + only valid, explicitly set fields are included. Fields that are unset or None + are automatically excluded. + + Args: + src_obj: The AddressResponseModel representing the source address object. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `Address.create()`. + This dictionary is validated and pruned by AddressCreateModel. + + Raises: + ValueError: If the source object does not contain a valid address type. + """ + data = { + "name": src_obj.name, + "folder": folder, + "description": src_obj.description if src_obj.description is not None else None, + "tag": src_obj.tag if src_obj.tag else [], + } + + # Determine which address type is set + if src_obj.ip_netmask: + data["ip_netmask"] = src_obj.ip_netmask + elif src_obj.fqdn: + data["fqdn"] = src_obj.fqdn + elif src_obj.ip_range: + data["ip_range"] = src_obj.ip_range + elif src_obj.ip_wildcard: + data["ip_wildcard"] = src_obj.ip_wildcard + else: + raise ValueError(f"No valid address type found for {src_obj.name}") + + create_model = AddressCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) -def clone_address_objects( +def addresses( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning addresses.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets 
to exclude from the retrieval.", + ), + exclude_devices: str = typer.Option( + None, + "--exclude-devices", + help="Comma-separated list of devices to exclude from the retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + # Existing flag that already was present + auto_approve: bool = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + is_flag=True, + ), + # New flags introduced + create_report: bool = typer.Option( + None, + "--create-report", + "-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + is_flag=True, + ), + quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + "--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), settings_file: str = typer.Option( - ".secrets.yaml", + "settings.yaml", "--settings-file", "-s", - help="Path to the settings YAML file.", + help="Path to the YAML settings file containing tenant credentials and configuration.", ), ): """ - Clone address objects from the source to the destination SCM tenant using the pan-scm-sdk. + Clone address objects from a source SCM tenant to a destination SCM tenant. - Steps: - 1. Load SCM settings from a YAML file. - 2. Authenticate with both source and destination tenants using provided credentials. - 3. Retrieve all address objects from the source tenant. - 4. Create these address objects in the destination tenant. - 5. Commit the changes on the destination tenant. 
+ This Typer CLI command automates the process of retrieving address objects + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve address objects from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. Args: - settings_file (str): Path to the YAML settings file. + folder: The source folder from which to retrieve address objects. + exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + exclude_devices: Comma-separated device names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. + auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. + dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). + logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. 
- Errors: - typer.Exit: Exits the CLI if authentication, retrieval, creation, or commit fails. + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. """ - typer.echo("Starting address objects migration...") + typer.echo("🚀 Starting address objects cloning...") - # Load settings + # Load settings from file settings = load_settings(settings_file) - logger.info(f"Loaded settings: {settings}") - # --- Authenticate with source tenant --- + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + # If a flag is provided (not None), use the provided value; otherwise, use settings default. + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + exclude_devices_list = parse_csv_option(exclude_devices) + + # Authenticate and retrieve from source try: source_creds = settings["source_scm"] source_client = Scm( client_id=source_creds["client_id"], client_secret=source_creds["client_secret"], tsg_id=source_creds["tenant"], - log_level="debug", + log_level=logging_level, ) logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") except (AuthenticationError, KeyError) as e: @@ -65,26 +235,79 @@ def clone_address_objects( logger.error(f"Unexpected error with source authentication: {e}") raise typer.Exit(code=1) - # --- Retrieve address 
objects from source --- + # Retrieve address objects from the source try: - source_addresses = Address(source_client) - source_folder = source_creds["folder"] - address_objects = source_addresses.list(folder=source_folder) + source_addresses = Address(source_client, max_limit=5000) + address_objects = source_addresses.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + exclude_devices=exclude_devices_list, + ) logger.info( - f"Retrieved {len(address_objects)} address objects from source tenant." + f"Retrieved {len(address_objects)} address objects from source tenant folder '{folder}'." ) except Exception as e: logger.error(f"Error retrieving address objects from source: {e}") raise typer.Exit(code=1) - # --- Authenticate with destination tenant --- + # If not quiet_mode, display retrieved objects + if address_objects and not quiet_mode: + addr_table = [] + for addr in address_objects: + if addr.ip_netmask: + addr_value = addr.ip_netmask + elif addr.fqdn: + addr_value = addr.fqdn + elif addr.ip_range: + addr_value = addr.ip_range + elif addr.ip_wildcard: + addr_value = addr.ip_wildcard + else: + addr_value = "Unknown Type" + + addr_table.append( + [ + addr.name, + addr.folder, + addr_value, + addr.description or "", + ] + ) + + typer.echo( + tabulate( + addr_table, + headers=[ + "Name", + "Folder", + "Value", + "Description", + ], + tablefmt="fancy_grid", + ) + ) + elif not address_objects: + typer.echo("No address objects found in the source folder.") + + # Prompt for confirmation if not auto-approved and objects found + if address_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these objects in the destination tenant?" 
+ ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant try: dest_creds = settings["destination_scm"] destination_client = Scm( client_id=dest_creds["client_id"], client_secret=dest_creds["client_secret"], tsg_id=dest_creds["tenant"], - log_level="INFO", + log_level=logging_level, ) logger.info( f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" @@ -96,40 +319,34 @@ def clone_address_objects( logger.error(f"Unexpected error with destination authentication: {e}") raise typer.Exit(code=1) - # --- Create address objects in the destination --- - destination_addresses = Address(destination_client) - destination_folder = dest_creds["folder"] + # Create address objects in destination + destination_addresses = Address( + destination_client, + max_limit=5000, + ) + created_objs: List[AddressResponseModel] = [] + error_objects: List[List[str]] = [] - created_objects: List[str] = [] for src_obj in address_objects: - # Build create parameters based on source object's attributes - create_params = { - "name": src_obj.name, - "folder": destination_folder, - "description": getattr(src_obj, "description", None), - "tag": getattr(src_obj, "tag", []), - } - - # Determine the address type and assign the appropriate field - if getattr(src_obj, "ip_netmask", None): - create_params["ip_netmask"] = src_obj.ip_netmask - elif getattr(src_obj, "fqdn", None): - create_params["fqdn"] = src_obj.fqdn - elif getattr(src_obj, "ip_range", None): - create_params["ip_range"] = src_obj.ip_range - elif getattr(src_obj, "ip_wildcard", None): - create_params["ip_wildcard"] = src_obj.ip_wildcard - else: - # If no recognizable address type is found, skip this object - logger.warning( - f"Skipping {src_obj.name}: No valid address type (ip_netmask, fqdn, ip_range, ip_wildcard)." 
+ try: + create_params = build_create_params( + src_obj, + folder, + ) + except ValueError as ve: + error_objects.append( + [ + src_obj.name, + str(ve), + ] ) continue - # Create the address object + # If dry_run is True, we might skip actual creation in the future. + # For now, just proceed as normal until logic is implemented. try: new_obj = destination_addresses.create(create_params) - created_objects.append(new_obj.name) + created_objs.append(new_obj) logger.info(f"Created address object in destination: {new_obj.name}") except ( InvalidObjectError, @@ -137,22 +354,68 @@ def clone_address_objects( NameNotUniqueError, ObjectNotPresentError, ) as e: - logger.error( - f"Error creating address object {src_obj.name} in destination: {e}" - ) - # Decide whether to continue or exit; continuing for now + error_objects.append([src_obj.name, str(e)]) continue except Exception as e: - logger.error( - f"Unexpected error creating address object {src_obj.name} in destination: {e}" - ) + error_objects.append([src_obj.name, str(e)]) continue - # --- Commit changes on destination --- - if created_objects: + # If not quiet_mode, display results + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following address objects:") + created_table = [] + for obj in created_objs: + if obj.ip_netmask: + value = obj.ip_netmask + elif obj.fqdn: + value = obj.fqdn + elif obj.ip_range: + value = obj.ip_range + elif obj.ip_wildcard: + value = obj.ip_wildcard + else: + value = "Unknown Type" + + created_table.append( + [ + obj.name, + obj.folder, + value, + obj.description or "", + ] + ) + + typer.echo( + tabulate( + created_table, + headers=[ + "Name", + "Folder", + "Value", + "Description", + ], + tablefmt="fancy_grid", + ) + ) + + if error_objects and not quiet_mode: + typer.echo("\nSome address objects failed to be created:") + typer.echo( + tabulate( + error_objects, + headers=[ + "Object Name", + "Error", + ], + tablefmt="fancy_grid", + ) + ) + + # Commit changes 
if requested and objects were created + if commit_and_push and created_objs: try: commit_params = { - "folders": [destination_folder], + "folders": [folder], "description": "Cloned address objects", "sync": True, } @@ -165,6 +428,14 @@ def clone_address_objects( logger.error(f"Error committing address objects in destination: {e}") raise typer.Exit(code=1) else: - logger.info("No new address objects were created, skipping commit.") + if created_objs and not commit_and_push: + logger.info( + "Objects created, but --commit-and-push not specified, skipping commit." + ) + else: + logger.info("No new address objects were created, skipping commit.") + + # If create_report is True, in the future we will append results to 'result.csv' + # For now, logic can be implemented later. - typer.echo("Address objects migration completed successfully.") + typer.echo("🎉 Address objects cloning completed successfully! 🎉") diff --git a/scm_config_clone/commands/objects/tag.py b/scm_config_clone/commands/objects/tag.py new file mode 100644 index 0000000..7c8022e --- /dev/null +++ b/scm_config_clone/commands/objects/tag.py @@ -0,0 +1,334 @@ +# scm_config_clone/commands/objects/tag.py + +import logging +from typing import List, Optional, Any + +import typer +from scm.client import Scm +from scm.config.objects import Tag +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from tabulate import tabulate + +from scm_config_clone.utilities.settings import load_settings + + +def parse_csv_option(value: Optional[str]) -> Optional[List[str]]: + """ + Parse a comma-separated string into a list of stripped strings. + + This utility function converts options like "val1,val2,val3" + into ["val1", "val2", "val3"]. If the input is None or empty, + returns None. + + Args: + value: The raw input string from a CLI option. + + Returns: + A list of strings if values are present, or None if empty/None. 
+ """ + if not value: + return None + return [v.strip() for v in value.split(",") if v.strip()] + + +def tags( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder from which to clone tag objects.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets to exclude from retrieval.", + ), + exclude_devices: str = typer.Option( + None, + "--exclude-devices", + help="Comma-separated list of devices to exclude from retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + auto_approve_flag: Optional[bool] = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + is_flag=True, + ), + create_report_flag: Optional[bool] = typer.Option( + None, + "--create-report", + "-R", + help="If set, create/append a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run_flag: Optional[bool] = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying changes.", + is_flag=True, + ), + quiet_mode_flag: Optional[bool] = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level_flag: Optional[str] = typer.Option( + None, + "--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + settings_file: str = typer.Option( + "settings.yaml", + "--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone tag objects from a source 
SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving tag objects + from a specified folder in a source tenant, applying optional filters, + and then creating them in a destination tenant. + + Workflow: + 1. Load configuration and credentials from a YAML settings file. + 2. Override any settings with runtime flags if provided. + 3. Authenticate to the source tenant and retrieve tag objects from the given folder. + 4. Display the retrieved tag objects (if not quiet_mode). If not auto-approved, prompt the user. + 5. Authenticate to the destination tenant and create the retrieved tag objects there. + 6. If `--commit-and-push` is set and objects were created successfully, commit the changes. + 7. Display results and handle optional features (auto_approve, create_report, dry_run, quiet_mode). + + Args: + folder: The folder from which to list and clone tag objects. + exclude_folders: Folders to exclude from source retrieval. + exclude_snippets: Snippets to exclude from source retrieval. + exclude_devices: Devices to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. + auto_approve_flag: Override auto-approve setting; if True, skip confirmation prompt. + create_report_flag: Override create_report setting; if True, record results in 'result.csv'. + dry_run_flag: Override dry_run setting; if True, simulate without making changes. + quiet_mode_flag: Override quiet_mode setting; if True, suppress console output (except logs). + logging_level_flag: Override logging level if provided. + settings_file: Path to the YAML settings file. + + Raises: + typer.Exit: If authentication or retrieval fails, or if user chooses not to proceed. 
+ """ + typer.echo("🚀 Starting tag objects cloning...") + + # Load settings + settings = load_settings(settings_file) + + # Apply fallback logic for runtime flags vs settings + auto_approve = ( + settings["auto_approve"] if auto_approve_flag is None else auto_approve_flag + ) + create_report = ( + settings["create_report"] if create_report_flag is None else create_report_flag + ) + dry_run = settings["dry_run"] if dry_run_flag is None else dry_run_flag + quiet_mode = settings["quiet"] if quiet_mode_flag is None else quiet_mode_flag + + # Determine logging level + if logging_level_flag is None: + logging_level_flag = settings["logging"] + logging_level_flag = logging_level_flag.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level_flag, logging.INFO)) + + # Convert comma-separated strings to lists + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + exclude_devices_list = parse_csv_option(exclude_devices) + + # Authenticate with source tenant + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level_flag, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve tag objects from source + try: + source_tags = Tag(source_client, max_limit=5000) + tag_objects = source_tags.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + exclude_devices=exclude_devices_list, + ) + logger.info( + f"Retrieved {len(tag_objects)} tag objects from source 
tenant folder '{folder}'." + ) + except Exception as e: + logger.error(f"Error retrieving tag objects from source: {e}") + raise typer.Exit(code=1) + + # Display retrieved tags if not quiet_mode + if tag_objects and not quiet_mode: + tag_table = [ + [t.name, t.folder, t.snippet or "", t.device or ""] for t in tag_objects + ] + typer.echo( + tabulate( + tag_table, + headers=["Name", "Folder", "Snippet", "Device"], + tablefmt="fancy_grid", + ) + ) + elif not tag_objects: + typer.echo("No tag objects found in the source folder.") + + # Prompt if not auto-approved and objects exist + if tag_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these tag objects in the destination tenant?" + ) + if not proceed: + typer.echo("Aborting tag objects cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level_flag, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create tag objects in the destination + destination_tags = Tag(destination_client, max_limit=5000) + created_objs: List[Any] = [] + error_objects: List[List[str]] = [] + + # For each tag from source, create in destination + for src_obj in tag_objects: + create_params = { + "name": src_obj.name, + "folder": folder, + "description": getattr(src_obj, "description", None), + } + + # If dry_run: skip in future logic + try: + new_obj = destination_tags.create(create_params) + created_objs.append(new_obj) + 
logger.info(f"Created tag object in destination: {new_obj.name}") + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following tag objects:") + # Assuming new_obj has attributes similar to retrieved tags (name, folder, snippet, device) + created_table = [ + [ + obj.name, + obj.folder, + obj.snippet if getattr(obj, "snippet", None) else "", + obj.device if getattr(obj, "device", None) else "", + ] + for obj in created_objs + ] + + typer.echo( + tabulate( + created_table, + headers=["Name", "Folder", "Snippet", "Device"], + tablefmt="fancy_grid", + ) + ) + + if error_objects and not quiet_mode: + typer.echo("\nSome tag objects failed to be created:") + typer.echo( + tabulate( + error_objects, + headers=["Object Name", "Error"], + tablefmt="fancy_grid", + ) + ) + + # Commit changes if requested + if commit_and_push and created_objs: + try: + commit_params = { + "folders": [folder], + "description": "Cloned tag objects", + "sync": True, + } + result = destination_tags.commit(**commit_params) + job_status = destination_tags.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error(f"Error committing tag objects in destination: {e}") + raise typer.Exit(code=1) + else: + if created_objs and not commit_and_push: + logger.info( + "Tag objects created, but --commit-and-push not specified, skipping commit." + ) + else: + logger.info("No new tag objects were created, skipping commit.") + + # If create_report is True, future logic will append results to 'result.csv' + + typer.echo("🎉 Tag objects cloning completed successfully! 
🎉") diff --git a/scm_config_clone/commands/security_services/__init__.py b/scm_config_clone/commands/security_services/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/scm_config_clone/main.py b/scm_config_clone/main.py index eef66f2..858caf3 100644 --- a/scm_config_clone/main.py +++ b/scm_config_clone/main.py @@ -6,18 +6,23 @@ Provides commands to clone configuration objects between SCM tenants. Commands: -- `clone-address-objects`: Clone address objects. -- `create-secrets-file`: Create authentication file. +- `addresses`: Clone address objects. +- `settings`: Create settings file. +- `tags`: Clone tag objects from source to destination tenant, focusing on a specific folder. Usage: scm-clone [OPTIONS] """ -import typer import logging -from scm_config_clone.commands.create_secrets_file import create_secrets_file -from scm_config_clone.commands.objects import clone_address_objects +import typer + +from scm_config_clone import ( + addresses, + create_settings, + tags, +) # Initialize Typer app app = typer.Typer( @@ -29,16 +34,21 @@ logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) -# Register commands with clearer parameters +# Register commands with explicit names and help text app.command( - name="create-secrets-file", - help="Create a YAML file containing SCM authentication details.", -)(create_secrets_file) + name="settings", + help="Create a YAML file containing the settings of our SCM cloning job (required for authentication).", +)(create_settings) app.command( - name="clone-address-objects", + name="addresses", help="Clone address objects from the source SCM tenant to the destination SCM tenant.", -)(clone_address_objects) +)(addresses) + +app.command( + name="tags", + help="Clone tag objects from the source SCM tenant to the destination SCM tenant, filtered by the specified folder.", +)(tags) if __name__ == "__main__": app() diff --git a/scm_config_clone/utilities/__init__.py 
b/scm_config_clone/utilities/__init__.py deleted file mode 100644 index 9b719e8..0000000 --- a/scm_config_clone/utilities/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# scm_config_clone/utilities/__init__.py - -from .settings import load_settings diff --git a/scm_config_clone/utilities/settings.py b/scm_config_clone/utilities/settings.py index 76ef111..2bbb6b4 100644 --- a/scm_config_clone/utilities/settings.py +++ b/scm_config_clone/utilities/settings.py @@ -1,45 +1,87 @@ -# scm_config_clone/config/settings.py +# scm_config_clone/utilities/settings.py -from dynaconf import Dynaconf -from typing import Dict import logging +from typing import Dict, Any + +import typer +import yaml logger = logging.getLogger(__name__) -def load_settings(settings_file: str) -> Dict[str, Dict[str, str]]: +def load_settings(settings_file: str) -> Dict[str, Any]: """ - Load settings from a YAML configuration file. + Load configuration settings from a YAML file. + + This function reads the provided YAML settings file (e.g., "settings.yaml") and + extracts the following information: + - Source and destination SCM configurations (client_id, client_secret, tenant). + - Logging level. + - Additional boolean options: auto_approve, create_report, dry_run, quiet. - Reads the settings file and extracts source and destination SCM authentication details. + The returned dictionary is structured as follows: + { + "source_scm": { + "client_id": str, + "client_secret": str, + "tenant": str + }, + "destination_scm": { + "client_id": str, + "client_secret": str, + "tenant": str + }, + "logging": str, + "auto_approve": bool, + "create_report": bool, + "dry_run": bool, + "quiet": bool + } Args: - settings_file (str): Path to the YAML settings file. + settings_file (str): Path to the YAML settings file containing SCM credentials + and configuration. - Error: - Exception: Raises an exception if an error occurs during loading. 
+ Raises: + typer.Exit: If an error occurs during file reading or parsing, the function + logs the error and exits. - Return: - Dict[str, Dict[str, str]]: A dictionary containing source and destination SCM configurations. + Returns: + Dict[str, Any]: A dictionary containing all relevant configuration keys + required by the CLI. """ try: - settings = Dynaconf(settings_files=[settings_file]) - source_scm = { - "client_id": settings.oauth.source.client_id, - "client_secret": settings.oauth.source.client_secret, - "tenant": settings.oauth.source.tsg, - "folder": settings.oauth.source.folder, - } - destination_scm = { - "client_id": settings.oauth.destination.client_id, - "client_secret": settings.oauth.destination.client_secret, - "tenant": settings.oauth.destination.tsg, - "folder": settings.oauth.destination.folder, - } - return { - "source_scm": source_scm, - "destination_scm": destination_scm, + with open(settings_file, "r") as f: + data = yaml.safe_load(f) or {} + + # Safely retrieve nested keys with defaults + source = data.get("oauth", {}).get("source", {}) + destination = data.get("oauth", {}).get("destination", {}) + + # Construct the dictionary + config = { + "source_scm": { + "client_id": source.get("client_id"), + "client_secret": source.get("client_secret"), + "tenant": source.get("tsg"), + }, + "destination_scm": { + "client_id": destination.get("client_id"), + "client_secret": destination.get("client_secret"), + "tenant": destination.get("tsg"), + }, + "logging": data.get("logging", "INFO"), + "auto_approve": data.get("auto_approve", False), + "create_report": data.get("create_report", False), + "dry_run": data.get("dry_run", False), + "quiet": data.get("quiet", False), } + + return config + except Exception as e: logger.error(f"Error loading settings from {settings_file}: {e}") - raise + typer.echo( + f"❌ Error loading configuration. Please check that '{settings_file}' is accessible and properly formatted." 
+ ) + raise typer.Exit(code=1) From e5233b4acd50c8523f6618dc215a35a7a314b463 Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sun, 15 Dec 2024 06:36:04 -0600 Subject: [PATCH 06/18] Refactor `parse_csv_option` to a dedicated utilities module. Extracted `parse_csv_option` into a new `utilities/parse_csv.py` module for better reusability and modularity. Updated imports in affected files to reflect this change. This improves code organization and reduces duplication. --- scm_config_clone/commands/objects/address.py | 23 +------------------ scm_config_clone/commands/objects/tag.py | 21 +---------------- scm_config_clone/utilities/__init__.py | 4 ++++ scm_config_clone/utilities/parse_csv.py | 24 ++++++++++++++++++++ 4 files changed, 30 insertions(+), 42 deletions(-) create mode 100644 scm_config_clone/utilities/__init__.py create mode 100644 scm_config_clone/utilities/parse_csv.py diff --git a/scm_config_clone/commands/objects/address.py b/scm_config_clone/commands/objects/address.py index 6b5fd69..02b6e8c 100644 --- a/scm_config_clone/commands/objects/address.py +++ b/scm_config_clone/commands/objects/address.py @@ -16,28 +16,7 @@ from scm.models.objects.address import AddressCreateModel, AddressResponseModel from tabulate import tabulate -from scm_config_clone.utilities.settings import load_settings - - -def parse_csv_option(value: Optional[str]) -> Optional[List[str]]: - """ - Parse a comma-separated string into a list of stripped strings. - - This utility function converts an option like "val1,val2,val3" - into ["val1", "val2", "val3"]. If the input string is None - or empty, it returns None. - - Args: - value: The raw input string from a CLI option, possibly - containing comma-separated values. - - Returns: - A list of strings if values are present, or None if the input - is empty or None. 
- """ - if not value: - return None - return [v.strip() for v in value.split(",") if v.strip()] +from scm_config_clone.utilities import load_settings, parse_csv_option def build_create_params(src_obj: AddressResponseModel, folder: str) -> Dict[str, Any]: diff --git a/scm_config_clone/commands/objects/tag.py b/scm_config_clone/commands/objects/tag.py index 7c8022e..06e1e1d 100644 --- a/scm_config_clone/commands/objects/tag.py +++ b/scm_config_clone/commands/objects/tag.py @@ -15,26 +15,7 @@ ) from tabulate import tabulate -from scm_config_clone.utilities.settings import load_settings - - -def parse_csv_option(value: Optional[str]) -> Optional[List[str]]: - """ - Parse a comma-separated string into a list of stripped strings. - - This utility function converts options like "val1,val2,val3" - into ["val1", "val2", "val3"]. If the input is None or empty, - returns None. - - Args: - value: The raw input string from a CLI option. - - Returns: - A list of strings if values are present, or None if empty/None. - """ - if not value: - return None - return [v.strip() for v in value.split(",") if v.strip()] +from scm_config_clone.utilities import load_settings, parse_csv_option def tags( diff --git a/scm_config_clone/utilities/__init__.py b/scm_config_clone/utilities/__init__.py new file mode 100644 index 0000000..ccc297e --- /dev/null +++ b/scm_config_clone/utilities/__init__.py @@ -0,0 +1,4 @@ +# scm_config_clone/utilities/__init__.py + +from .parse_csv import parse_csv_option +from .settings import load_settings diff --git a/scm_config_clone/utilities/parse_csv.py b/scm_config_clone/utilities/parse_csv.py new file mode 100644 index 0000000..6dc4c60 --- /dev/null +++ b/scm_config_clone/utilities/parse_csv.py @@ -0,0 +1,24 @@ +# scm_config_clone/utilities/parse_csv.py + +from typing import Optional, List + + +def parse_csv_option(value: Optional[str]) -> Optional[List[str]]: + """ + Parse a comma-separated string into a list of stripped strings. 
+ + This utility function converts an option like "val1,val2,val3" + into ["val1", "val2", "val3"]. If the input string is None + or empty, it returns None. + + Args: + value: The raw input string from a CLI option, possibly + containing comma-separated values. + + Returns: + A list of strings if values are present, or None if the input + is empty or None. + """ + if not value: + return None + return [v.strip() for v in value.split(",") if v.strip()] From 3720ff112c1e6ccd0e0cbe05a65214f0412994b3 Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sun, 15 Dec 2024 08:07:05 -0600 Subject: [PATCH 07/18] Refactor CLI command and improve tag creation handling Simplified CLI flag names, updated Docstrings for clarity, and enhanced error handling in tag creation logic. Added a `build_create_params` helper for cleaner parameter construction and streamlined CSV parsing and settings overrides. --- scm_config_clone/commands/objects/tag.py | 175 +++++++++++++---------- 1 file changed, 103 insertions(+), 72 deletions(-) diff --git a/scm_config_clone/commands/objects/tag.py b/scm_config_clone/commands/objects/tag.py index 06e1e1d..772aaf8 100644 --- a/scm_config_clone/commands/objects/tag.py +++ b/scm_config_clone/commands/objects/tag.py @@ -1,7 +1,5 @@ -# scm_config_clone/commands/objects/tag.py - import logging -from typing import List, Optional, Any +from typing import List, Optional, Any, Dict import typer from scm.client import Scm @@ -13,32 +11,63 @@ NameNotUniqueError, ObjectNotPresentError, ) +from scm.models.objects.tag import TagCreateModel, TagResponseModel from tabulate import tabulate from scm_config_clone.utilities import load_settings, parse_csv_option +def build_create_params(src_obj: TagResponseModel, folder: str) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new tag object. 
+ + Given an existing TagResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new tag in the destination tenant. It uses `model_dump` on a Pydantic model + to ensure only valid, explicitly set fields are included. + + Args: + src_obj: The TagResponseModel representing the source tag object. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `Tag.create()`. + This dictionary is validated and pruned by TagCreateModel. + """ + data = { + "name": src_obj.name, + "folder": folder, + "comments": src_obj.comments if src_obj.comments is not None else None, + } + + create_model = TagCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) + + def tags( folder: Optional[str] = typer.Option( None, "--folder", prompt="Please enter the folder name", - help="The folder from which to clone tag objects.", + help="The folder to focus on when retrieving and cloning tags.", ), exclude_folders: str = typer.Option( None, "--exclude-folders", - help="Comma-separated list of folders to exclude from retrieval.", + help="Comma-separated list of folders to exclude from the retrieval.", ), exclude_snippets: str = typer.Option( None, "--exclude-snippets", - help="Comma-separated list of snippets to exclude from retrieval.", + help="Comma-separated list of snippets to exclude from the retrieval.", ), exclude_devices: str = typer.Option( None, "--exclude-devices", - help="Comma-separated list of devices to exclude from retrieval.", + help="Comma-separated list of devices to exclude from the retrieval.", ), commit_and_push: bool = typer.Option( False, @@ -46,35 +75,35 @@ def tags( help="If set, commit the changes on the destination tenant after object creation.", is_flag=True, ), - auto_approve_flag: Optional[bool] = typer.Option( + auto_approve: bool = typer.Option( None, 
"--auto-approve", "-A", help="If set, skip the confirmation prompt and automatically proceed with creation.", is_flag=True, ), - create_report_flag: Optional[bool] = typer.Option( + create_report: bool = typer.Option( None, "--create-report", "-R", - help="If set, create/append a 'result.csv' file with the task results.", + help="If set, create or append to a 'result.csv' file with the task results.", is_flag=True, ), - dry_run_flag: Optional[bool] = typer.Option( + dry_run: bool = typer.Option( None, "--dry-run", "-D", - help="If set, perform a dry run without applying changes.", + help="If set, perform a dry run without applying any changes.", is_flag=True, ), - quiet_mode_flag: Optional[bool] = typer.Option( + quiet_mode: bool = typer.Option( None, "--quiet-mode", "-Q", help="If set, hide all console output (except log messages).", is_flag=True, ), - logging_level_flag: Optional[str] = typer.Option( + logging_level: str = typer.Option( None, "--logging-level", "-L", @@ -91,70 +120,68 @@ def tags( Clone tag objects from a source SCM tenant to a destination SCM tenant. This Typer CLI command automates the process of retrieving tag objects - from a specified folder in a source tenant, applying optional filters, - and then creating them in a destination tenant. + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. - Workflow: - 1. Load configuration and credentials from a YAML settings file. - 2. Override any settings with runtime flags if provided. + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. 3. Authenticate to the source tenant and retrieve tag objects from the given folder. - 4. Display the retrieved tag objects (if not quiet_mode). If not auto-approved, prompt the user. - 5. 
Authenticate to the destination tenant and create the retrieved tag objects there. - 6. If `--commit-and-push` is set and objects were created successfully, commit the changes. - 7. Display results and handle optional features (auto_approve, create_report, dry_run, quiet_mode). + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. Args: - folder: The folder from which to list and clone tag objects. - exclude_folders: Folders to exclude from source retrieval. - exclude_snippets: Snippets to exclude from source retrieval. - exclude_devices: Devices to exclude from source retrieval. + folder: The source folder from which to retrieve tag objects. + exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + exclude_devices: Comma-separated device names to exclude from source retrieval. commit_and_push: If True, commit changes in the destination tenant after creation. - auto_approve_flag: Override auto-approve setting; if True, skip confirmation prompt. - create_report_flag: Override create_report setting; if True, record results in 'result.csv'. - dry_run_flag: Override dry_run setting; if True, simulate without making changes. - quiet_mode_flag: Override quiet_mode setting; if True, suppress console output (except logs). - logging_level_flag: Override logging level if provided. - settings_file: Path to the YAML settings file. + auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. 
+ dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). + logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. Raises: - typer.Exit: If authentication or retrieval fails, or if user chooses not to proceed. + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. """ typer.echo("🚀 Starting tag objects cloning...") - # Load settings + # Load settings from file settings = load_settings(settings_file) - # Apply fallback logic for runtime flags vs settings - auto_approve = ( - settings["auto_approve"] if auto_approve_flag is None else auto_approve_flag - ) + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve create_report = ( - settings["create_report"] if create_report_flag is None else create_report_flag + settings["create_report"] if create_report is None else create_report ) - dry_run = settings["dry_run"] if dry_run_flag is None else dry_run_flag - quiet_mode = settings["quiet"] if quiet_mode_flag is None else quiet_mode_flag + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode - # Determine logging level - if logging_level_flag is None: - logging_level_flag = settings["logging"] - logging_level_flag = logging_level_flag.upper() + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() logger = logging.getLogger(__name__) - logger.setLevel(getattr(logging, logging_level_flag, logging.INFO)) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) - # Convert comma-separated strings to lists + # Parse 
CSV options exclude_folders_list = parse_csv_option(exclude_folders) exclude_snippets_list = parse_csv_option(exclude_snippets) exclude_devices_list = parse_csv_option(exclude_devices) - # Authenticate with source tenant + # Authenticate and retrieve from source try: source_creds = settings["source_scm"] source_client = Scm( client_id=source_creds["client_id"], client_secret=source_creds["client_secret"], tsg_id=source_creds["tenant"], - log_level=logging_level_flag, + log_level=logging_level, ) logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") except (AuthenticationError, KeyError) as e: @@ -184,12 +211,19 @@ def tags( # Display retrieved tags if not quiet_mode if tag_objects and not quiet_mode: tag_table = [ - [t.name, t.folder, t.snippet or "", t.device or ""] for t in tag_objects + [ + t.name, + t.folder, + t.comments or "", + t.snippet or "", + t.device or "", + ] + for t in tag_objects ] typer.echo( tabulate( tag_table, - headers=["Name", "Folder", "Snippet", "Device"], + headers=["Name", "Folder", "Description", "Snippet", "Device"], tablefmt="fancy_grid", ) ) @@ -199,10 +233,10 @@ def tags( # Prompt if not auto-approved and objects exist if tag_objects and not auto_approve: proceed = typer.confirm( - "Do you want to proceed with creating these tag objects in the destination tenant?" + "Do you want to proceed with creating these objects in the destination tenant?" 
) if not proceed: - typer.echo("Aborting tag objects cloning operation.") + typer.echo("Aborting cloning operation.") raise typer.Exit(code=0) # Authenticate with destination tenant @@ -212,7 +246,7 @@ def tags( client_id=dest_creds["client_id"], client_secret=dest_creds["client_secret"], tsg_id=dest_creds["tenant"], - log_level=logging_level_flag, + log_level=logging_level, ) logger.info( f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" @@ -224,20 +258,19 @@ def tags( logger.error(f"Unexpected error with destination authentication: {e}") raise typer.Exit(code=1) - # Create tag objects in the destination + # Create tag objects in destination destination_tags = Tag(destination_client, max_limit=5000) - created_objs: List[Any] = [] + created_objs: List[TagResponseModel] = [] error_objects: List[List[str]] = [] - # For each tag from source, create in destination for src_obj in tag_objects: - create_params = { - "name": src_obj.name, - "folder": folder, - "description": getattr(src_obj, "description", None), - } + try: + create_params = build_create_params(src_obj, folder) + except ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue - # If dry_run: skip in future logic + # If dry_run is True, we might skip actual creation in the future. 
try: new_obj = destination_tags.create(create_params) created_objs.append(new_obj) @@ -257,13 +290,13 @@ def tags( # Display results if not quiet_mode if created_objs and not quiet_mode: typer.echo("\nSuccessfully created the following tag objects:") - # Assuming new_obj has attributes similar to retrieved tags (name, folder, snippet, device) created_table = [ [ obj.name, obj.folder, - obj.snippet if getattr(obj, "snippet", None) else "", - obj.device if getattr(obj, "device", None) else "", + obj.comments or "", + obj.snippet or "", + obj.device or "", ] for obj in created_objs ] @@ -271,7 +304,7 @@ def tags( typer.echo( tabulate( created_table, - headers=["Name", "Folder", "Snippet", "Device"], + headers=["Name", "Folder", "Description", "Snippet", "Device"], tablefmt="fancy_grid", ) ) @@ -286,7 +319,7 @@ def tags( ) ) - # Commit changes if requested + # Commit changes if requested and objects were created if commit_and_push and created_objs: try: commit_params = { @@ -305,11 +338,9 @@ def tags( else: if created_objs and not commit_and_push: logger.info( - "Tag objects created, but --commit-and-push not specified, skipping commit." + "Objects created, but --commit-and-push not specified, skipping commit." ) else: logger.info("No new tag objects were created, skipping commit.") - # If create_report is True, future logic will append results to 'result.csv' - typer.echo("🎉 Tag objects cloning completed successfully! 🎉") From 56d9b9f8d4e7caf238124bae82dc29971dddfe5f Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sun, 15 Dec 2024 09:02:22 -0600 Subject: [PATCH 08/18] Refactor create_settings_file module location. Moved create_settings_file to the utilities directory for better organization. Updated imports accordingly to reflect the new location. Added address_groups to the module imports for improved object handling. 
--- scm_config_clone/__init__.py | 3 ++- .../commands/{ => utilities}/create_settings_file.py | 0 2 files changed, 2 insertions(+), 1 deletion(-) rename scm_config_clone/commands/{ => utilities}/create_settings_file.py (100%) diff --git a/scm_config_clone/__init__.py b/scm_config_clone/__init__.py index aa8eb19..7d3cde9 100644 --- a/scm_config_clone/__init__.py +++ b/scm_config_clone/__init__.py @@ -1,5 +1,6 @@ # scm_config_clone/__init__.py -from .commands.create_settings_file import create_settings from .commands.objects.address import addresses +from .commands.objects.address_groups import address_groups from .commands.objects.tag import tags +from .commands.utilities.create_settings_file import create_settings diff --git a/scm_config_clone/commands/create_settings_file.py b/scm_config_clone/commands/utilities/create_settings_file.py similarity index 100% rename from scm_config_clone/commands/create_settings_file.py rename to scm_config_clone/commands/utilities/create_settings_file.py From 0d652fa609296af4ab216cd32fd3b077515668ec Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sun, 15 Dec 2024 09:02:34 -0600 Subject: [PATCH 09/18] Add address group cloning functionality to CLI Introduce a new command for cloning address groups from a source to a destination SCM tenant. This includes retrieving, filtering, and creating address group objects, with options for dry-run, auto-approve, and reporting. Updated `main.py` to register the new command in the CLI. 
--- .../commands/objects/address_groups.py | 388 ++++++++++++++++++ scm_config_clone/main.py | 30 +- 2 files changed, 414 insertions(+), 4 deletions(-) create mode 100644 scm_config_clone/commands/objects/address_groups.py diff --git a/scm_config_clone/commands/objects/address_groups.py b/scm_config_clone/commands/objects/address_groups.py new file mode 100644 index 0000000..4b88430 --- /dev/null +++ b/scm_config_clone/commands/objects/address_groups.py @@ -0,0 +1,388 @@ +# scm_config_clone/commands/objects/address_group.py + +import logging +from typing import List, Optional, Any, Dict + +import typer +from scm.client import Scm +from scm.config.objects import AddressGroup +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm.models.objects.address_group import ( + AddressGroupCreateModel, + AddressGroupResponseModel, +) +from tabulate import tabulate + +from scm_config_clone.utilities import load_settings, parse_csv_option + + +def build_create_params( + src_obj: AddressGroupResponseModel, folder: str +) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new address group object. + + Given an existing AddressGroupResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new address group in the destination tenant. It identifies the group type + (static or dynamic) and uses `model_dump` on a Pydantic model to ensure + only valid, explicitly set fields are included. + + Args: + src_obj: The AddressGroupResponseModel representing the source address group object. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `AddressGroup.create()`. + This dictionary is validated and pruned by AddressGroupCreateModel. 
+ + Raises: + ValueError: If the source object does not contain a valid group type. + """ + data = { + "name": src_obj.name, + "folder": folder, + "description": src_obj.description if src_obj.description is not None else None, + "tag": src_obj.tag if src_obj.tag else [], + } + + # Determine which group type is set + if src_obj.static: + data["static"] = src_obj.static + elif src_obj.dynamic: + data["dynamic"] = {"filter": src_obj.dynamic.filter} + else: + raise ValueError(f"No valid group type found for {src_obj.name}") + + create_model = AddressGroupCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) + + +def address_groups( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning address groups.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets to exclude from the retrieval.", + ), + exclude_devices: str = typer.Option( + None, + "--exclude-devices", + help="Comma-separated list of devices to exclude from the retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + auto_approve: bool = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + is_flag=True, + ), + create_report: bool = typer.Option( + None, + "--create-report", + "-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + is_flag=True, 
+ ), + quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + "--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + settings_file: str = typer.Option( + "settings.yaml", + "--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone address group objects from a source SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving address group objects + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve address group objects from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. + + Args: + folder: The source folder from which to retrieve address group objects. + exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + exclude_devices: Comma-separated device names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. 
+ auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. + dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). + logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. + + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. + """ + typer.echo("🚀 Starting address group objects cloning...") + + # Load settings from file + settings = load_settings(settings_file) + + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + exclude_devices_list = parse_csv_option(exclude_devices) + + # Authenticate and retrieve from source + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + except 
(AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve address group objects from source + try: + source_address_groups = AddressGroup(source_client, max_limit=5000) + address_group_objects = source_address_groups.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + exclude_devices=exclude_devices_list, + ) + logger.info( + f"Retrieved {len(address_group_objects)} address group objects from source tenant folder '{folder}'." + ) + except Exception as e: + logger.error(f"Error retrieving address group objects from source: {e}") + raise typer.Exit(code=1) + + # Display retrieved groups if not quiet_mode + if address_group_objects and not quiet_mode: + group_table = [] + for group in address_group_objects: + if group.static: + group_type = "Static" + group_value = ", ".join(group.static) + elif group.dynamic: + group_type = "Dynamic" + group_value = group.dynamic.filter + else: + group_type = "Unknown" + group_value = "N/A" + + group_table.append( + [ + group.name, + group.folder, + group_type, + group_value, + group.description or "", + ] + ) + + typer.echo( + tabulate( + group_table, + headers=["Name", "Folder", "Type", "Value", "Description"], + tablefmt="fancy_grid", + ) + ) + elif not address_group_objects: + typer.echo("No address group objects found in the source folder.") + + # Prompt if not auto-approved and objects exist + if address_group_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these objects in the destination tenant?" 
+ ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create address group objects in destination + destination_address_groups = AddressGroup(destination_client, max_limit=5000) + created_objs: List[AddressGroupResponseModel] = [] + error_objects: List[List[str]] = [] + + for src_obj in address_group_objects: + try: + create_params = build_create_params(src_obj, folder) + except ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue + + try: + new_obj = destination_address_groups.create(create_params) + created_objs.append(new_obj) + logger.info(f"Created address group object in destination: {new_obj.name}") + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following address group objects:") + created_table = [] + for obj in created_objs: + if obj.static: + group_type = "Static" + group_value = ", ".join(obj.static) + elif obj.dynamic: + group_type = "Dynamic" + group_value = obj.dynamic.filter + else: + group_type = "Unknown" + group_value = "N/A" + + 
created_table.append( + [ + obj.name, + obj.folder, + group_type, + group_value, + obj.description or "", + ] + ) + + typer.echo( + tabulate( + created_table, + headers=["Name", "Folder", "Type", "Value", "Description"], + tablefmt="fancy_grid", + ) + ) + + if error_objects and not quiet_mode: + typer.echo("\nSome address group objects failed to be created:") + typer.echo( + tabulate( + error_objects, + headers=["Object Name", "Error"], + tablefmt="fancy_grid", + ) + ) + + # Commit changes if requested and objects were created + if commit_and_push and created_objs: + try: + commit_params = { + "folders": [folder], + "description": "Cloned address group objects", + "sync": True, + } + result = destination_address_groups.commit(**commit_params) + job_status = destination_address_groups.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error(f"Error committing address group objects in destination: {e}") + raise typer.Exit(code=1) + else: + if created_objs and not commit_and_push: + logger.info( + "Objects created, but --commit-and-push not specified, skipping commit." + ) + else: + logger.info("No new address group objects were created, skipping commit.") + + typer.echo("🎉 Address group objects cloning completed successfully! 
🎉") diff --git a/scm_config_clone/main.py b/scm_config_clone/main.py index 858caf3..255112b 100644 --- a/scm_config_clone/main.py +++ b/scm_config_clone/main.py @@ -20,6 +20,7 @@ from scm_config_clone import ( addresses, + address_groups, create_settings, tags, ) @@ -34,21 +35,42 @@ logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) -# Register commands with explicit names and help text +# --------------------------------------------------------------------------------------------------------------------- +# scm-clone Configuration +# --------------------------------------------------------------------------------------------------------------------- + +# Create a `settings.yaml` file with configuration needed to accomplish our tasks (required one-time setup) app.command( name="settings", - help="Create a YAML file containing the settings of our SCM cloning job (required for authentication).", + help="Create a `settings.yaml` file with configuration needed to accomplish our tasks (required one-time setup).", )(create_settings) +# --------------------------------------------------------------------------------------------------------------------- +# Objects +# --------------------------------------------------------------------------------------------------------------------- + +# Addresses app.command( name="addresses", - help="Clone address objects from the source SCM tenant to the destination SCM tenant.", + help="Clone addresses.", )(addresses) +# Address Groups +app.command( + name="address-groups", + help="Clone address groups.", +)(address_groups) + +# Tags app.command( name="tags", - help="Clone tag objects from the source SCM tenant to the destination SCM tenant, filtered by the specified folder.", + help="Clone tags.", )(tags) +# --------------------------------------------------------------------------------------------------------------------- +# Security Services +# 
--------------------------------------------------------------------------------------------------------------------- + + if __name__ == "__main__": app() From 305ce64ab9473e07bca9da8ee9681a79079e65f6 Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sun, 15 Dec 2024 09:28:06 -0600 Subject: [PATCH 10/18] Rename address_groups.py to address_group.py Adjusted the filename to ensure consistency with naming conventions. This change helps maintain uniformity across the codebase. --- .../commands/objects/{address_groups.py => address_group.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename scm_config_clone/commands/objects/{address_groups.py => address_group.py} (100%) diff --git a/scm_config_clone/commands/objects/address_groups.py b/scm_config_clone/commands/objects/address_group.py similarity index 100% rename from scm_config_clone/commands/objects/address_groups.py rename to scm_config_clone/commands/objects/address_group.py From cd20a51beba7683fba2e545b79ca85c708cd63e8 Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sun, 15 Dec 2024 09:28:15 -0600 Subject: [PATCH 11/18] Add support for cloning applications via CLI. Introduced functionality to clone application objects from a source SCM tenant to a destination SCM tenant. Updated main CLI file to include the new "applications" command. Refactored imports and added necessary documentation for proper usage. 
--- scm_config_clone/__init__.py | 3 +- .../commands/objects/application.py | 383 ++++++++++++++++++ scm_config_clone/main.py | 7 + 3 files changed, 392 insertions(+), 1 deletion(-) create mode 100644 scm_config_clone/commands/objects/application.py diff --git a/scm_config_clone/__init__.py b/scm_config_clone/__init__.py index 7d3cde9..8fff841 100644 --- a/scm_config_clone/__init__.py +++ b/scm_config_clone/__init__.py @@ -1,6 +1,7 @@ # scm_config_clone/__init__.py from .commands.objects.address import addresses -from .commands.objects.address_groups import address_groups +from .commands.objects.address_group import address_groups +from .commands.objects.application import applications from .commands.objects.tag import tags from .commands.utilities.create_settings_file import create_settings diff --git a/scm_config_clone/commands/objects/application.py b/scm_config_clone/commands/objects/application.py new file mode 100644 index 0000000..608fab2 --- /dev/null +++ b/scm_config_clone/commands/objects/application.py @@ -0,0 +1,383 @@ +# scm_config_clone/commands/objects/application.py + +import logging +from typing import List, Optional, Any, Dict + +import typer +from scm.client import Scm +from scm.config.objects import Application +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm.models.objects.application import ( + ApplicationCreateModel, + ApplicationResponseModel, +) +from tabulate import tabulate + +from scm_config_clone.utilities import load_settings, parse_csv_option + + +def build_create_params( + src_obj: ApplicationResponseModel, + folder: str, +) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new application object. 
+ + Given an existing ApplicationResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new application in the destination tenant. It uses `model_dump` on a Pydantic model + to ensure only valid, explicitly set fields are included. + + Args: + src_obj: The ApplicationResponseModel representing the source application object. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `Application.create()`. + This dictionary is validated and pruned by ApplicationCreateModel. + """ + data = { + "name": src_obj.name, + "folder": folder, + "category": src_obj.category, + "subcategory": src_obj.subcategory, + "technology": src_obj.technology, + "risk": src_obj.risk, + "description": src_obj.description if src_obj.description is not None else None, + "ports": src_obj.ports if src_obj.ports else None, + "evasive": src_obj.evasive, + "pervasive": src_obj.pervasive, + "excessive_bandwidth_use": src_obj.excessive_bandwidth_use, + "used_by_malware": src_obj.used_by_malware, + "transfers_files": src_obj.transfers_files, + "has_known_vulnerabilities": src_obj.has_known_vulnerabilities, + "tunnels_other_apps": src_obj.tunnels_other_apps, + "prone_to_misuse": src_obj.prone_to_misuse, + "no_certifications": src_obj.no_certifications, + } + + create_model = ApplicationCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) + + +def applications( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning applications.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of 
snippets to exclude from the retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + auto_approve: bool = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + is_flag=True, + ), + create_report: bool = typer.Option( + None, + "--create-report", + "-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + is_flag=True, + ), + quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + "--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + settings_file: str = typer.Option( + "settings.yaml", + "--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone application objects from a source SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving application objects + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve application objects from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. 
Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. + + Args: + folder: The source folder from which to retrieve application objects. + exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. + auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. + dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). + logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. + + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. 
+ """ + typer.echo("🚀 Starting application objects cloning...") + + # Load settings from file + settings = load_settings(settings_file) + + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + + # Authenticate and retrieve from source + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve application objects from source + try: + source_applications = Application(source_client, max_limit=5000) + application_objects = source_applications.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + ) + logger.info( + f"Retrieved {len(application_objects)} application objects from source tenant folder '{folder}'." 
+ ) + except Exception as e: + logger.error(f"Error retrieving application objects from source: {e}") + raise typer.Exit(code=1) + + # Display retrieved applications if not quiet_mode + if application_objects and not quiet_mode: + app_table = [ + [ + app.name, + app.folder, + app.category, + app.subcategory, + app.technology, + app.risk, + app.description or "", + ", ".join(app.ports) if app.ports else "", + ] + for app in application_objects + ] + typer.echo( + tabulate( + app_table, + headers=[ + "Name", + "Folder", + "Category", + "Subcategory", + "Technology", + "Risk", + "Description", + "Ports", + ], + tablefmt="fancy_grid", + ) + ) + elif not application_objects: + typer.echo("No application objects found in the source folder.") + + # Prompt if not auto-approved and objects exist + if application_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these objects in the destination tenant?" + ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create application objects in destination + destination_applications = Application(destination_client, max_limit=5000) + created_objs: List[ApplicationResponseModel] = [] + error_objects: List[List[str]] = [] + + for src_obj in application_objects: + try: + create_params = build_create_params(src_obj, folder) + except 
ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue + + try: + new_obj = destination_applications.create(create_params) + created_objs.append(new_obj) + logger.info(f"Created application object in destination: {new_obj.name}") + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following application objects:") + created_table = [ + [ + obj.name, + obj.folder, + obj.category, + obj.subcategory, + obj.technology, + obj.risk, + obj.description or "", + ", ".join(obj.ports) if obj.ports else "", + ] + for obj in created_objs + ] + + typer.echo( + tabulate( + created_table, + headers=[ + "Name", + "Folder", + "Category", + "Subcategory", + "Technology", + "Risk", + "Description", + "Ports", + ], + tablefmt="fancy_grid", + ) + ) + + if error_objects and not quiet_mode: + typer.echo("\nSome application objects failed to be created:") + typer.echo( + tabulate( + error_objects, + headers=["Object Name", "Error"], + tablefmt="fancy_grid", + ) + ) + + # Commit changes if requested and objects were created + if commit_and_push and created_objs: + try: + commit_params = { + "folders": [folder], + "description": "Cloned application objects", + "sync": True, + } + result = destination_applications.commit(**commit_params) + job_status = destination_applications.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error(f"Error committing application objects in destination: {e}") + raise typer.Exit(code=1) + else: + if created_objs and not commit_and_push: + logger.info( + "Objects created, but --commit-and-push not specified, 
skipping commit." + ) + else: + logger.info("No new application objects were created, skipping commit.") + + typer.echo("🎉 Application objects cloning completed successfully! 🎉") diff --git a/scm_config_clone/main.py b/scm_config_clone/main.py index 255112b..bb6c977 100644 --- a/scm_config_clone/main.py +++ b/scm_config_clone/main.py @@ -21,6 +21,7 @@ from scm_config_clone import ( addresses, address_groups, + applications, create_settings, tags, ) @@ -61,6 +62,12 @@ help="Clone address groups.", )(address_groups) +# Applications +app.command( + name="applications", + help="Clone applications.", +)(applications) + # Tags app.command( name="tags", From d44c36562f6afd2f3412325b5eb81b4a427b76dd Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sun, 15 Dec 2024 11:31:40 -0600 Subject: [PATCH 12/18] Add support for cloning application filter objects Introduced functionality to clone application filters between SCM tenants, including exclusion options and configurable settings. Updated CLI commands and integrated the new `application_filters` module for end-to-end cloning workflows. 
--- scm_config_clone/__init__.py | 1 + .../commands/objects/application_filters.py | 371 ++++++++++++++++++ scm_config_clone/main.py | 7 + 3 files changed, 379 insertions(+) create mode 100644 scm_config_clone/commands/objects/application_filters.py diff --git a/scm_config_clone/__init__.py b/scm_config_clone/__init__.py index 8fff841..fcc926c 100644 --- a/scm_config_clone/__init__.py +++ b/scm_config_clone/__init__.py @@ -3,5 +3,6 @@ from .commands.objects.address import addresses from .commands.objects.address_group import address_groups from .commands.objects.application import applications +from .commands.objects.application_filters import application_filters from .commands.objects.tag import tags from .commands.utilities.create_settings_file import create_settings diff --git a/scm_config_clone/commands/objects/application_filters.py b/scm_config_clone/commands/objects/application_filters.py new file mode 100644 index 0000000..299afe4 --- /dev/null +++ b/scm_config_clone/commands/objects/application_filters.py @@ -0,0 +1,371 @@ +# scm_config_clone/commands/objects/application_filters.py + +import logging +from typing import List, Optional, Any, Dict + +import typer +from scm.client import Scm +from scm.config.objects import ApplicationFilters +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm.models.objects.application_filters import ( + ApplicationFiltersCreateModel, + ApplicationFiltersResponseModel, +) +from tabulate import tabulate + +from scm_config_clone.utilities import load_settings, parse_csv_option + + +def build_create_params( + src_obj: ApplicationFiltersResponseModel, folder: str +) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new application filter object. 
+ + Given an existing ApplicationFiltersResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new application filter in the destination tenant. It uses `model_dump` on a Pydantic model + to ensure only valid, explicitly set fields are included. + + Args: + src_obj: The ApplicationFiltersResponseModel representing the source application filter object. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `ApplicationFilters.create()`. + This dictionary is validated and pruned by ApplicationFiltersCreateModel. + """ + data = { + "name": src_obj.name, + "folder": folder, + "category": src_obj.category, + "sub_category": src_obj.sub_category, + "technology": src_obj.technology, + "evasive": src_obj.evasive, + "used_by_malware": src_obj.used_by_malware, + "transfers_files": src_obj.transfers_files, + "has_known_vulnerabilities": src_obj.has_known_vulnerabilities, + "tunnels_other_apps": src_obj.tunnels_other_apps, + "prone_to_misuse": src_obj.prone_to_misuse, + "pervasive": src_obj.pervasive, + "is_saas": src_obj.is_saas, + "new_appid": src_obj.new_appid, + "risk": src_obj.risk, + "saas_certifications": src_obj.saas_certifications, + "saas_risk": src_obj.saas_risk, + } + + create_model = ApplicationFiltersCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) + + +def application_filters( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning application filters.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets to exclude from the 
retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + auto_approve: bool = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + is_flag=True, + ), + create_report: bool = typer.Option( + None, + "--create-report", + "-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + is_flag=True, + ), + quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + "--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + settings_file: str = typer.Option( + "settings.yaml", + "--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone application filter objects from a source SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving application filter objects + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve application filter objects from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. 
Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. + + Args: + folder: The source folder from which to retrieve application filter objects. + exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. + auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. + dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). + logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. + + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. 
+ """ + typer.echo("🚀 Starting application filter objects cloning...") + + # Load settings from file + settings = load_settings(settings_file) + + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + + # Authenticate and retrieve from source + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve application filter objects from source + try: + source_app_filters = ApplicationFilters(source_client, max_limit=5000) + app_filter_objects = source_app_filters.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + ) + logger.info( + f"Retrieved {len(app_filter_objects)} application filter objects from source tenant folder '{folder}'." 
+ ) + except Exception as e: + logger.error(f"Error retrieving application filter objects from source: {e}") + raise typer.Exit(code=1) + + # Display retrieved filters if not quiet_mode + if app_filter_objects and not quiet_mode: + filter_table = [] + for app_filter in app_filter_objects: + filter_table.append( + [ + app_filter.name, + app_filter.folder, + ", ".join(app_filter.category) if app_filter.category else "", + ( + ", ".join(str(r) for r in app_filter.risk) + if app_filter.risk + else "" + ), + app_filter.is_saas, + ] + ) + + typer.echo( + tabulate( + filter_table, + headers=["Name", "Folder", "Categories", "Risk Levels", "SaaS"], + tablefmt="fancy_grid", + ) + ) + elif not app_filter_objects: + typer.echo("No application filter objects found in the source folder.") + + # Prompt if not auto-approved and objects exist + if app_filter_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these objects in the destination tenant?" + ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create application filter objects in destination + destination_app_filters = ApplicationFilters(destination_client, max_limit=5000) + created_objs: List[ApplicationFiltersResponseModel] = [] + error_objects: List[List[str]] = [] + + for src_obj in app_filter_objects: + try: + 
create_params = build_create_params(src_obj, folder) + except ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue + + try: + new_obj = destination_app_filters.create(create_params) + created_objs.append(new_obj) + logger.info( + f"Created application filter object in destination: {new_obj.name}" + ) + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following application filter objects:") + created_table = [] + for obj in created_objs: + created_table.append( + [ + obj.name, + obj.folder, + ", ".join(obj.category) if obj.category else "", + ", ".join(str(r) for r in obj.risk) if obj.risk else "", + obj.is_saas, + ] + ) + + typer.echo( + tabulate( + created_table, + headers=["Name", "Folder", "Categories", "Risk Levels", "SaaS"], + tablefmt="fancy_grid", + ) + ) + + if error_objects and not quiet_mode: + typer.echo("\nSome application filter objects failed to be created:") + typer.echo( + tabulate( + error_objects, + headers=["Object Name", "Error"], + tablefmt="fancy_grid", + ) + ) + + # Commit changes if requested and objects were created + if commit_and_push and created_objs: + try: + commit_params = { + "folders": [folder], + "description": "Cloned application filter objects", + "sync": True, + } + result = destination_app_filters.commit(**commit_params) + job_status = destination_app_filters.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error( + f"Error committing application filter objects in destination: {e}" + ) + raise typer.Exit(code=1) + else: + if created_objs and not commit_and_push: + 
logger.info( + "Objects created, but --commit-and-push not specified, skipping commit." + ) + else: + logger.info( + "No new application filter objects were created, skipping commit." + ) + + typer.echo("🎉 Application filter objects cloning completed successfully! 🎉") diff --git a/scm_config_clone/main.py b/scm_config_clone/main.py index bb6c977..b300075 100644 --- a/scm_config_clone/main.py +++ b/scm_config_clone/main.py @@ -22,6 +22,7 @@ addresses, address_groups, applications, + application_filters, create_settings, tags, ) @@ -68,6 +69,12 @@ help="Clone applications.", )(applications) +# Application Filters +app.command( + name="application-filters", + help="Clone application filters.", +)(application_filters) + # Tags app.command( name="tags", From 411f40c73a93dd316dbdae68c23f197a73e999d5 Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sun, 15 Dec 2024 11:35:41 -0600 Subject: [PATCH 13/18] Add support for cloning application groups Introduced functionality to clone application group objects from a source SCM tenant to a destination SCM tenant. This includes a new `application_groups` command and the underlying logic to handle retrieval, filtering, creation, and optional commit operations for application groups. 
--- scm_config_clone/__init__.py | 1 + .../commands/objects/application_group.py | 356 ++++++++++++++++++ scm_config_clone/main.py | 7 + 3 files changed, 364 insertions(+) create mode 100644 scm_config_clone/commands/objects/application_group.py diff --git a/scm_config_clone/__init__.py b/scm_config_clone/__init__.py index fcc926c..d9d4642 100644 --- a/scm_config_clone/__init__.py +++ b/scm_config_clone/__init__.py @@ -4,5 +4,6 @@ from .commands.objects.address_group import address_groups from .commands.objects.application import applications from .commands.objects.application_filters import application_filters +from .commands.objects.application_group import application_groups from .commands.objects.tag import tags from .commands.utilities.create_settings_file import create_settings diff --git a/scm_config_clone/commands/objects/application_group.py b/scm_config_clone/commands/objects/application_group.py new file mode 100644 index 0000000..a759854 --- /dev/null +++ b/scm_config_clone/commands/objects/application_group.py @@ -0,0 +1,356 @@ +# scm_config_clone/commands/objects/application_group.py + +import logging +from typing import List, Optional, Any, Dict + +import typer +from scm.client import Scm +from scm.config.objects import ApplicationGroup +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm.models.objects.application_group import ( + ApplicationGroupCreateModel, + ApplicationGroupResponseModel, +) +from tabulate import tabulate + +from scm_config_clone.utilities import load_settings, parse_csv_option + + +def build_create_params( + src_obj: ApplicationGroupResponseModel, folder: str +) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new application group object. 
+ + Given an existing ApplicationGroupResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new application group in the destination tenant. It uses `model_dump` on a Pydantic model + to ensure only valid, explicitly set fields are included. + + Args: + src_obj: The ApplicationGroupResponseModel representing the source application group object. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `ApplicationGroup.create()`. + This dictionary is validated and pruned by ApplicationGroupCreateModel. + """ + data = { + "name": src_obj.name, + "folder": folder, + "members": src_obj.members, + } + + create_model = ApplicationGroupCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) + + +def application_groups( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning application groups.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets to exclude from the retrieval.", + ), + exclude_devices: str = typer.Option( + None, + "--exclude-devices", + help="Comma-separated list of devices to exclude from the retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + auto_approve: bool = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + is_flag=True, + ), + create_report: bool = typer.Option( + None, + "--create-report", + 
"-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + is_flag=True, + ), + quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + "--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + settings_file: str = typer.Option( + "settings.yaml", + "--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone application group objects from a source SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving application group objects + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve application group objects from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. + + Args: + folder: The source folder from which to retrieve application group objects. + exclude_folders: Comma-separated folder names to exclude from source retrieval. 
+ exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + exclude_devices: Comma-separated device names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. + auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. + dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). + logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. + + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. + """ + typer.echo("🚀 Starting application group objects cloning...") + + # Load settings from file + settings = load_settings(settings_file) + + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + exclude_devices_list = parse_csv_option(exclude_devices) + + # Authenticate and retrieve from source + try: + source_creds = settings["source_scm"] + 
source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve application group objects from source + try: + source_application_groups = ApplicationGroup(source_client, max_limit=5000) + application_group_objects = source_application_groups.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + exclude_devices=exclude_devices_list, + ) + logger.info( + f"Retrieved {len(application_group_objects)} application group objects from source tenant folder '{folder}'." + ) + except Exception as e: + logger.error(f"Error retrieving application group objects from source: {e}") + raise typer.Exit(code=1) + + # Display retrieved groups if not quiet_mode + if application_group_objects and not quiet_mode: + group_table = [ + [ + group.name, + group.folder, + ", ".join(group.members), + ] + for group in application_group_objects + ] + typer.echo( + tabulate( + group_table, + headers=["Name", "Folder", "Members"], + tablefmt="fancy_grid", + ) + ) + elif not application_group_objects: + typer.echo("No application group objects found in the source folder.") + + # Prompt if not auto-approved and objects exist + if application_group_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these objects in the destination tenant?" 
+ ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create application group objects in destination + destination_application_groups = ApplicationGroup( + destination_client, max_limit=5000 + ) + created_objs: List[ApplicationGroupResponseModel] = [] + error_objects: List[List[str]] = [] + + for src_obj in application_group_objects: + try: + create_params = build_create_params(src_obj, folder) + except ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue + + try: + new_obj = destination_application_groups.create(create_params) + created_objs.append(new_obj) + logger.info( + f"Created application group object in destination: {new_obj.name}" + ) + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following application group objects:") + created_table = [ + [ + obj.name, + obj.folder, + ", ".join(obj.members), + ] + for obj in created_objs + ] + + typer.echo( + tabulate( + created_table, + headers=["Name", "Folder", "Members"], + tablefmt="fancy_grid", + ) + ) + + 
if error_objects and not quiet_mode: + typer.echo("\nSome application group objects failed to be created:") + typer.echo( + tabulate( + error_objects, + headers=["Object Name", "Error"], + tablefmt="fancy_grid", + ) + ) + + # Commit changes if requested and objects were created + if commit_and_push and created_objs: + try: + commit_params = { + "folders": [folder], + "description": "Cloned application group objects", + "sync": True, + } + result = destination_application_groups.commit(**commit_params) + job_status = destination_application_groups.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error( + f"Error committing application group objects in destination: {e}" + ) + raise typer.Exit(code=1) + else: + if created_objs and not commit_and_push: + logger.info( + "Objects created, but --commit-and-push not specified, skipping commit." + ) + else: + logger.info( + "No new application group objects were created, skipping commit." + ) + + typer.echo("🎉 Application group objects cloning completed successfully! 🎉") diff --git a/scm_config_clone/main.py b/scm_config_clone/main.py index b300075..50b2a7d 100644 --- a/scm_config_clone/main.py +++ b/scm_config_clone/main.py @@ -23,6 +23,7 @@ address_groups, applications, application_filters, + application_groups, create_settings, tags, ) @@ -75,6 +76,12 @@ help="Clone application filters.", )(application_filters) +# Application Groups +app.command( + name="application-groups", + help="Clone application groups.", +)(application_groups) + # Tags app.command( name="tags", From a745538121752882c838f661f7ea37ec30c303bc Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sun, 15 Dec 2024 15:07:45 -0600 Subject: [PATCH 14/18] Add support for cloning external dynamic lists (EDLs) Introduced a new command to clone external dynamic lists between SCM tenants. 
This includes authentication, retrieval, filtering, and object creation functionalities, as well as optional commit and reporting features. --- scm_config_clone/__init__.py | 1 + .../objects/external_dynamic_lists.py | 390 ++++++++++++++++++ scm_config_clone/main.py | 7 + 3 files changed, 398 insertions(+) create mode 100644 scm_config_clone/commands/objects/external_dynamic_lists.py diff --git a/scm_config_clone/__init__.py b/scm_config_clone/__init__.py index d9d4642..7430b66 100644 --- a/scm_config_clone/__init__.py +++ b/scm_config_clone/__init__.py @@ -5,5 +5,6 @@ from .commands.objects.application import applications from .commands.objects.application_filters import application_filters from .commands.objects.application_group import application_groups +from .commands.objects.external_dynamic_lists import external_dynamic_lists from .commands.objects.tag import tags from .commands.utilities.create_settings_file import create_settings diff --git a/scm_config_clone/commands/objects/external_dynamic_lists.py b/scm_config_clone/commands/objects/external_dynamic_lists.py new file mode 100644 index 0000000..ffaa6c1 --- /dev/null +++ b/scm_config_clone/commands/objects/external_dynamic_lists.py @@ -0,0 +1,390 @@ +# scm_config_clone/commands/objects/external_dynamic_lists.py + +import logging +from typing import List, Optional, Any, Dict + +import typer +from scm.client import Scm +from scm.config.objects import ExternalDynamicLists +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm.models.objects.external_dynamic_lists import ( + ExternalDynamicListsCreateModel, + ExternalDynamicListsResponseModel, +) +from tabulate import tabulate + +from scm_config_clone.utilities import load_settings, parse_csv_option + + +def build_create_params( + src_obj: ExternalDynamicListsResponseModel, folder: str +) -> Dict[str, Any]: + """ + Construct the dictionary of 
parameters required to create a new EDL object. + + Given an existing ExternalDynamicListsResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new EDL in the destination tenant. It identifies the EDL type and uses + `model_dump` on a Pydantic model to ensure only valid, explicitly set fields + are included. + + Args: + src_obj: The ExternalDynamicListsResponseModel representing the source EDL object. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `ExternalDynamicLists.create()`. + This dictionary is validated and pruned by ExternalDynamicListsCreateModel. + + Raises: + ValueError: If the source object does not contain a valid EDL type. + """ + data = { + "name": src_obj.name, + "folder": folder, + "type": src_obj.type.model_dump() if src_obj.type else None, + } + + if not data["type"]: + raise ValueError(f"No valid EDL type found for {src_obj.name}") + + create_model = ExternalDynamicListsCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) + + +def external_dynamic_lists( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning EDLs.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets to exclude from the retrieval.", + ), + exclude_devices: str = typer.Option( + None, + "--exclude-devices", + help="Comma-separated list of devices to exclude from the retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + 
is_flag=True, + ), + auto_approve: bool = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + is_flag=True, + ), + create_report: bool = typer.Option( + None, + "--create-report", + "-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + is_flag=True, + ), + quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + "--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + settings_file: str = typer.Option( + "settings.yaml", + "--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone External Dynamic List (EDL) objects from a source SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving EDL objects + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve EDL objects from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. 
Display the results, including successfully created objects and any errors. + + Args: + folder: The source folder from which to retrieve EDL objects. + exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + exclude_devices: Comma-separated device names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. + auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. + dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). + logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. + + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. 
+ """ + typer.echo("🚀 Starting EDL objects cloning...") + + # Load settings from file + settings = load_settings(settings_file) + + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + exclude_devices_list = parse_csv_option(exclude_devices) + + # Authenticate and retrieve from source + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve EDL objects from source + try: + source_edls = ExternalDynamicLists(source_client, max_limit=5000) + edl_objects = source_edls.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + exclude_devices=exclude_devices_list, + ) + logger.info( + f"Retrieved {len(edl_objects)} EDL objects from source tenant folder '{folder}'." 
+ ) + except Exception as e: + logger.error(f"Error retrieving EDL objects from source: {e}") + raise typer.Exit(code=1) + + # Display retrieved EDLs if not quiet_mode + if edl_objects and not quiet_mode: + edl_table = [] + for edl in edl_objects: + edl_type = ( + next(iter(edl.type.model_dump().keys())) if edl.type else "Unknown" + ) + edl_url = ( + getattr(getattr(edl.type, edl_type), "url", "N/A") + if edl.type + else "N/A" + ) + edl_description = ( + getattr(getattr(edl.type, edl_type), "description", "") + if edl.type + else "" + ) + + edl_table.append( + [ + edl.name, + edl.folder, + edl_type, + edl_url, + edl_description or "", + ] + ) + + typer.echo( + tabulate( + edl_table, + headers=["Name", "Folder", "Type", "URL", "Description"], + tablefmt="fancy_grid", + ) + ) + elif not edl_objects: + typer.echo("No EDL objects found in the source folder.") + + # Prompt if not auto-approved and objects exist + if edl_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these objects in the destination tenant?" 
+ ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create EDL objects in destination + destination_edls = ExternalDynamicLists(destination_client, max_limit=5000) + created_objs: List[ExternalDynamicListsResponseModel] = [] + error_objects: List[List[str]] = [] + + for src_obj in edl_objects: + try: + create_params = build_create_params(src_obj, folder) + except ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue + + try: + new_obj = destination_edls.create(create_params) + created_objs.append(new_obj) + logger.info(f"Created EDL object in destination: {new_obj.name}") + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following EDL objects:") + created_table = [] + for obj in created_objs: + obj_type = ( + next(iter(obj.type.model_dump().keys())) if obj.type else "Unknown" + ) + obj_url = ( + getattr(getattr(obj.type, obj_type), "url", "N/A") + if obj.type + else "N/A" + ) + obj_description = ( + getattr(getattr(obj.type, obj_type), "description", "") + if 
obj.type + else "" + ) + + created_table.append( + [ + obj.name, + obj.folder, + obj_type, + obj_url, + obj_description or "", + ] + ) + + typer.echo( + tabulate( + created_table, + headers=["Name", "Folder", "Type", "URL", "Description"], + tablefmt="fancy_grid", + ) + ) + + if error_objects and not quiet_mode: + typer.echo("\nSome EDL objects failed to be created:") + typer.echo( + tabulate( + error_objects, + headers=["Object Name", "Error"], + tablefmt="fancy_grid", + ) + ) + + # Commit changes if requested and objects were created + if commit_and_push and created_objs: + try: + commit_params = { + "folders": [folder], + "description": "Cloned EDL objects", + "sync": True, + } + result = destination_edls.commit(**commit_params) + job_status = destination_edls.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error(f"Error committing EDL objects in destination: {e}") + raise typer.Exit(code=1) + else: + if created_objs and not commit_and_push: + logger.info( + "Objects created, but --commit-and-push not specified, skipping commit." + ) + else: + logger.info("No new EDL objects were created, skipping commit.") + + typer.echo("🎉 EDL objects cloning completed successfully! 
🎉") diff --git a/scm_config_clone/main.py b/scm_config_clone/main.py index 50b2a7d..edcd784 100644 --- a/scm_config_clone/main.py +++ b/scm_config_clone/main.py @@ -24,6 +24,7 @@ applications, application_filters, application_groups, + external_dynamic_lists, create_settings, tags, ) @@ -82,6 +83,12 @@ help="Clone application groups.", )(application_groups) +# External Dynamic Lists +app.command( + name="edls", + help="Clone external dynamic lists.", +)(external_dynamic_lists) + # Tags app.command( name="tags", From 4d9d933b76b42efbe344fa6956706424a5612ff1 Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sun, 15 Dec 2024 15:15:32 -0600 Subject: [PATCH 15/18] Add service and service group cloning functionality Introduce commands to clone "service" and "service group" objects in the CLI, enabling retrieval from a source tenant and creation in a destination tenant. Includes features like filtering, auto-confirmation, dry-run, reporting, and commit options. --- scm_config_clone/__init__.py | 2 + scm_config_clone/commands/objects/service.py | 440 ++++++++++++++++++ .../commands/objects/service_group.py | 350 ++++++++++++++ scm_config_clone/main.py | 13 + 4 files changed, 805 insertions(+) create mode 100644 scm_config_clone/commands/objects/service.py create mode 100644 scm_config_clone/commands/objects/service_group.py diff --git a/scm_config_clone/__init__.py b/scm_config_clone/__init__.py index 7430b66..d4efe2b 100644 --- a/scm_config_clone/__init__.py +++ b/scm_config_clone/__init__.py @@ -6,5 +6,7 @@ from .commands.objects.application_filters import application_filters from .commands.objects.application_group import application_groups from .commands.objects.external_dynamic_lists import external_dynamic_lists +from .commands.objects.service import services +from .commands.objects.service_group import service_groups from .commands.objects.tag import tags from .commands.utilities.create_settings_file import create_settings diff --git 
a/scm_config_clone/commands/objects/service.py b/scm_config_clone/commands/objects/service.py new file mode 100644 index 0000000..0240c91 --- /dev/null +++ b/scm_config_clone/commands/objects/service.py @@ -0,0 +1,440 @@ +# scm_config_clone/commands/objects/service.py + +import logging +from typing import List, Optional, Any, Dict + +import typer +from scm.client import Scm +from scm.config.objects import Service +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm.models.objects.service import ServiceCreateModel, ServiceResponseModel +from tabulate import tabulate + +from scm_config_clone.utilities import load_settings, parse_csv_option + + +def build_create_params(src_obj: ServiceResponseModel, folder: str) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new service object. + + Given an existing ServiceResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new service in the destination tenant. It identifies the protocol type + (TCP/UDP) and uses `model_dump` on a Pydantic model to ensure only valid, + explicitly set fields are included. + + Args: + src_obj: The ServiceResponseModel representing the source service object. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `Service.create()`. + This dictionary is validated and pruned by ServiceCreateModel. + + Raises: + ValueError: If the source object does not contain a valid protocol configuration. 
+ """ + data = { + "name": src_obj.name, + "folder": folder, + "description": src_obj.description if src_obj.description is not None else None, + "tag": src_obj.tag if src_obj.tag else [], + "protocol": {}, + } + + # Determine which protocol type is set + if src_obj.protocol.tcp: + data["protocol"]["tcp"] = { + "port": src_obj.protocol.tcp.port, + } + if src_obj.protocol.tcp.override: + data["protocol"]["tcp"]["override"] = ( + src_obj.protocol.tcp.override.model_dump( + exclude_unset=True, + exclude_none=True, + ) + ) + elif src_obj.protocol.udp: + data["protocol"]["udp"] = { + "port": src_obj.protocol.udp.port, + } + if src_obj.protocol.udp.override: + data["protocol"]["udp"]["override"] = ( + src_obj.protocol.udp.override.model_dump( + exclude_unset=True, + exclude_none=True, + ) + ) + else: + raise ValueError(f"No valid protocol configuration found for {src_obj.name}") + + create_model = ServiceCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) + + +def services( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning services.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets to exclude from the retrieval.", + ), + exclude_devices: str = typer.Option( + None, + "--exclude-devices", + help="Comma-separated list of devices to exclude from the retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + auto_approve: bool = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + 
is_flag=True, + ), + create_report: bool = typer.Option( + None, + "--create-report", + "-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + is_flag=True, + ), + quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + "--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + settings_file: str = typer.Option( + "settings.yaml", + "--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone service objects from a source SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving service objects + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve service objects from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. + + Args: + folder: The source folder from which to retrieve service objects. 
+ exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + exclude_devices: Comma-separated device names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. + auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. + dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). + logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. + + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. + """ + typer.echo("🚀 Starting service objects cloning...") + + # Load settings from file + settings = load_settings(settings_file) + + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + exclude_devices_list = parse_csv_option(exclude_devices) + + # Authenticate and 
retrieve from source + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve service objects from source + try: + source_services = Service(source_client, max_limit=5000) + service_objects = source_services.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + exclude_devices=exclude_devices_list, + ) + logger.info( + f"Retrieved {len(service_objects)} service objects from source tenant folder '{folder}'." + ) + except Exception as e: + logger.error(f"Error retrieving service objects from source: {e}") + raise typer.Exit(code=1) + + # Display retrieved services if not quiet_mode + if service_objects and not quiet_mode: + service_table = [] + for svc in service_objects: + if svc.protocol.tcp: + protocol_type = "TCP" + port_value = svc.protocol.tcp.port + timeout = ( + svc.protocol.tcp.override.timeout + if svc.protocol.tcp.override + else None + ) + elif svc.protocol.udp: + protocol_type = "UDP" + port_value = svc.protocol.udp.port + timeout = ( + svc.protocol.udp.override.timeout + if svc.protocol.udp.override + else None + ) + else: + protocol_type = "Unknown" + port_value = "N/A" + timeout = None + + service_table.append( + [ + svc.name, + svc.folder, + protocol_type, + port_value, + timeout or "Default", + svc.description or "", + ] + ) + + typer.echo( + tabulate( + service_table, + headers=[ + "Name", + "Folder", + "Protocol", + "Ports", + "Timeout", + "Description", + ], + 
tablefmt="fancy_grid", + ) + ) + elif not service_objects: + typer.echo("No service objects found in the source folder.") + + # Prompt if not auto-approved and objects exist + if service_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these objects in the destination tenant?" + ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create service objects in destination + destination_services = Service(destination_client, max_limit=5000) + created_objs: List[ServiceResponseModel] = [] + error_objects: List[List[str]] = [] + + for src_obj in service_objects: + try: + create_params = build_create_params(src_obj, folder) + except ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue + + try: + new_obj = destination_services.create(create_params) + created_objs.append(new_obj) + logger.info(f"Created service object in destination: {new_obj.name}") + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following service 
objects:") + created_table = [] + for obj in created_objs: + if obj.protocol.tcp: + protocol_type = "TCP" + port_value = obj.protocol.tcp.port + timeout = ( + obj.protocol.tcp.override.timeout + if obj.protocol.tcp.override + else None + ) + elif obj.protocol.udp: + protocol_type = "UDP" + port_value = obj.protocol.udp.port + timeout = ( + obj.protocol.udp.override.timeout + if obj.protocol.udp.override + else None + ) + else: + protocol_type = "Unknown" + port_value = "N/A" + timeout = None + + created_table.append( + [ + obj.name, + obj.folder, + protocol_type, + port_value, + timeout or "Default", + obj.description or "", + ] + ) + + typer.echo( + tabulate( + created_table, + headers=[ + "Name", + "Folder", + "Protocol", + "Ports", + "Timeout", + "Description", + ], + tablefmt="fancy_grid", + ) + ) + + if error_objects and not quiet_mode: + typer.echo("\nSome service objects failed to be created:") + typer.echo( + tabulate( + error_objects, + headers=["Object Name", "Error"], + tablefmt="fancy_grid", + ) + ) + + # Commit changes if requested and objects were created + if commit_and_push and created_objs: + try: + commit_params = { + "folders": [folder], + "description": "Cloned service objects", + "sync": True, + } + result = destination_services.commit(**commit_params) + job_status = destination_services.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error(f"Error committing service objects in destination: {e}") + raise typer.Exit(code=1) + else: + if created_objs and not commit_and_push: + logger.info( + "Objects created, but --commit-and-push not specified, skipping commit." + ) + else: + logger.info("No new service objects were created, skipping commit.") + + typer.echo("🎉 Service objects cloning completed successfully! 
🎉") diff --git a/scm_config_clone/commands/objects/service_group.py b/scm_config_clone/commands/objects/service_group.py new file mode 100644 index 0000000..ecb21fd --- /dev/null +++ b/scm_config_clone/commands/objects/service_group.py @@ -0,0 +1,350 @@ +# scm_config_clone/commands/objects/service_group.py + +import logging +from typing import List, Optional, Any, Dict + +import typer +from scm.client import Scm +from scm.config.objects import ServiceGroup +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm.models.objects.service_group import ( + ServiceGroupCreateModel, + ServiceGroupResponseModel, +) +from tabulate import tabulate + +from scm_config_clone.utilities import load_settings, parse_csv_option + + +def build_create_params( + src_obj: ServiceGroupResponseModel, folder: str +) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new service group object. + + Given an existing ServiceGroupResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new service group in the destination tenant. It uses `model_dump` on a Pydantic model + to ensure only valid, explicitly set fields are included. + + Args: + src_obj: The ServiceGroupResponseModel representing the source service group object. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `ServiceGroup.create()`. + This dictionary is validated and pruned by ServiceGroupCreateModel. 
+ """ + data = { + "name": src_obj.name, + "folder": folder, + "members": src_obj.members, + "tag": src_obj.tag if src_obj.tag else [], + } + + create_model = ServiceGroupCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) + + +def service_groups( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning service groups.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets to exclude from the retrieval.", + ), + exclude_devices: str = typer.Option( + None, + "--exclude-devices", + help="Comma-separated list of devices to exclude from the retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + auto_approve: bool = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + is_flag=True, + ), + create_report: bool = typer.Option( + None, + "--create-report", + "-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + is_flag=True, + ), + quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + "--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + settings_file: str = typer.Option( + "settings.yaml", + 
"--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone service group objects from a source SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving service group objects + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve service group objects from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. + + Args: + folder: The source folder from which to retrieve service group objects. + exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + exclude_devices: Comma-separated device names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. + auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. + dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). 
+ logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. + + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. + """ + typer.echo("🚀 Starting service group objects cloning...") + + # Load settings from file + settings = load_settings(settings_file) + + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + exclude_devices_list = parse_csv_option(exclude_devices) + + # Authenticate and retrieve from source + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve service group objects from source + try: + source_service_groups = ServiceGroup(source_client, max_limit=5000) + 
service_group_objects = source_service_groups.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + exclude_devices=exclude_devices_list, + ) + logger.info( + f"Retrieved {len(service_group_objects)} service group objects from source tenant folder '{folder}'." + ) + except Exception as e: + logger.error(f"Error retrieving service group objects from source: {e}") + raise typer.Exit(code=1) + + # Display retrieved groups if not quiet_mode + if service_group_objects and not quiet_mode: + group_table = [ + [ + group.name, + group.folder, + ", ".join(group.members), + ", ".join(group.tag) if group.tag else "", + ] + for group in service_group_objects + ] + typer.echo( + tabulate( + group_table, + headers=["Name", "Folder", "Members", "Tags"], + tablefmt="fancy_grid", + ) + ) + elif not service_group_objects: + typer.echo("No service group objects found in the source folder.") + + # Prompt if not auto-approved and objects exist + if service_group_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these objects in the destination tenant?" 
+ ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create service group objects in destination + destination_service_groups = ServiceGroup(destination_client, max_limit=5000) + created_objs: List[ServiceGroupResponseModel] = [] + error_objects: List[List[str]] = [] + + for src_obj in service_group_objects: + try: + create_params = build_create_params(src_obj, folder) + except ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue + + try: + new_obj = destination_service_groups.create(create_params) + created_objs.append(new_obj) + logger.info(f"Created service group object in destination: {new_obj.name}") + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following service group objects:") + created_table = [ + [ + obj.name, + obj.folder, + ", ".join(obj.members), + ", ".join(obj.tag) if obj.tag else "", + ] + for obj in created_objs + ] + typer.echo( + tabulate( + created_table, + headers=["Name", "Folder", "Members", "Tags"], + tablefmt="fancy_grid", + ) + ) + 
+
+    if error_objects and not quiet_mode:
+        typer.echo("\nSome service group objects failed to be created:")
+        typer.echo(
+            tabulate(
+                error_objects,
+                headers=["Object Name", "Error"],
+                tablefmt="fancy_grid",
+            )
+        )
+
+    # Commit changes if requested and objects were created
+    if commit_and_push and created_objs:
+        try:
+            commit_params = {
+                "folders": [folder],
+                "description": "Cloned service group objects",
+                "sync": True,
+            }
+            result = destination_service_groups.commit(**commit_params)
+            job_status = destination_service_groups.get_job_status(result.job_id)
+            logger.info(
+                f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}"
+            )
+        except Exception as e:
+            logger.error(f"Error committing service group objects in destination: {e}")
+            raise typer.Exit(code=1)
+    else:
+        if created_objs and not commit_and_push:
+            logger.info(
+                "Objects created, but --commit-and-push not specified, skipping commit."
+            )
+        else:
+            logger.info("No new service group objects were created, skipping commit.")
+
+    typer.echo("🎉 Service group objects cloning completed successfully! 🎉")
diff --git a/scm_config_clone/main.py b/scm_config_clone/main.py
index edcd784..2e6a437 100644
--- a/scm_config_clone/main.py
+++ b/scm_config_clone/main.py
@@ -25,6 +25,8 @@
     application_filters,
     application_groups,
     external_dynamic_lists,
+    services,
+    service_groups,
     create_settings,
     tags,
 )
@@ -89,6 +90,18 @@
     help="Clone external dynamic lists.",
 )(external_dynamic_lists)
 
+# Services
+app.command(
+    name="services",
+    help="Clone services.",
+)(services)
+
+# Service Groups
+app.command(
+    name="service-groups",
+    help="Clone service groups.",
+)(service_groups)
+
 # Tags
 app.command(
     name="tags",
From 8278adef9c14a0c9d070eea61b63dd0f0ec8dcd7 Mon Sep 17 00:00:00 2001
From: Calvin Remsburg
Date: Sun, 15 Dec 2024 16:02:13 -0600
Subject: [PATCH 16/18] Add support for cloning security-related profiles.
This commit introduces functionality for cloning decryption profiles, DNS security profiles, and security rules, along with their integration into the CLI. It enables automated retrieval, validation, and creation of these profiles across tenants, ensuring streamlined operations. --- scm_config_clone/__init__.py | 11 + .../commands/security/anti_spyware_profile.py | 372 ++++++++++++++++ .../commands/security/decryption_profile.py | 412 ++++++++++++++++++ .../commands/security/dns_security_profile.py | 379 ++++++++++++++++ .../commands/security/security_rule.py | 396 +++++++++++++++++ .../commands/security/url_category.py | 357 +++++++++++++++ .../vulnerability_protection_profile.py | 377 ++++++++++++++++ .../security/wildfire_antivirus_profile.py | 371 ++++++++++++++++ scm_config_clone/main.py | 52 ++- 9 files changed, 2726 insertions(+), 1 deletion(-) create mode 100644 scm_config_clone/commands/security/anti_spyware_profile.py create mode 100644 scm_config_clone/commands/security/decryption_profile.py create mode 100644 scm_config_clone/commands/security/dns_security_profile.py create mode 100644 scm_config_clone/commands/security/security_rule.py create mode 100644 scm_config_clone/commands/security/url_category.py create mode 100644 scm_config_clone/commands/security/vulnerability_protection_profile.py create mode 100644 scm_config_clone/commands/security/wildfire_antivirus_profile.py diff --git a/scm_config_clone/__init__.py b/scm_config_clone/__init__.py index d4efe2b..50ced6e 100644 --- a/scm_config_clone/__init__.py +++ b/scm_config_clone/__init__.py @@ -9,4 +9,15 @@ from .commands.objects.service import services from .commands.objects.service_group import service_groups from .commands.objects.tag import tags +from .commands.security.anti_spyware_profile import anti_spyware_profiles +from .commands.security.decryption_profile import decryption_profiles +from .commands.security.dns_security_profile import dns_security_profiles +from 
.commands.security.security_rule import security_rules +from .commands.security.url_category import url_categories +from .commands.security.vulnerability_protection_profile import ( + vulnerability_protection_profiles, +) +from .commands.security.wildfire_antivirus_profile import ( + wildfire_antivirus_profiles, +) from .commands.utilities.create_settings_file import create_settings diff --git a/scm_config_clone/commands/security/anti_spyware_profile.py b/scm_config_clone/commands/security/anti_spyware_profile.py new file mode 100644 index 0000000..a129fdd --- /dev/null +++ b/scm_config_clone/commands/security/anti_spyware_profile.py @@ -0,0 +1,372 @@ +# scm_config_clone/commands/security/anti_spyware_profile.py + +import logging +from typing import List, Optional, Any, Dict + +import typer +from scm.client import Scm +from scm.config.security import AntiSpywareProfile +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm.models.security import ( + AntiSpywareProfileCreateModel, + AntiSpywareProfileResponseModel, +) +from tabulate import tabulate + +from scm_config_clone.utilities import load_settings, parse_csv_option + + +def build_create_params( + src_obj: AntiSpywareProfileResponseModel, folder: str +) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new anti-spyware profile object. + + Given an existing AntiSpywareProfileResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new profile in the destination tenant. It uses `model_dump` on a Pydantic model to ensure + only valid, explicitly set fields are included. + + Args: + src_obj: The AntiSpywareProfileResponseModel representing the source profile object. + folder: The folder in the destination tenant where the object should be created. 
+ + Returns: + A dictionary containing the fields required for `AntiSpywareProfile.create()`. + This dictionary is validated and pruned by AntiSpywareProfileCreateModel. + """ + data = { + "name": src_obj.name, + "folder": folder, + "description": src_obj.description if src_obj.description is not None else None, + "cloud_inline_analysis": src_obj.cloud_inline_analysis, + "rules": src_obj.rules, + } + + if src_obj.inline_exception_edl_url: + data["inline_exception_edl_url"] = src_obj.inline_exception_edl_url + + if src_obj.inline_exception_ip_address: + data["inline_exception_ip_address"] = src_obj.inline_exception_ip_address + + if src_obj.mica_engine_spyware_enabled: + data["mica_engine_spyware_enabled"] = src_obj.mica_engine_spyware_enabled + + if src_obj.threat_exception: + data["threat_exception"] = src_obj.threat_exception + + create_model = AntiSpywareProfileCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) + + +def anti_spyware_profiles( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning anti-spyware profiles.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets to exclude from the retrieval.", + ), + exclude_devices: str = typer.Option( + None, + "--exclude-devices", + help="Comma-separated list of devices to exclude from the retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + auto_approve: bool = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + 
is_flag=True, + ), + create_report: bool = typer.Option( + None, + "--create-report", + "-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + is_flag=True, + ), + quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + "--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + settings_file: str = typer.Option( + "settings.yaml", + "--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone anti-spyware profile objects from a source SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving anti-spyware profile objects + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve anti-spyware profile objects from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. + + Args: + folder: The source folder from which to retrieve anti-spyware profile objects. 
+ exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + exclude_devices: Comma-separated device names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. + auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. + dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). + logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. + + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. + """ + typer.echo("🚀 Starting anti-spyware profile objects cloning...") + + # Load settings from file + settings = load_settings(settings_file) + + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + exclude_devices_list = parse_csv_option(exclude_devices) + + # 
Authenticate and retrieve from source + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve anti-spyware profile objects from source + try: + source_profiles = AntiSpywareProfile(source_client, max_limit=5000) + profile_objects = source_profiles.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + exclude_devices=exclude_devices_list, + ) + logger.info( + f"Retrieved {len(profile_objects)} anti-spyware profile objects from source tenant folder '{folder}'." + ) + except Exception as e: + logger.error(f"Error retrieving anti-spyware profile objects from source: {e}") + raise typer.Exit(code=1) + + # Display retrieved profiles if not quiet_mode + if profile_objects and not quiet_mode: + profile_table = [ + [ + profile.name, + profile.folder, + len(profile.rules), + "Yes" if profile.cloud_inline_analysis else "No", + profile.description or "", + ] + for profile in profile_objects + ] + typer.echo( + tabulate( + profile_table, + headers=["Name", "Folder", "Rules", "Cloud Analysis", "Description"], + tablefmt="fancy_grid", + ) + ) + elif not profile_objects: + typer.echo("No anti-spyware profile objects found in the source folder.") + + # Prompt if not auto-approved and objects exist + if profile_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these objects in the destination tenant?" 
+ ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create anti-spyware profile objects in destination + destination_profiles = AntiSpywareProfile(destination_client, max_limit=5000) + created_objs: List[AntiSpywareProfileResponseModel] = [] + error_objects: List[List[str]] = [] + + for src_obj in profile_objects: + try: + create_params = build_create_params(src_obj, folder) + except ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue + + try: + new_obj = destination_profiles.create(create_params) + created_objs.append(new_obj) + logger.info( + f"Created anti-spyware profile object in destination: {new_obj.name}" + ) + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following anti-spyware profile objects:") + created_table = [ + [ + obj.name, + obj.folder, + len(obj.rules), + "Yes" if obj.cloud_inline_analysis else "No", + obj.description or "", + ] + for obj in created_objs + ] + + typer.echo( + tabulate( + created_table, + headers=["Name", "Folder", "Rules", 
"Cloud Analysis", "Description"], + tablefmt="fancy_grid", + ) + ) + + if error_objects and not quiet_mode: + typer.echo("\nSome anti-spyware profile objects failed to be created:") + typer.echo( + tabulate( + error_objects, + headers=["Object Name", "Error"], + tablefmt="fancy_grid", + ) + ) + + # Commit changes if requested and objects were created + if commit_and_push and created_objs: + try: + commit_params = { + "folders": [folder], + "description": "Cloned anti-spyware profile objects", + "sync": True, + } + result = destination_profiles.commit(**commit_params) + job_status = destination_profiles.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error( + f"Error committing anti-spyware profile objects in destination: {e}" + ) + raise typer.Exit(code=1) + else: + if created_objs and not commit_and_push: + logger.info( + "Objects created, but --commit-and-push not specified, skipping commit." + ) + else: + logger.info( + "No new anti-spyware profile objects were created, skipping commit." + ) + + typer.echo("🎉 Anti-spyware profile objects cloning completed successfully! 
🎉") diff --git a/scm_config_clone/commands/security/decryption_profile.py b/scm_config_clone/commands/security/decryption_profile.py new file mode 100644 index 0000000..f71fddf --- /dev/null +++ b/scm_config_clone/commands/security/decryption_profile.py @@ -0,0 +1,412 @@ +# scm_config_clone/commands/security/decryption_profile.py + +import logging +from typing import List, Optional, Any, Dict + +import typer +from scm.client import Scm +from scm.config.security import DecryptionProfile +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm.models.security.decryption_profiles import ( + DecryptionProfileCreateModel, + DecryptionProfileResponseModel, +) +from tabulate import tabulate + +from scm_config_clone.utilities import load_settings, parse_csv_option + + +def build_create_params( + src_obj: DecryptionProfileResponseModel, folder: str +) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new decryption profile object. + + Given an existing DecryptionProfileResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new decryption profile in the destination tenant. It uses `model_dump` on a Pydantic model + to ensure only valid, explicitly set fields are included. + + Args: + src_obj: The DecryptionProfileResponseModel representing the source decryption profile object. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `DecryptionProfile.create()`. + This dictionary is validated and pruned by DecryptionProfileCreateModel. 
+ """ + data = { + "name": src_obj.name, + "folder": folder, + "ssl_protocol_settings": ( + src_obj.ssl_protocol_settings.model_dump() + if src_obj.ssl_protocol_settings + else None + ), + "ssl_forward_proxy": ( + src_obj.ssl_forward_proxy.model_dump() + if src_obj.ssl_forward_proxy + else None + ), + "ssl_inbound_proxy": ( + src_obj.ssl_inbound_proxy.model_dump() + if src_obj.ssl_inbound_proxy + else None + ), + "ssl_no_proxy": ( + src_obj.ssl_no_proxy.model_dump() if src_obj.ssl_no_proxy else None + ), + } + + create_model = DecryptionProfileCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) + + +def decryption_profiles( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning decryption profiles.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets to exclude from the retrieval.", + ), + exclude_devices: str = typer.Option( + None, + "--exclude-devices", + help="Comma-separated list of devices to exclude from the retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + auto_approve: bool = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + is_flag=True, + ), + create_report: bool = typer.Option( + None, + "--create-report", + "-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + 
is_flag=True, + ), + quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + "--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + settings_file: str = typer.Option( + "settings.yaml", + "--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone decryption profile objects from a source SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving decryption profile objects + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve decryption profile objects from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. + + Args: + folder: The source folder from which to retrieve decryption profile objects. + exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + exclude_devices: Comma-separated device names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. 
+ auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. + dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). + logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. + + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. + """ + typer.echo("🚀 Starting decryption profile objects cloning...") + + # Load settings from file + settings = load_settings(settings_file) + + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + exclude_devices_list = parse_csv_option(exclude_devices) + + # Authenticate and retrieve from source + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + 
except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve decryption profile objects from source + try: + source_profiles = DecryptionProfile(source_client, max_limit=5000) + profile_objects = source_profiles.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + exclude_devices=exclude_devices_list, + ) + logger.info( + f"Retrieved {len(profile_objects)} decryption profile objects from source tenant folder '{folder}'." + ) + except Exception as e: + logger.error(f"Error retrieving decryption profile objects from source: {e}") + raise typer.Exit(code=1) + + # Display retrieved profiles if not quiet_mode + if profile_objects and not quiet_mode: + profile_table = [] + for profile in profile_objects: + if profile.ssl_forward_proxy: + profile_type = "Forward Proxy" + elif profile.ssl_inbound_proxy: + profile_type = "Inbound Proxy" + elif profile.ssl_no_proxy: + profile_type = "No Proxy" + else: + profile_type = "Unknown" + + ssl_settings = [] + if profile.ssl_protocol_settings: + ssl_settings.extend( + [ + f"Min: {profile.ssl_protocol_settings.min_version}", + f"Max: {profile.ssl_protocol_settings.max_version}", + ] + ) + + profile_table.append( + [ + profile.name, + profile.folder, + profile_type, + ", ".join(ssl_settings) if ssl_settings else "Default Settings", + ] + ) + + typer.echo( + tabulate( + profile_table, + headers=["Name", "Folder", "Type", "SSL Settings"], + tablefmt="fancy_grid", + ) + ) + elif not profile_objects: + typer.echo("No decryption profile objects found in the source folder.") + + # Prompt if not auto-approved and objects exist + if profile_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these objects in the 
destination tenant?" + ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create decryption profile objects in destination + destination_profiles = DecryptionProfile(destination_client, max_limit=5000) + created_objs: List[DecryptionProfileResponseModel] = [] + error_objects: List[List[str]] = [] + + for src_obj in profile_objects: + try: + create_params = build_create_params(src_obj, folder) + except ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue + + try: + new_obj = destination_profiles.create(create_params) + created_objs.append(new_obj) + logger.info( + f"Created decryption profile object in destination: {new_obj.name}" + ) + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following decryption profile objects:") + created_table = [] + for obj in created_objs: + if obj.ssl_forward_proxy: + profile_type = "Forward Proxy" + elif obj.ssl_inbound_proxy: + profile_type = "Inbound Proxy" + elif obj.ssl_no_proxy: + profile_type = "No Proxy" + else: + 
profile_type = "Unknown" + + ssl_settings = [] + if obj.ssl_protocol_settings: + ssl_settings.extend( + [ + f"Min: {obj.ssl_protocol_settings.min_version}", + f"Max: {obj.ssl_protocol_settings.max_version}", + ] + ) + + created_table.append( + [ + obj.name, + obj.folder, + profile_type, + ", ".join(ssl_settings) if ssl_settings else "Default Settings", + ] + ) + + typer.echo( + tabulate( + created_table, + headers=["Name", "Folder", "Type", "SSL Settings"], + tablefmt="fancy_grid", + ) + ) + + if error_objects and not quiet_mode: + typer.echo("\nSome decryption profile objects failed to be created:") + typer.echo( + tabulate( + error_objects, + headers=["Object Name", "Error"], + tablefmt="fancy_grid", + ) + ) + + # Commit changes if requested and objects were created + if commit_and_push and created_objs: + try: + commit_params = { + "folders": [folder], + "description": "Cloned decryption profile objects", + "sync": True, + } + result = destination_profiles.commit(**commit_params) + job_status = destination_profiles.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error( + f"Error committing decryption profile objects in destination: {e}" + ) + raise typer.Exit(code=1) + else: + if created_objs and not commit_and_push: + logger.info( + "Objects created, but --commit-and-push not specified, skipping commit." + ) + else: + logger.info( + "No new decryption profile objects were created, skipping commit." + ) + + typer.echo("🎉 Decryption profile objects cloning completed successfully! 
🎉") diff --git a/scm_config_clone/commands/security/dns_security_profile.py b/scm_config_clone/commands/security/dns_security_profile.py new file mode 100644 index 0000000..69ce5db --- /dev/null +++ b/scm_config_clone/commands/security/dns_security_profile.py @@ -0,0 +1,379 @@ +# scm_config_clone/commands/security/dns_security_profile.py + +import logging +from typing import List, Optional, Any, Dict + +import typer +from scm.client import Scm +from scm.config.security import DNSSecurityProfile +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm.models.security import ( + DNSSecurityProfileCreateModel, + DNSSecurityProfileResponseModel, +) +from tabulate import tabulate + +from scm_config_clone.utilities import load_settings, parse_csv_option + + +def build_create_params( + src_obj: DNSSecurityProfileResponseModel, folder: str +) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new DNS security profile object. + + Given an existing DNSSecurityProfileResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new profile in the destination tenant. It uses `model_dump` on a Pydantic model + to ensure only valid, explicitly set fields are included. + + Args: + src_obj: The DNSSecurityProfileResponseModel representing the source profile object. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `DNSSecurityProfile.create()`. + This dictionary is validated and pruned by DNSSecurityProfileCreateModel. 
+ """ + data = { + "name": src_obj.name, + "folder": folder, + "description": src_obj.description if src_obj.description is not None else None, + } + + if src_obj.botnet_domains: + data["botnet_domains"] = src_obj.botnet_domains.model_dump(exclude_unset=True) + + create_model = DNSSecurityProfileCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) + + +def dns_security_profiles( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning DNS security profiles.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets to exclude from the retrieval.", + ), + exclude_devices: str = typer.Option( + None, + "--exclude-devices", + help="Comma-separated list of devices to exclude from the retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + auto_approve: bool = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + is_flag=True, + ), + create_report: bool = typer.Option( + None, + "--create-report", + "-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + is_flag=True, + ), + quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + "--logging-level", + "-L", + 
help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + settings_file: str = typer.Option( + "settings.yaml", + "--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone DNS security profile objects from a source SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving DNS security profile objects + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve DNS security profile objects from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. + + Args: + folder: The source folder from which to retrieve DNS security profile objects. + exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + exclude_devices: Comma-separated device names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. + auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. 
+ dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). + logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. + + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. + """ + typer.echo("🚀 Starting DNS security profile objects cloning...") + + # Load settings from file + settings = load_settings(settings_file) + + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + exclude_devices_list = parse_csv_option(exclude_devices) + + # Authenticate and retrieve from source + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + 
logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve DNS security profile objects from source + try: + source_profiles = DNSSecurityProfile(source_client, max_limit=5000) + profile_objects = source_profiles.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + exclude_devices=exclude_devices_list, + ) + logger.info( + f"Retrieved {len(profile_objects)} DNS security profile objects from source tenant folder '{folder}'." + ) + except Exception as e: + logger.error(f"Error retrieving DNS security profile objects from source: {e}") + raise typer.Exit(code=1) + + # Display retrieved profiles if not quiet_mode + if profile_objects and not quiet_mode: + profile_table = [] + for profile in profile_objects: + categories = [] + if ( + profile.botnet_domains + and profile.botnet_domains.dns_security_categories + ): + categories = [ + f"{cat.name}({cat.action})" + for cat in profile.botnet_domains.dns_security_categories + ] + + profile_table.append( + [ + profile.name, + profile.folder, + ", ".join(categories) or "No categories", + profile.description or "", + ] + ) + + typer.echo( + tabulate( + profile_table, + headers=["Name", "Folder", "Categories", "Description"], + tablefmt="fancy_grid", + ) + ) + elif not profile_objects: + typer.echo("No DNS security profile objects found in the source folder.") + + # Prompt if not auto-approved and objects exist + if profile_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these objects in the destination tenant?" 
+ ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create DNS security profile objects in destination + destination_profiles = DNSSecurityProfile(destination_client, max_limit=5000) + created_objs: List[DNSSecurityProfileResponseModel] = [] + error_objects: List[List[str]] = [] + + for src_obj in profile_objects: + try: + create_params = build_create_params(src_obj, folder) + except ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue + + try: + new_obj = destination_profiles.create(create_params) + created_objs.append(new_obj) + logger.info( + f"Created DNS security profile object in destination: {new_obj.name}" + ) + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following DNS security profile objects:") + created_table = [] + for obj in created_objs: + categories = [] + if obj.botnet_domains and obj.botnet_domains.dns_security_categories: + categories = [ + f"{cat.name}({cat.action})" + for cat in obj.botnet_domains.dns_security_categories + ] + + 
created_table.append( + [ + obj.name, + obj.folder, + ", ".join(categories) or "No categories", + obj.description or "", + ] + ) + + typer.echo( + tabulate( + created_table, + headers=["Name", "Folder", "Categories", "Description"], + tablefmt="fancy_grid", + ) + ) + + if error_objects and not quiet_mode: + typer.echo("\nSome DNS security profile objects failed to be created:") + typer.echo( + tabulate( + error_objects, + headers=["Object Name", "Error"], + tablefmt="fancy_grid", + ) + ) + + # Commit changes if requested and objects were created + if commit_and_push and created_objs: + try: + commit_params = { + "folders": [folder], + "description": "Cloned DNS security profile objects", + "sync": True, + } + result = destination_profiles.commit(**commit_params) + job_status = destination_profiles.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error( + f"Error committing DNS security profile objects in destination: {e}" + ) + raise typer.Exit(code=1) + else: + if created_objs and not commit_and_push: + logger.info( + "Objects created, but --commit-and-push not specified, skipping commit." + ) + else: + logger.info( + "No new DNS security profile objects were created, skipping commit." + ) + + typer.echo("🎉 DNS security profile objects cloning completed successfully! 
🎉") diff --git a/scm_config_clone/commands/security/security_rule.py b/scm_config_clone/commands/security/security_rule.py new file mode 100644 index 0000000..a98af62 --- /dev/null +++ b/scm_config_clone/commands/security/security_rule.py @@ -0,0 +1,396 @@ +# scm_config_clone/commands/security/security_rule.py + +import logging +from typing import List, Optional, Any, Dict + +import typer +from scm.client import Scm +from scm.config.security import SecurityRule +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm.models.security import SecurityRuleCreateModel, SecurityRuleResponseModel +from tabulate import tabulate + +from scm_config_clone.utilities import load_settings, parse_csv_option + + +def build_create_params( + src_obj: SecurityRuleResponseModel, folder: str +) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new security rule. + + Given an existing SecurityRuleResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new security rule in the destination tenant. It uses `model_dump` on a Pydantic model + to ensure only valid, explicitly set fields are included. + + Args: + src_obj: The SecurityRuleResponseModel representing the source security rule. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `SecurityRule.create()`. + This dictionary is validated and pruned by SecurityRuleCreateModel. 
+ """ + data = { + "name": src_obj.name, + "folder": folder, + "description": src_obj.description if src_obj.description is not None else None, + "tag": src_obj.tag if src_obj.tag else [], + "from_": src_obj.from_, + "source": src_obj.source, + "negate_source": src_obj.negate_source, + "source_user": src_obj.source_user, + "source_hip": src_obj.source_hip, + "to_": src_obj.to_, + "destination": src_obj.destination, + "negate_destination": src_obj.negate_destination, + "destination_hip": src_obj.destination_hip, + "application": src_obj.application, + "service": src_obj.service, + "category": src_obj.category, + "action": src_obj.action, + "profile_setting": src_obj.profile_setting, + "log_setting": src_obj.log_setting, + "schedule": src_obj.schedule, + "log_start": src_obj.log_start, + "log_end": src_obj.log_end, + "disabled": src_obj.disabled, + } + + create_model = SecurityRuleCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + by_alias=True, + ) + + +def security_rules( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning security rules.", + ), + rulebase: str = typer.Option( + "pre", + "--rulebase", + help="The rulebase to target (pre or post).", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets to exclude from the retrieval.", + ), + exclude_devices: str = typer.Option( + None, + "--exclude-devices", + help="Comma-separated list of devices to exclude from the retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + auto_approve: bool = typer.Option( + 
None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + is_flag=True, + ), + create_report: bool = typer.Option( + None, + "--create-report", + "-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + is_flag=True, + ), + quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + "--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + settings_file: str = typer.Option( + "settings.yaml", + "--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone security rules from a source SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving security rules + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve security rules from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. 
+ + Args: + folder: The source folder from which to retrieve security rules. + rulebase: The rulebase to target (pre or post). + exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + exclude_devices: Comma-separated device names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. + auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. + dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). + logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. + + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. 
+ """ + typer.echo("🚀 Starting security rule cloning...") + + # Load settings from file + settings = load_settings(settings_file) + + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + exclude_devices_list = parse_csv_option(exclude_devices) + + # Authenticate and retrieve from source + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve security rules from source + try: + source_rules = SecurityRule(source_client, max_limit=5000) + rule_objects = source_rules.list( + folder=folder, + rulebase=rulebase, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + exclude_devices=exclude_devices_list, + ) + logger.info( + f"Retrieved {len(rule_objects)} security rules from source tenant 
folder '{folder}'." + ) + except Exception as e: + logger.error(f"Error retrieving security rules from source: {e}") + raise typer.Exit(code=1) + + # Display retrieved rules if not quiet_mode + if rule_objects and not quiet_mode: + rule_table = [ + [ + rule.name, + rule.folder, + rule.action, + ", ".join(rule.from_), + ", ".join(rule.to_), + ", ".join(rule.application), + rule.description or "", + ] + for rule in rule_objects + ] + typer.echo( + tabulate( + rule_table, + headers=[ + "Name", + "Folder", + "Action", + "From", + "To", + "Applications", + "Description", + ], + tablefmt="fancy_grid", + ) + ) + elif not rule_objects: + typer.echo("No security rules found in the source folder.") + + # Prompt if not auto-approved and objects exist + if rule_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these rules in the destination tenant?" + ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create security rules in destination + destination_rules = SecurityRule(destination_client, max_limit=5000) + created_objs: List[SecurityRuleResponseModel] = [] + error_objects: List[List[str]] = [] + + for src_obj in rule_objects: + try: + create_params = build_create_params(src_obj, folder) + except ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue 
+ + try: + new_obj = destination_rules.create(create_params, rulebase=rulebase) + created_objs.append(new_obj) + logger.info(f"Created security rule in destination: {new_obj.name}") + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following security rules:") + created_table = [ + [ + rule.name, + rule.folder, + rule.action, + ", ".join(rule.from_), + ", ".join(rule.to_), + ", ".join(rule.application), + rule.description or "", + ] + for rule in created_objs + ] + typer.echo( + tabulate( + created_table, + headers=[ + "Name", + "Folder", + "Action", + "From", + "To", + "Applications", + "Description", + ], + tablefmt="fancy_grid", + ) + ) + + if error_objects and not quiet_mode: + typer.echo("\nSome security rules failed to be created:") + typer.echo( + tabulate( + error_objects, + headers=["Rule Name", "Error"], + tablefmt="fancy_grid", + ) + ) + + # Commit changes if requested and objects were created + if commit_and_push and created_objs: + try: + commit_params = { + "folders": [folder], + "description": "Cloned security rules", + "sync": True, + } + result = destination_rules.commit(**commit_params) + job_status = destination_rules.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error(f"Error committing security rules in destination: {e}") + raise typer.Exit(code=1) + else: + if created_objs and not commit_and_push: + logger.info( + "Rules created, but --commit-and-push not specified, skipping commit." 
+ ) + else: + logger.info("No new security rules were created, skipping commit.") + + typer.echo("🎉 Security rule cloning completed successfully! 🎉") diff --git a/scm_config_clone/commands/security/url_category.py b/scm_config_clone/commands/security/url_category.py new file mode 100644 index 0000000..2571929 --- /dev/null +++ b/scm_config_clone/commands/security/url_category.py @@ -0,0 +1,357 @@ +# scm_config_clone/commands/security/url_categories.py + +import logging +from typing import List, Optional, Any, Dict + +import typer +from scm.client import Scm +from scm.config.security import URLCategories +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm.models.security.url_categories import ( + URLCategoriesCreateModel, + URLCategoriesResponseModel, +) +from tabulate import tabulate + +from scm_config_clone.utilities import load_settings, parse_csv_option + + +def build_create_params( + src_obj: URLCategoriesResponseModel, folder: str +) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new URL category object. + + Given an existing URLCategoriesResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new URL category in the destination tenant. It uses `model_dump` on a Pydantic model + to ensure only valid, explicitly set fields are included. + + Args: + src_obj: The URLCategoriesResponseModel representing the source URL category object. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `URLCategories.create()`. + This dictionary is validated and pruned by URLCategoriesCreateModel. 
+ """ + data = { + "name": src_obj.name, + "folder": folder, + "description": src_obj.description if src_obj.description is not None else None, + "list": src_obj.list, + "type": src_obj.type, + } + + create_model = URLCategoriesCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) + + +def url_categories( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning URL categories.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets to exclude from the retrieval.", + ), + exclude_devices: str = typer.Option( + None, + "--exclude-devices", + help="Comma-separated list of devices to exclude from the retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + auto_approve: bool = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + is_flag=True, + ), + create_report: bool = typer.Option( + None, + "--create-report", + "-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + is_flag=True, + ), + quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + "--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + 
settings_file: str = typer.Option( + "settings.yaml", + "--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone URL category objects from a source SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving URL category objects + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve URL category objects from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. + + Args: + folder: The source folder from which to retrieve URL category objects. + exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + exclude_devices: Comma-separated device names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. + auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. + dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). 
+ logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. + + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. + """ + typer.echo("🚀 Starting URL category objects cloning...") + + # Load settings from file + settings = load_settings(settings_file) + + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + exclude_devices_list = parse_csv_option(exclude_devices) + + # Authenticate and retrieve from source + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve URL category objects from source + try: + source_url_categories = URLCategories(source_client, max_limit=5000) + 
url_category_objects = source_url_categories.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + exclude_devices=exclude_devices_list, + ) + logger.info( + f"Retrieved {len(url_category_objects)} URL category objects from source tenant folder '{folder}'." + ) + except Exception as e: + logger.error(f"Error retrieving URL category objects from source: {e}") + raise typer.Exit(code=1) + + # Display retrieved categories if not quiet_mode + if url_category_objects and not quiet_mode: + category_table = [] + for category in url_category_objects: + category_table.append( + [ + category.name, + category.folder, + category.type, + ", ".join(category.list), + category.description or "", + ] + ) + + typer.echo( + tabulate( + category_table, + headers=["Name", "Folder", "Type", "URLs/Categories", "Description"], + tablefmt="fancy_grid", + ) + ) + elif not url_category_objects: + typer.echo("No URL category objects found in the source folder.") + + # Prompt if not auto-approved and objects exist + if url_category_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these objects in the destination tenant?" 
+ ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create URL category objects in destination + destination_url_categories = URLCategories(destination_client, max_limit=5000) + created_objs: List[URLCategoriesResponseModel] = [] + error_objects: List[List[str]] = [] + + for src_obj in url_category_objects: + try: + create_params = build_create_params(src_obj, folder) + except ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue + + try: + new_obj = destination_url_categories.create(create_params) + created_objs.append(new_obj) + logger.info(f"Created URL category object in destination: {new_obj.name}") + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following URL category objects:") + created_table = [] + for obj in created_objs: + created_table.append( + [ + obj.name, + obj.folder, + obj.type, + ", ".join(obj.list), + obj.description or "", + ] + ) + + typer.echo( + tabulate( + created_table, + headers=["Name", "Folder", "Type", "URLs/Categories", 
"Description"], + tablefmt="fancy_grid", + ) + ) + + if error_objects and not quiet_mode: + typer.echo("\nSome URL category objects failed to be created:") + typer.echo( + tabulate( + error_objects, + headers=["Object Name", "Error"], + tablefmt="fancy_grid", + ) + ) + + # Commit changes if requested and objects were created + if commit_and_push and created_objs: + try: + commit_params = { + "folders": [folder], + "description": "Cloned URL category objects", + "sync": True, + } + result = destination_url_categories.commit(**commit_params) + job_status = destination_url_categories.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error(f"Error committing URL category objects in destination: {e}") + raise typer.Exit(code=1) + else: + if created_objs and not commit_and_push: + logger.info( + "Objects created, but --commit-and-push not specified, skipping commit." + ) + else: + logger.info("No new URL category objects were created, skipping commit.") + + typer.echo("🎉 URL category objects cloning completed successfully! 
🎉") diff --git a/scm_config_clone/commands/security/vulnerability_protection_profile.py b/scm_config_clone/commands/security/vulnerability_protection_profile.py new file mode 100644 index 0000000..5b0a999 --- /dev/null +++ b/scm_config_clone/commands/security/vulnerability_protection_profile.py @@ -0,0 +1,377 @@ +# scm_config_clone/commands/security/vulnerability_protection_profile.py + +import logging +from typing import List, Optional, Any, Dict + +import typer +from scm.client import Scm +from scm.config.security import VulnerabilityProtectionProfile +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm.models.security import ( + VulnerabilityProfileCreateModel, + VulnerabilityProfileResponseModel, +) +from tabulate import tabulate + +from scm_config_clone.utilities import load_settings, parse_csv_option + + +def build_create_params( + src_obj: VulnerabilityProfileResponseModel, folder: str +) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new vulnerability protection profile. + + Given an existing VulnerabilityProfileResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new profile in the destination tenant. It uses `model_dump` on a Pydantic model to ensure + only valid, explicitly set fields are included. + + Args: + src_obj: The VulnerabilityProfileResponseModel representing the source profile. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `VulnerabilityProtectionProfile.create()`. + This dictionary is validated and pruned by VulnerabilityProfileCreateModel. 
+ """ + data = { + "name": src_obj.name, + "folder": folder, + "description": src_obj.description if src_obj.description is not None else None, + "rules": src_obj.rules, + "threat_exception": ( + src_obj.threat_exception if src_obj.threat_exception else None + ), + } + + create_model = VulnerabilityProfileCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) + + +def vulnerability_protection_profiles( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning vulnerability protection profiles.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets to exclude from the retrieval.", + ), + exclude_devices: str = typer.Option( + None, + "--exclude-devices", + help="Comma-separated list of devices to exclude from the retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + auto_approve: bool = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + is_flag=True, + ), + create_report: bool = typer.Option( + None, + "--create-report", + "-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + is_flag=True, + ), + quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + 
"--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + settings_file: str = typer.Option( + "settings.yaml", + "--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone vulnerability protection profile objects from a source SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving vulnerability protection profile objects + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve vulnerability protection profile objects from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. + + Args: + folder: The source folder from which to retrieve vulnerability protection profile objects. + exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + exclude_devices: Comma-separated device names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. + auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. 
+ dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). + logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. + + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. + """ + typer.echo("🚀 Starting vulnerability protection profile objects cloning...") + + # Load settings from file + settings = load_settings(settings_file) + + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + exclude_devices_list = parse_csv_option(exclude_devices) + + # Authenticate and retrieve from source + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + 
logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve vulnerability protection profile objects from source + try: + source_profiles = VulnerabilityProtectionProfile(source_client, max_limit=5000) + profile_objects = source_profiles.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + exclude_devices=exclude_devices_list, + ) + logger.info( + f"Retrieved {len(profile_objects)} vulnerability protection profile objects from source tenant folder '{folder}'." + ) + except Exception as e: + logger.error( + f"Error retrieving vulnerability protection profile objects from source: {e}" + ) + raise typer.Exit(code=1) + + # Display retrieved profiles if not quiet_mode + if profile_objects and not quiet_mode: + profile_table = [] + for profile in profile_objects: + profile_table.append( + [ + profile.name, + profile.folder, + len(profile.rules), + len(profile.threat_exception) if profile.threat_exception else 0, + profile.description or "", + ] + ) + + typer.echo( + tabulate( + profile_table, + headers=["Name", "Folder", "Rules", "Exceptions", "Description"], + tablefmt="fancy_grid", + ) + ) + elif not profile_objects: + typer.echo( + "No vulnerability protection profile objects found in the source folder." + ) + + # Prompt if not auto-approved and objects exist + if profile_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these objects in the destination tenant?" 
+ ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create vulnerability protection profile objects in destination + destination_profiles = VulnerabilityProtectionProfile( + destination_client, max_limit=5000 + ) + created_objs: List[VulnerabilityProfileResponseModel] = [] + error_objects: List[List[str]] = [] + + for src_obj in profile_objects: + try: + create_params = build_create_params(src_obj, folder) + except ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue + + try: + new_obj = destination_profiles.create(create_params) + created_objs.append(new_obj) + logger.info( + f"Created vulnerability protection profile object in destination: {new_obj.name}" + ) + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo( + "\nSuccessfully created the following vulnerability protection profile objects:" + ) + created_table = [] + for obj in created_objs: + created_table.append( + [ + obj.name, + obj.folder, + len(obj.rules), + len(obj.threat_exception) if obj.threat_exception else 0, + 
obj.description or "", + ] + ) + + typer.echo( + tabulate( + created_table, + headers=["Name", "Folder", "Rules", "Exceptions", "Description"], + tablefmt="fancy_grid", + ) + ) + + if error_objects and not quiet_mode: + typer.echo( + "\nSome vulnerability protection profile objects failed to be created:" + ) + typer.echo( + tabulate( + error_objects, + headers=["Object Name", "Error"], + tablefmt="fancy_grid", + ) + ) + + # Commit changes if requested and objects were created + if commit_and_push and created_objs: + try: + commit_params = { + "folders": [folder], + "description": "Cloned vulnerability protection profile objects", + "sync": True, + } + result = destination_profiles.commit(**commit_params) + job_status = destination_profiles.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error( + f"Error committing vulnerability protection profile objects in destination: {e}" + ) + raise typer.Exit(code=1) + else: + if created_objs and not commit_and_push: + logger.info( + "Objects created, but --commit-and-push not specified, skipping commit." + ) + else: + logger.info( + "No new vulnerability protection profile objects were created, skipping commit." + ) + + typer.echo( + "🎉 Vulnerability protection profile objects cloning completed successfully! 
🎉" + ) diff --git a/scm_config_clone/commands/security/wildfire_antivirus_profile.py b/scm_config_clone/commands/security/wildfire_antivirus_profile.py new file mode 100644 index 0000000..663442b --- /dev/null +++ b/scm_config_clone/commands/security/wildfire_antivirus_profile.py @@ -0,0 +1,371 @@ +# scm_config_clone/commands/security/wildfire_antivirus_profile.py + +import logging +from typing import List, Optional, Any, Dict + +import typer +from scm.client import Scm +from scm.config.security import WildfireAntivirusProfile +from scm.exceptions import ( + AuthenticationError, + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, +) +from scm.models.security.wildfire_antivirus_profiles import ( + WildfireAvProfileCreateModel, + WildfireAvProfileResponseModel, +) +from tabulate import tabulate + +from scm_config_clone.utilities import load_settings, parse_csv_option + + +def build_create_params( + src_obj: WildfireAvProfileResponseModel, folder: str +) -> Dict[str, Any]: + """ + Construct the dictionary of parameters required to create a new WildFire antivirus profile. + + Given an existing WildfireAvProfileResponseModel (source object) and a target folder, + this function builds a dictionary with all necessary fields for creating + a new profile in the destination tenant. It uses `model_dump` on a Pydantic model + to ensure only valid, explicitly set fields are included. + + Args: + src_obj: The WildfireAvProfileResponseModel representing the source profile. + folder: The folder in the destination tenant where the object should be created. + + Returns: + A dictionary containing the fields required for `WildfireAntivirusProfile.create()`. + This dictionary is validated and pruned by WildfireAvProfileCreateModel. 
+ """ + data = { + "name": src_obj.name, + "folder": folder, + "description": src_obj.description if src_obj.description is not None else None, + "packet_capture": src_obj.packet_capture, + "rules": [rule.model_dump() for rule in src_obj.rules], + } + + if src_obj.mlav_exception: + data["mlav_exception"] = [exc.model_dump() for exc in src_obj.mlav_exception] + + if src_obj.threat_exception: + data["threat_exception"] = [ + exc.model_dump() for exc in src_obj.threat_exception + ] + + create_model = WildfireAvProfileCreateModel(**data) + return create_model.model_dump( + exclude_unset=True, + exclude_none=True, + ) + + +def wildfire_antivirus_profiles( + folder: Optional[str] = typer.Option( + None, + "--folder", + prompt="Please enter the folder name", + help="The folder to focus on when retrieving and cloning WildFire antivirus profiles.", + ), + exclude_folders: str = typer.Option( + None, + "--exclude-folders", + help="Comma-separated list of folders to exclude from the retrieval.", + ), + exclude_snippets: str = typer.Option( + None, + "--exclude-snippets", + help="Comma-separated list of snippets to exclude from the retrieval.", + ), + exclude_devices: str = typer.Option( + None, + "--exclude-devices", + help="Comma-separated list of devices to exclude from the retrieval.", + ), + commit_and_push: bool = typer.Option( + False, + "--commit-and-push", + help="If set, commit the changes on the destination tenant after object creation.", + is_flag=True, + ), + auto_approve: bool = typer.Option( + None, + "--auto-approve", + "-A", + help="If set, skip the confirmation prompt and automatically proceed with creation.", + is_flag=True, + ), + create_report: bool = typer.Option( + None, + "--create-report", + "-R", + help="If set, create or append to a 'result.csv' file with the task results.", + is_flag=True, + ), + dry_run: bool = typer.Option( + None, + "--dry-run", + "-D", + help="If set, perform a dry run without applying any changes.", + is_flag=True, + ), + 
quiet_mode: bool = typer.Option( + None, + "--quiet-mode", + "-Q", + help="If set, hide all console output (except log messages).", + is_flag=True, + ), + logging_level: str = typer.Option( + None, + "--logging-level", + "-L", + help="Override the logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL).", + ), + settings_file: str = typer.Option( + "settings.yaml", + "--settings-file", + "-s", + help="Path to the YAML settings file containing tenant credentials and configuration.", + ), +): + """ + Clone WildFire antivirus profiles from a source SCM tenant to a destination SCM tenant. + + This Typer CLI command automates the process of retrieving WildFire antivirus profiles + from a specified folder in a source tenant, optionally filters them out based + on user-defined exclusion criteria, and then creates them in a destination tenant. + + The workflow is: + 1. Load authentication and configuration settings (e.g., credentials, logging) from the YAML file. + 2. If any runtime flags are provided, they override the corresponding settings from the file. + 3. Authenticate to the source tenant and retrieve profiles from the given folder. + 4. Display the retrieved source objects. If not auto-approved, prompt the user before proceeding. + 5. Authenticate to the destination tenant and create the retrieved objects there. + 6. If `--commit-and-push` is provided and objects were created successfully, commit the changes. + 7. Display the results, including successfully created objects and any errors. + + Args: + folder: The source folder from which to retrieve WildFire antivirus profiles. + exclude_folders: Comma-separated folder names to exclude from source retrieval. + exclude_snippets: Comma-separated snippet names to exclude from source retrieval. + exclude_devices: Comma-separated device names to exclude from source retrieval. + commit_and_push: If True, commit changes in the destination tenant after creation. 
+ auto_approve: If True or set in settings, skip the confirmation prompt before creating objects. + create_report: If True or set in settings, create/append a CSV file with task results. + dry_run: If True or set in settings, perform a dry run without applying changes (logic TBD). + quiet_mode: If True or set in settings, hide console output except log messages (logic TBD). + logging_level: If provided, override the logging level from settings.yaml. + settings_file: Path to the YAML settings file for loading authentication and configuration. + + Raises: + typer.Exit: Exits if authentication fails, retrieval fails, or if the user opts not to proceed. + """ + typer.echo("🚀 Starting WildFire antivirus profiles cloning...") + + # Load settings from file + settings = load_settings(settings_file) + + # Apply fallback logic: if a flag wasn't provided at runtime, use settings.yaml values + auto_approve = settings["auto_approve"] if auto_approve is None else auto_approve + create_report = ( + settings["create_report"] if create_report is None else create_report + ) + dry_run = settings["dry_run"] if dry_run is None else dry_run + quiet_mode = settings["quiet"] if quiet_mode is None else quiet_mode + + # Logging level fallback + if logging_level is None: + logging_level = settings["logging"] + logging_level = logging_level.upper() + + logger = logging.getLogger(__name__) + logger.setLevel(getattr(logging, logging_level, logging.INFO)) + + # Parse CSV options + exclude_folders_list = parse_csv_option(exclude_folders) + exclude_snippets_list = parse_csv_option(exclude_snippets) + exclude_devices_list = parse_csv_option(exclude_devices) + + # Authenticate and retrieve from source + try: + source_creds = settings["source_scm"] + source_client = Scm( + client_id=source_creds["client_id"], + client_secret=source_creds["client_secret"], + tsg_id=source_creds["tenant"], + log_level=logging_level, + ) + logger.info(f"Authenticated with source SCM tenant: {source_creds['tenant']}") + 
except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with source tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with source authentication: {e}") + raise typer.Exit(code=1) + + # Retrieve profiles from source + try: + source_profiles = WildfireAntivirusProfile(source_client, max_limit=5000) + profile_objects = source_profiles.list( + folder=folder, + exact_match=True, + exclude_folders=exclude_folders_list, + exclude_snippets=exclude_snippets_list, + exclude_devices=exclude_devices_list, + ) + logger.info( + f"Retrieved {len(profile_objects)} WildFire antivirus profiles from source tenant folder '{folder}'." + ) + except Exception as e: + logger.error(f"Error retrieving WildFire antivirus profiles from source: {e}") + raise typer.Exit(code=1) + + # Display retrieved profiles if not quiet_mode + if profile_objects and not quiet_mode: + profile_table = [] + for profile in profile_objects: + profile_table.append( + [ + profile.name, + profile.folder, + len(profile.rules), + "Yes" if profile.packet_capture else "No", + profile.description or "", + ] + ) + + typer.echo( + tabulate( + profile_table, + headers=["Name", "Folder", "Rules", "Packet Capture", "Description"], + tablefmt="fancy_grid", + ) + ) + elif not profile_objects: + typer.echo("No WildFire antivirus profiles found in the source folder.") + + # Prompt if not auto-approved and objects exist + if profile_objects and not auto_approve: + proceed = typer.confirm( + "Do you want to proceed with creating these objects in the destination tenant?" 
+ ) + if not proceed: + typer.echo("Aborting cloning operation.") + raise typer.Exit(code=0) + + # Authenticate with destination tenant + try: + dest_creds = settings["destination_scm"] + destination_client = Scm( + client_id=dest_creds["client_id"], + client_secret=dest_creds["client_secret"], + tsg_id=dest_creds["tenant"], + log_level=logging_level, + ) + logger.info( + f"Authenticated with destination SCM tenant: {dest_creds['tenant']}" + ) + except (AuthenticationError, KeyError) as e: + logger.error(f"Error authenticating with destination tenant: {e}") + raise typer.Exit(code=1) + except Exception as e: + logger.error(f"Unexpected error with destination authentication: {e}") + raise typer.Exit(code=1) + + # Create profiles in destination + destination_profiles = WildfireAntivirusProfile(destination_client, max_limit=5000) + created_objs: List[WildfireAvProfileResponseModel] = [] + error_objects: List[List[str]] = [] + + for src_obj in profile_objects: + try: + create_params = build_create_params(src_obj, folder) + except ValueError as ve: + error_objects.append([src_obj.name, str(ve)]) + continue + + try: + new_obj = destination_profiles.create(create_params) + created_objs.append(new_obj) + logger.info( + f"Created WildFire antivirus profile in destination: {new_obj.name}" + ) + except ( + InvalidObjectError, + MissingQueryParameterError, + NameNotUniqueError, + ObjectNotPresentError, + ) as e: + error_objects.append([src_obj.name, str(e)]) + continue + except Exception as e: + error_objects.append([src_obj.name, str(e)]) + continue + + # Display results if not quiet_mode + if created_objs and not quiet_mode: + typer.echo("\nSuccessfully created the following WildFire antivirus profiles:") + created_table = [] + for obj in created_objs: + created_table.append( + [ + obj.name, + obj.folder, + len(obj.rules), + "Yes" if obj.packet_capture else "No", + obj.description or "", + ] + ) + + typer.echo( + tabulate( + created_table, + headers=["Name", "Folder", 
"Rules", "Packet Capture", "Description"], + tablefmt="fancy_grid", + ) + ) + + if error_objects and not quiet_mode: + typer.echo("\nSome WildFire antivirus profiles failed to be created:") + typer.echo( + tabulate( + error_objects, + headers=["Object Name", "Error"], + tablefmt="fancy_grid", + ) + ) + + # Commit changes if requested and objects were created + if commit_and_push and created_objs: + try: + commit_params = { + "folders": [folder], + "description": "Cloned WildFire antivirus profiles", + "sync": True, + } + result = destination_profiles.commit(**commit_params) + job_status = destination_profiles.get_job_status(result.job_id) + logger.info( + f"Commit job ID {result.job_id} status: {job_status.data[0].status_str}" + ) + except Exception as e: + logger.error( + f"Error committing WildFire antivirus profiles in destination: {e}" + ) + raise typer.Exit(code=1) + else: + if created_objs and not commit_and_push: + logger.info( + "Objects created, but --commit-and-push not specified, skipping commit." + ) + else: + logger.info( + "No new WildFire antivirus profiles were created, skipping commit." + ) + + typer.echo("🎉 WildFire antivirus profiles cloning completed successfully! 
🎉") diff --git a/scm_config_clone/main.py b/scm_config_clone/main.py index 2e6a437..cf39342 100644 --- a/scm_config_clone/main.py +++ b/scm_config_clone/main.py @@ -21,13 +21,20 @@ from scm_config_clone import ( addresses, address_groups, + anti_spyware_profiles, applications, application_filters, application_groups, + create_settings, + decryption_profiles, + dns_security_profiles, external_dynamic_lists, + security_rules, services, - create_settings, tags, + url_categories, + vulnerability_protection_profiles, + wildfire_antivirus_profiles, ) # Initialize Typer app @@ -112,6 +119,49 @@ # Security Services # --------------------------------------------------------------------------------------------------------------------- +# Anti-Spyware Profiles +app.command( + name="anti-spyware-profiles", + help="Clone anti-spyware profiles.", +)(anti_spyware_profiles) + + +# Decryption Profiles +app.command( + name="decryption-profiles", + help="Clone decryption profiles.", +)(decryption_profiles) + +# DNS Security Profiles +app.command( + name="dns-security-profiles", + help="Clone DNS Security profiles.", +)(dns_security_profiles) + +# Security Rules +app.command( + name="security-rules", + help="Clone security rules.", +)(security_rules) + +# URL Categories Rules +app.command( + name="url-categories", + help="Clone URL categories.", +)(url_categories) + +# Vulnerability Protection Profiles +app.command( + name="vulnerability-profiles", + help="Clone vulnerability protection profiles.", +)(vulnerability_protection_profiles) + +# Wildfire AV Profiles +app.command( + name="wildfire-profiles", + help="Clone Wildfire AV profiles.", +)(wildfire_antivirus_profiles) + if __name__ == "__main__": app() From b58099ad68fc4a685846bb7ff734303109de2493 Mon Sep 17 00:00:00 2001 From: Calvin Remsburg Date: Sun, 15 Dec 2024 16:45:32 -0600 Subject: [PATCH 17/18] Remove Docker documentation from SCM config clone user guide Deleted all Docker-related documentation files, including 
installation, commands, examples, getting started, and troubleshooting guides. Updated the remaining Python-specific documentation to reflect changes and streamline usage instructions. --- docs/index.md | 62 +++++-- docs/user-guide/docker/commands.md | 67 ------- docs/user-guide/docker/examples.md | 69 ------- docs/user-guide/docker/getting-started.md | 56 ------ docs/user-guide/docker/installation.md | 57 ------ docs/user-guide/docker/troubleshooting.md | 52 ------ docs/user-guide/introduction.md | 16 +- docs/user-guide/python/commands.md | 214 +++++++++++++++++++--- docs/user-guide/python/examples.md | 193 ++++++++++++++++--- docs/user-guide/python/getting-started.md | 134 ++++++++------ docs/user-guide/python/troubleshooting.md | 6 +- 11 files changed, 492 insertions(+), 434 deletions(-) delete mode 100644 docs/user-guide/docker/commands.md delete mode 100644 docs/user-guide/docker/examples.md delete mode 100644 docs/user-guide/docker/getting-started.md delete mode 100644 docs/user-guide/docker/installation.md delete mode 100644 docs/user-guide/docker/troubleshooting.md diff --git a/docs/index.md b/docs/index.md index b2eb88c..5c5fdd0 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,6 +1,6 @@ --- hide: - - navigation + - navigation ---