diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..bdb81a8
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,10 @@
+# Please see the documentation for all configuration options:
+# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+version: 2
+updates:
+ # Maintain dependencies for GitHub Actions
+ - package-ecosystem: "github-actions"
+ directory: "/"
+ schedule:
+ # Check for updates to GitHub Actions every weekday
+ interval: "daily"
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 0000000..9b15cd1
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,60 @@
+name: Tests
+on:
+ workflow_dispatch:
+ pull_request:
+ push:
+ branches:
+ - main
+
+jobs:
+ static:
+ name: Tests
+ runs-on: ubuntu-latest
+ timeout-minutes: 10
+ concurrency: testing_environment
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Python Setup
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Terraform Setup
+ uses: hashicorp/setup-terraform@v3
+ with:
+ terraform_version: 1.10.5
+
+ - name: TFLint Setup
+ uses: terraform-linters/setup-tflint@v4
+
+ - name: Terraform Docs Setup
+ run: |
+ mkdir terraform-docs && cd terraform-docs
+ curl -sSLo terraform-docs.tar.gz https://terraform-docs.io/dl/v0.18.0/terraform-docs-v0.18.0-linux-amd64.tar.gz
+ tar -xzf terraform-docs.tar.gz
+ chmod +x terraform-docs
+ echo "$GITHUB_WORKSPACE/terraform-docs" >> $GITHUB_PATH
+
+ - name: Pre-commit Checks
+ uses: pre-commit/action@v3.0.1
+
+ - name: Terraform Init
+ run: terraform init -input=false -no-color
+
+ - name: Terraform Validate
+ run: terraform validate -no-color
+
+ - name: Webex Notification
+ if: always() && github.event_name != 'pull_request'
+ uses: qsnyder/action-wxt@master
+ env:
+ TOKEN: ${{ secrets.WEBEX_TOKEN }}
+ ROOMID: ${{ secrets.WEBEX_ROOM_ID }}
+ MESSAGE: |
+ [**[${{ job.status }}] ${{ github.repository }} #${{ github.run_number }}**](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})
+ * Commit: [${{ github.event.head_commit.message }}](${{ github.event.head_commit.url }})[${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
+ * Author: ${{ github.event.sender.login }}
+ * Branch: ${{ github.ref }} ${{ github.head_ref }}
+ * Event: ${{ github.event_name }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..11a0827
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,7 @@
+*.log
+.terraform
+.terraform.lock.hcl
+terraform.tfstate
+terraform.tfstate.backup
+.envrc
+tflint.hcl
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..fb39da8
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,17 @@
+---
+repos:
+ - repo: https://github.com/antonbabenko/pre-commit-terraform
+ rev: v1.62.3
+ hooks:
+ - id: terraform_fmt
+ args:
+ - --args=-recursive
+ - id: terraform_tflint
+
+ - repo: https://github.com/terraform-docs/terraform-docs
+ rev: v0.18.0
+ hooks:
+ - id: terraform-docs-system
+ args: ["./examples/fabric"]
+ - id: terraform-docs-system
+ args: ["."]
diff --git a/.terraform-docs.yml b/.terraform-docs.yml
new file mode 100644
index 0000000..fb95742
--- /dev/null
+++ b/.terraform-docs.yml
@@ -0,0 +1,41 @@
+version: ">= 0.14.0"
+
+formatter: markdown table
+
+content: |-
+ # Terraform Network-as-Code Cisco Hyperfabric Module
+
+ A Terraform module to configure Cisco Hyperfabric.
+
+ ## Usage
+
+ This module supports an inventory-driven approach, where a complete Hyperfabric configuration, or parts of it, can be modeled either in one or more YAML files or natively as Terraform variables.
+
+ ## Examples
+
+ Configuring a Fabric using YAML:
+
+ #### `fabric.yaml`
+
+ ```yaml
+ {{ include "./examples/fabric/fabric.yaml" }}
+ ```
+
+ #### `main.tf`
+
+ ```hcl
+ {{ include "./examples/fabric/main.tf" }}
+ ```
+
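+ As an alternative to YAML files, the same fabric can be described natively through the `model` variable; a minimal sketch mirroring `fabric.yaml` above:
+
+ ```hcl
+ module "hyperfabric" {
+ source = "netascode/nac-hyperfabric/hyperfabric"
+ version = ">= 0.1.0"
+
+ # same content as fabric.yaml, expressed as an HCL map
+ model = {
+ hyperfabric = {
+ fabrics = [
+ {
+ name = "My Fabric 01"
+ description = "My first HyperFabric"
+ }
+ ]
+ }
+ }
+ }
+ ```
+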
+ {{ .Requirements }}
+ {{ .Inputs }}
+ {{ .Outputs }}
+ {{ .Resources }}
+ {{ .Modules }}
+output:
+ file: README.md
+ mode: replace
+
+sort:
+ enabled: true
+ by: required
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..d8fc569
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,3 @@
+## 0.1.0 (unreleased)
+
+- Initial release
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..d9a10c0
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,176 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..443a27d
--- /dev/null
+++ b/README.md
@@ -0,0 +1,80 @@
+
+# Terraform Network-as-Code Cisco Hyperfabric Module
+
+A Terraform module to configure Cisco Hyperfabric.
+
+## Usage
+
+This module supports an inventory-driven approach, where a complete Hyperfabric configuration, or parts of it, can be modeled either in one or more YAML files or natively as Terraform variables.
+
+## Examples
+
+Configuring a Fabric using YAML:
+
+#### `fabric.yaml`
+
+```yaml
+---
+hyperfabric:
+ fabrics:
+ - name: My Fabric 01
+ description: My first HyperFabric
+ address: 170 West Tasman Dr.
+ city: San Jose
+ country: USA
+ location: sj01-1-101-AAA01
+```
+
+#### `main.tf`
+
+```hcl
+module "hyperfabric" {
+ source = "netascode/nac-hyperfabric/hyperfabric"
+ version = ">= 0.1.0"
+
+ yaml_files = ["fabric.yaml"]
+}
+```
+
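+As an alternative to YAML files, the same fabric can be described natively through the `model` variable; a minimal sketch mirroring `fabric.yaml` above:
+
+```hcl
+module "hyperfabric" {
+ source = "netascode/nac-hyperfabric/hyperfabric"
+ version = ">= 0.1.0"
+
+ # same content as fabric.yaml, expressed as an HCL map
+ model = {
+ hyperfabric = {
+ fabrics = [
+ {
+ name = "My Fabric 01"
+ description = "My first HyperFabric"
+ }
+ ]
+ }
+ }
+}
+```
+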
+## Requirements
+
+| Name | Version |
+|------|---------|
+| [terraform](#requirement\_terraform) | >= 1.8.0 |
+| [hyperfabric](#requirement\_hyperfabric) | >= 0.1.0 |
+| [local](#requirement\_local) | >= 2.3.0 |
+| [utils](#requirement\_utils) | >= 0.2.5 |
+## Inputs
+
+| Name | Description | Type | Default | Required |
+|------|-------------|------|---------|:--------:|
+| [model](#input\_model) | As an alternative to YAML files, a native Terraform data structure can be provided as well. | `map(any)` | `{}` | no |
+| [write\_default\_values\_file](#input\_write\_default\_values\_file) | Write all default values to a YAML file. Value is a path pointing to the file to be created. | `string` | `""` | no |
+| [yaml\_directories](#input\_yaml\_directories) | List of paths to YAML directories. | `list(string)` | `[]` | no |
+| [yaml\_files](#input\_yaml\_files) | List of paths to YAML files. | `list(string)` | `[]` | no |
+## Outputs
+
+| Name | Description |
+|------|-------------|
+| [default\_values](#output\_default\_values) | All default values. |
+| [model](#output\_model) | Full model. |
+## Resources
+
+| Name | Type |
+|------|------|
+| [hyperfabric_connection.connection](https://registry.terraform.io/providers/cisco-open/hyperfabric/latest/docs/resources/connection) | resource |
+| [hyperfabric_fabric.fabric](https://registry.terraform.io/providers/cisco-open/hyperfabric/latest/docs/resources/fabric) | resource |
+| [hyperfabric_node.node](https://registry.terraform.io/providers/cisco-open/hyperfabric/latest/docs/resources/node) | resource |
+| [hyperfabric_node_loopback.node_loopback](https://registry.terraform.io/providers/cisco-open/hyperfabric/latest/docs/resources/node_loopback) | resource |
+| [hyperfabric_node_management_port.node_management_port](https://registry.terraform.io/providers/cisco-open/hyperfabric/latest/docs/resources/node_management_port) | resource |
+| [hyperfabric_node_port.node_port](https://registry.terraform.io/providers/cisco-open/hyperfabric/latest/docs/resources/node_port) | resource |
+| [hyperfabric_node_sub_interface.node_sub_interface](https://registry.terraform.io/providers/cisco-open/hyperfabric/latest/docs/resources/node_sub_interface) | resource |
+| [hyperfabric_user.user](https://registry.terraform.io/providers/cisco-open/hyperfabric/latest/docs/resources/user) | resource |
+| [hyperfabric_vni.vni](https://registry.terraform.io/providers/cisco-open/hyperfabric/latest/docs/resources/vni) | resource |
+| [hyperfabric_vrf.vrf](https://registry.terraform.io/providers/cisco-open/hyperfabric/latest/docs/resources/vrf) | resource |
+| [local_sensitive_file.defaults](https://registry.terraform.io/providers/hashicorp/local/latest/docs/resources/sensitive_file) | resource |
+| [terraform_data.validation](https://registry.terraform.io/providers/hashicorp/terraform/latest/docs/resources/data) | resource |
+## Modules
+
+No modules.
+
\ No newline at end of file
diff --git a/defaults/defaults.yaml b/defaults/defaults.yaml
new file mode 100644
index 0000000..63eb7a5
--- /dev/null
+++ b/defaults/defaults.yaml
@@ -0,0 +1,16 @@
+defaults:
+ hyperfabric:
+ fabrics:
+ topology: MESH
+ nodes:
+ ports:
+ roles: [HOST_PORT]
+ enabled: true
+ sub_interfaces:
+ enabled: true
+ vnis:
+ svi:
+ enabled: true
+ users:
+ role: READ_ONLY
+ enabled: true
\ No newline at end of file
diff --git a/examples/fabric/.terraform-docs.yml b/examples/fabric/.terraform-docs.yml
new file mode 100644
index 0000000..47dfcb3
--- /dev/null
+++ b/examples/fabric/.terraform-docs.yml
@@ -0,0 +1,38 @@
+version: ">= 0.14.0"
+
+formatter: markdown table
+
+content: |-
+ # Hyperfabric Example
+
+ Set environment variables pointing to Hyperfabric:
+
+ ```bash
+ export HYPERFABRIC_TOKEN=abc123
+ ```
+
+ To run this example, execute:
+
+ ```bash
+ $ terraform init
+ $ terraform plan
+ $ terraform apply
+ ```
+
+ Note that this example will create resources. Resources can be destroyed with `terraform destroy`.
+
+ #### `fabric.yaml`
+
+ ```yaml
+ {{ include "./fabric.yaml" }}
+ ```
+
+ #### `main.tf`
+
+ ```hcl
+ {{ include "./main.tf" }}
+ ```
+
+output:
+ file: README.md
+ mode: replace
diff --git a/examples/fabric/README.md b/examples/fabric/README.md
new file mode 100644
index 0000000..18cfa24
--- /dev/null
+++ b/examples/fabric/README.md
@@ -0,0 +1,44 @@
+
+# Hyperfabric Example
+
+Set environment variables pointing to Hyperfabric:
+
+```bash
+export HYPERFABRIC_TOKEN=abc123
+```
+
+To run this example, execute:
+
+```bash
+$ terraform init
+$ terraform plan
+$ terraform apply
+```
+
+Note that this example will create resources. Resources can be destroyed with `terraform destroy`.
+
+#### `fabric.yaml`
+
+```yaml
+---
+hyperfabric:
+ fabrics:
+ - name: My Fabric 01
+ description: My first HyperFabric
+ address: 170 West Tasman Dr.
+ city: San Jose
+ country: USA
+ location: sj01-1-101-AAA01
+```
+
+#### `main.tf`
+
+```hcl
+module "hyperfabric" {
+ source = "netascode/nac-hyperfabric/hyperfabric"
+ version = ">= 0.1.0"
+
+ yaml_files = ["fabric.yaml"]
+}
+```
+
\ No newline at end of file
diff --git a/examples/fabric/fabric.yaml b/examples/fabric/fabric.yaml
new file mode 100644
index 0000000..0d5f127
--- /dev/null
+++ b/examples/fabric/fabric.yaml
@@ -0,0 +1,9 @@
+---
+hyperfabric:
+ fabrics:
+ - name: My Fabric 01
+ description: My first HyperFabric
+ address: 170 West Tasman Dr.
+ city: San Jose
+ country: USA
+ location: sj01-1-101-AAA01
diff --git a/examples/fabric/main.tf b/examples/fabric/main.tf
new file mode 100644
index 0000000..ebf2d85
--- /dev/null
+++ b/examples/fabric/main.tf
@@ -0,0 +1,6 @@
+module "hyperfabric" {
+ source = "netascode/nac-hyperfabric/hyperfabric"
+ version = ">= 0.1.0"
+
+ yaml_files = ["fabric.yaml"]
+}
diff --git a/examples/fabric/versions.tf b/examples/fabric/versions.tf
new file mode 100644
index 0000000..1e3f40b
--- /dev/null
+++ b/examples/fabric/versions.tf
@@ -0,0 +1,3 @@
+terraform {
+ required_version = ">= 1.8.0"
+}
diff --git a/hf_fabric.tf b/hf_fabric.tf
new file mode 100644
index 0000000..4b76e91
--- /dev/null
+++ b/hf_fabric.tf
@@ -0,0 +1,356 @@
+resource "hyperfabric_fabric" "fabric" {
+ for_each = { for fabric in try(local.hyperfabric.fabrics, []) : fabric.name => fabric }
+
+ name = each.key
+ description = try(each.value.description, local.defaults.hyperfabric.fabrics.description, null)
+ address = try(each.value.address, local.defaults.hyperfabric.fabrics.address, null)
+ city = try(each.value.city, local.defaults.hyperfabric.fabrics.city, null)
+ country = try(each.value.country, local.defaults.hyperfabric.fabrics.country, null)
+ location = try(each.value.location, local.defaults.hyperfabric.fabrics.location, null)
+ labels = try(each.value.labels, local.defaults.hyperfabric.fabrics.labels, null)
+ annotations = [for key, value in try(each.value.annotations, {}) : {
+ name = key
+ value = value
+ }]
+}
+
+locals {
+ fabric_nodes = flatten([
+ for fabric in try(local.hyperfabric.fabrics, []) : [
+ for node in try(fabric.nodes, []) : {
+ key = format("%s/%s", fabric.name, node.name)
+ fabric_id = hyperfabric_fabric.fabric[fabric.name].id
+ name = node.name
+ description = try(node.description, local.defaults.hyperfabric.fabrics.nodes.description, null)
+ model_name = try(node.model, local.defaults.hyperfabric.fabrics.nodes.model, null)
+ roles = try(node.roles, local.defaults.hyperfabric.fabrics.nodes.roles, null)
+ location = try(node.location, local.defaults.hyperfabric.fabrics.nodes.location, null)
+ labels = try(node.labels, local.defaults.hyperfabric.fabrics.nodes.labels, null)
+ annotations = [for key, value in try(node.annotations, {}) : {
+ name = key
+ value = value
+ }]
+ }
+ ]
+ ])
+}
+
+resource "hyperfabric_node" "node" {
+ for_each = { for node in local.fabric_nodes : node.key => node }
+
+ fabric_id = each.value.fabric_id
+ name = each.value.name
+ description = each.value.description
+ model_name = each.value.model_name
+ roles = each.value.roles
+ location = each.value.location
+ labels = each.value.labels
+ annotations = each.value.annotations
+}
+
+locals {
+ fabric_connections = flatten([
+ for fabric in try(local.hyperfabric.fabrics, []) : [
+ for connection in try(fabric.connections, []) : {
+ key = format("%s/%s/%s/%s/%s", fabric.name, connection.local_node, connection.local_port, connection.remote_node, connection.remote_port)
+ fabric_id = hyperfabric_fabric.fabric[fabric.name].id
+ description = try(connection.description, local.defaults.hyperfabric.fabrics.connections.description, null)
+ pluggable = try(connection.pluggable, local.defaults.hyperfabric.fabrics.connections.pluggable, null)
+ local = {
+ node_id = hyperfabric_node.node["${fabric.name}/${connection.local_node}"].node_id
+ port_name = connection.local_port
+ }
+ remote = {
+ node_id = hyperfabric_node.node["${fabric.name}/${connection.remote_node}"].node_id
+ port_name = connection.remote_port
+ }
+ }
+ ]
+ ])
+}
+
+resource "hyperfabric_connection" "connection" {
+ for_each = { for connection in local.fabric_connections : connection.key => connection }
+
+ fabric_id = each.value.fabric_id
+ description = each.value.description
+ pluggable = each.value.pluggable
+ local = each.value.local
+ remote = each.value.remote
+}
+
+locals {
+ fabric_nodes_management_port = flatten([
+ for fabric in try(local.hyperfabric.fabrics, []) : [
+ for node in try(fabric.nodes, []) : {
+ key = format("%s/%s", fabric.name, node.name)
+ node_id = hyperfabric_node.node[format("%s/%s", fabric.name, node.name)].node_id
+ name = try(node.management_port.name, local.defaults.hyperfabric.fabrics.nodes.management_port.name, null)
+ description = try(node.management_port.description, local.defaults.hyperfabric.fabrics.nodes.management_port.description, null)
+ cloud_urls = try(node.management_port.cloud_urls, local.defaults.hyperfabric.fabrics.nodes.management_port.cloud_urls, null)
+ ipv4_config_type = try(node.management_port.ipv4_address, null) != null ? "CONFIG_TYPE_STATIC" : "CONFIG_TYPE_DHCP"
+ ipv4_address = try(node.management_port.ipv4_address, local.defaults.hyperfabric.fabrics.nodes.management_port.ipv4_address, null)
+ ipv4_gateway = try(node.management_port.ipv4_gateway, local.defaults.hyperfabric.fabrics.nodes.management_port.ipv4_gateway, null)
+ ipv6_config_type = try(node.management_port.ipv6_address, null) != null ? "CONFIG_TYPE_STATIC" : "CONFIG_TYPE_DHCP"
+ ipv6_address = try(node.management_port.ipv6_address, local.defaults.hyperfabric.fabrics.nodes.management_port.ipv6_address, null)
+ ipv6_gateway = try(node.management_port.ipv6_gateway, local.defaults.hyperfabric.fabrics.nodes.management_port.ipv6_gateway, null)
+ dns_addresses = try(node.management_port.dns_addresses, local.defaults.hyperfabric.fabrics.nodes.management_port.dns_addresses, null)
+ ntp_addresses = try(node.management_port.ntp_addresses, local.defaults.hyperfabric.fabrics.nodes.management_port.ntp_addresses, null)
+ no_proxy = try(node.management_port.no_proxy, local.defaults.hyperfabric.fabrics.nodes.management_port.no_proxy, null)
+ proxy_address = try(node.management_port.proxy_address, local.defaults.hyperfabric.fabrics.nodes.management_port.proxy_address, null)
+ proxy_username = try(node.management_port.proxy_username, local.defaults.hyperfabric.fabrics.nodes.management_port.proxy_username, null)
+ proxy_password = try(node.management_port.proxy_password, local.defaults.hyperfabric.fabrics.nodes.management_port.proxy_password, null)
+ } if try(node.management_port, null) != null
+ ]
+ ])
+}
+
+resource "hyperfabric_node_management_port" "node_management_port" {
+ for_each = { for port in local.fabric_nodes_management_port : port.key => port }
+
+ node_id = each.value.node_id
+ name = each.value.name
+ description = each.value.description
+ cloud_urls = each.value.cloud_urls
+ ipv4_config_type = each.value.ipv4_config_type
+ ipv4_address = each.value.ipv4_address
+ ipv4_gateway = each.value.ipv4_gateway
+ ipv6_config_type = each.value.ipv6_config_type
+ ipv6_address = each.value.ipv6_address
+ ipv6_gateway = each.value.ipv6_gateway
+ dns_addresses = each.value.dns_addresses
+ ntp_addresses = each.value.ntp_addresses
+ no_proxy = each.value.no_proxy
+ proxy_address = each.value.proxy_address
+ proxy_username = each.value.proxy_username
+ proxy_password = each.value.proxy_password
+}
+
+locals {
+ fabric_vrfs = flatten([
+ for fabric in try(local.hyperfabric.fabrics, []) : [
+ for vrf in try(fabric.vrfs, []) : {
+ key = format("%s/%s", fabric.name, vrf.name)
+ fabric_id = hyperfabric_fabric.fabric[fabric.name].id
+ name = vrf.name
+ description = try(vrf.description, local.defaults.hyperfabric.fabrics.vrfs.description, null)
+ asn = try(vrf.asn, local.defaults.hyperfabric.fabrics.vrfs.asn, null)
+ vni = try(vrf.vni, local.defaults.hyperfabric.fabrics.vrfs.vni, null)
+ labels = try(vrf.labels, local.defaults.hyperfabric.fabrics.vrfs.labels, null)
+ annotations = [for key, value in try(vrf.annotations, {}) : {
+ name = key
+ value = value
+ }]
+ }
+ ]
+ ])
+}
+
+resource "hyperfabric_vrf" "vrf" {
+ for_each = { for vrf in local.fabric_vrfs : vrf.key => vrf }
+
+ fabric_id = each.value.fabric_id
+ name = each.value.name
+ description = each.value.description
+ asn = each.value.asn
+ vni = each.value.vni
+ labels = each.value.labels
+ annotations = each.value.annotations
+}
+
+locals {
+ fabric_node_loopbacks = flatten([
+ for fabric in try(local.hyperfabric.fabrics, []) : [
+ for node in try(fabric.nodes, []) : [
+ for loopback in try(node.loopbacks, []) : {
+ key = format("%s/%s/%s", fabric.name, node.name, loopback.name)
+ node_id = hyperfabric_node.node[format("%s/%s", fabric.name, node.name)].node_id
+ name = loopback.name
+ description = try(loopback.description, local.defaults.hyperfabric.fabrics.nodes.loopbacks.description, null)
+ ipv4_address = try(loopback.ipv4_address, local.defaults.hyperfabric.fabrics.nodes.loopbacks.ipv4_address, null)
+ ipv6_address = try(loopback.ipv6_address, local.defaults.hyperfabric.fabrics.nodes.loopbacks.ipv6_address, null)
+ vrf_id = try(hyperfabric_vrf.vrf[format("%s/%s", fabric.name, loopback.vrf)].vrf_id, null)
+ labels = try(loopback.labels, local.defaults.hyperfabric.fabrics.nodes.loopbacks.labels, null)
+ annotations = [for key, value in try(loopback.annotations, {}) : {
+ name = key
+ value = value
+ }]
+ }
+ ]
+ ]
+ ])
+}
+
+resource "hyperfabric_node_loopback" "node_loopback" {
+ for_each = { for loopback in local.fabric_node_loopbacks : loopback.key => loopback }
+
+ node_id = each.value.node_id
+ name = each.value.name
+ description = each.value.description
+ ipv4_address = each.value.ipv4_address
+ ipv6_address = each.value.ipv6_address
+ vrf_id = each.value.vrf_id
+ labels = each.value.labels
+ annotations = each.value.annotations
+}
+
+locals {
+ fabric_node_ports = flatten([
+ for fabric in try(local.hyperfabric.fabrics, []) : [
+ for node in try(fabric.nodes, []) : [
+ for port in try(node.ports, []) : {
+ key = format("%s/%s/%s", fabric.name, node.name, port.name)
+ node_id = hyperfabric_node.node[format("%s/%s", fabric.name, node.name)].node_id
+ name = port.name
+ roles = try(port.roles, local.defaults.hyperfabric.fabrics.nodes.ports.roles, null)
+ description = try(port.description, local.defaults.hyperfabric.fabrics.nodes.ports.description, null)
+ enabled = try(port.enabled, local.defaults.hyperfabric.fabrics.nodes.ports.enabled, null)
+ ipv4_addresses = try(port.ipv4_addresses, local.defaults.hyperfabric.fabrics.nodes.ports.ipv4_addresses, null)
+ ipv6_addresses = try(port.ipv6_addresses, local.defaults.hyperfabric.fabrics.nodes.ports.ipv6_addresses, null)
+ prevent_forwarding = try(port.prevent_forwarding, local.defaults.hyperfabric.fabrics.nodes.ports.prevent_forwarding, null)
+ vrf_id = try(hyperfabric_vrf.vrf[format("%s/%s", fabric.name, port.vrf)].vrf_id, null)
+ labels = try(port.labels, local.defaults.hyperfabric.fabrics.nodes.ports.labels, null)
+ annotations = [for key, value in try(port.annotations, {}) : {
+ name = key
+ value = value
+ }]
+ }
+ ]
+ ]
+ ])
+}
+
+resource "hyperfabric_node_port" "node_port" {
+ for_each = { for port in local.fabric_node_ports : port.key => port }
+
+ node_id = each.value.node_id
+ name = each.value.name
+ roles = each.value.roles
+ description = each.value.description
+ enabled = each.value.enabled
+ ipv4_addresses = each.value.ipv4_addresses
+ ipv6_addresses = each.value.ipv6_addresses
+ prevent_forwarding = each.value.prevent_forwarding
+ vrf_id = each.value.vrf_id
+ labels = each.value.labels
+ annotations = each.value.annotations
+}
+
+locals {
+ fabric_node_port_sub_interfaces = flatten([
+ for fabric in try(local.hyperfabric.fabrics, []) : [
+ for node in try(fabric.nodes, []) : [
+ for port in try(node.ports, []) : [
+ for sub in try(port.sub_interfaces, []) : {
+ key = format("%s/%s/%s/%s", fabric.name, node.name, port.name, sub.id)
+ node_id = hyperfabric_node.node[format("%s/%s", fabric.name, node.name)].node_id
+ name = "${port.name}.${sub.id}"
+ description = try(sub.description, local.defaults.hyperfabric.fabrics.nodes.ports.sub_interfaces.description, null)
+ enabled = try(sub.enabled, local.defaults.hyperfabric.fabrics.nodes.ports.sub_interfaces.enabled, null)
+ ipv4_addresses = try(sub.ipv4_addresses, local.defaults.hyperfabric.fabrics.nodes.ports.sub_interfaces.ipv4_addresses, null)
+ ipv6_addresses = try(sub.ipv6_addresses, local.defaults.hyperfabric.fabrics.nodes.ports.sub_interfaces.ipv6_addresses, null)
+ vlan_id = try(sub.vlan_id, local.defaults.hyperfabric.fabrics.nodes.ports.sub_interfaces.vlan_id, null)
+ vrf_id = try(hyperfabric_vrf.vrf[format("%s/%s", fabric.name, sub.vrf)].vrf_id, null)
+ labels = try(sub.labels, local.defaults.hyperfabric.fabrics.nodes.ports.sub_interfaces.labels, null)
+ annotations = [for key, value in try(sub.annotations, {}) : {
+ name = key
+ value = value
+ }]
+ }
+ ]
+ ]
+ ]
+ ])
+}
+
+resource "hyperfabric_node_sub_interface" "node_sub_interface" {
+ for_each = { for sub in local.fabric_node_port_sub_interfaces : sub.key => sub }
+
+ node_id = each.value.node_id
+ name = each.value.name
+ description = each.value.description
+ enabled = each.value.enabled
+ ipv4_addresses = each.value.ipv4_addresses
+ ipv6_addresses = each.value.ipv6_addresses
+ vlan_id = each.value.vlan_id
+ vrf_id = each.value.vrf_id
+ labels = each.value.labels
+ annotations = each.value.annotations
+}
+
+locals {
+ fabric_vnis = flatten([
+ for fabric in try(local.hyperfabric.fabrics, []) : [
+ for vni in try(fabric.vnis, []) : {
+ key = format("%s/%s", fabric.name, vni.name)
+ fabric_id = hyperfabric_fabric.fabric[fabric.name].id
+ name = vni.name
+ description = try(vni.description, local.defaults.hyperfabric.fabrics.vnis.description, null)
+ vni = try(vni.vni, local.defaults.hyperfabric.fabrics.vnis.vni, null)
+ vrf_id = try(hyperfabric_vrf.vrf[format("%s/%s", fabric.name, vni.vrf)].vrf_id, null)
+ svi = {
+ ipv4_addresses = try(vni.svi.ipv4_addresses, local.defaults.hyperfabric.fabrics.vnis.svi.ipv4_addresses, null)
+ ipv6_addresses = try(vni.svi.ipv6_addresses, local.defaults.hyperfabric.fabrics.vnis.svi.ipv6_addresses, null)
+ enabled = try(vni.svi.enabled, local.defaults.hyperfabric.fabrics.vnis.svi.enabled, null)
+ }
+ members = flatten(concat(
+ [
+ for member in try(vni.members, []) : [
+ for node in try(member.nodes, []) : [
+ for port in try(member.ports, []) : {
+ node_id = hyperfabric_node.node[format("%s/%s", fabric.name, node)].node_id
+ port_name = port
+ vlan_id = try(member.vlan_id, null)
+ }
+ ]
+ ] if try(member.nodes, null) != null && try(member.ports, null) != null
+ ],
+ [
+ for member in try(vni.members, []) : [
+ for node in try(member.nodes, []) : {
+ node_id = hyperfabric_node.node[format("%s/%s", fabric.name, node)].node_id
+ port_name = try(member.port, "*")
+ vlan_id = try(member.vlan_id, null)
+ }
+ ] if try(member.nodes, null) != null && try(member.ports, null) == null
+ ],
+ [
+ for member in try(vni.members, []) : [
+ for port in try(member.ports, []) : {
+ node_id = try(hyperfabric_node.node[format("%s/%s", fabric.name, member.node)].node_id, "*")
+ port_name = port
+ vlan_id = try(member.vlan_id, null)
+ }
+ ] if try(member.nodes, null) == null && try(member.ports, null) != null
+ ],
+ [
+ for member in try(vni.members, []) : {
+ node_id = try(hyperfabric_node.node[format("%s/%s", fabric.name, member.node)].node_id, "*")
+ port_name = try(member.port, "*")
+ vlan_id = try(member.vlan_id, null)
+ } if try(member.nodes, null) == null && try(member.ports, null) == null
+ ]
+ ))
+ labels = try(vni.labels, local.defaults.hyperfabric.fabrics.vnis.labels, null)
+ annotations = [for key, value in try(vni.annotations, {}) : {
+ name = key
+ value = value
+ }]
+ }
+ ]
+ ])
+}
+
+resource "hyperfabric_vni" "vni" {
+ for_each = { for vni in local.fabric_vnis : vni.key => vni }
+
+ fabric_id = each.value.fabric_id
+ name = each.value.name
+ description = each.value.description
+ vni = each.value.vni
+ vrf_id = each.value.vrf_id
+ svi = each.value.svi
+ members = each.value.members
+ labels = each.value.labels
+ annotations = each.value.annotations
+}
\ No newline at end of file
diff --git a/hf_system.tf b/hf_system.tf
new file mode 100644
index 0000000..6e5a874
--- /dev/null
+++ b/hf_system.tf
@@ -0,0 +1,8 @@
+resource "hyperfabric_user" "user" {
+ for_each = { for user in try(local.hyperfabric.users, []) : user.email => user }
+
+ email = each.key
+ enabled = try(each.value.enabled, local.defaults.hyperfabric.users.enabled, null)
+ role = try(each.value.role, local.defaults.hyperfabric.users.role, null)
+ labels = try(each.value.labels, local.defaults.hyperfabric.users.labels, null)
+}
diff --git a/main.tf b/main.tf
new file mode 100644
index 0000000..654e607
--- /dev/null
+++ b/main.tf
@@ -0,0 +1,3 @@
+locals {
+ hyperfabric = try(local.model.hyperfabric, {})
+}
diff --git a/merge.tf b/merge.tf
new file mode 100644
index 0000000..0aaadd8
--- /dev/null
+++ b/merge.tf
@@ -0,0 +1,31 @@
+locals {
+ yaml_strings_directories = flatten([
+ for dir in var.yaml_directories : [
+ for file in fileset(".", "${dir}/*.{yml,yaml}") : file(file)
+ ]
+ ])
+ yaml_strings_files = [
+ for file in var.yaml_files : file(file)
+ ]
+ model_strings = length(keys(var.model)) != 0 ? [yamlencode(var.model)] : []
+ model_string = provider::utils::yaml_merge(concat(local.yaml_strings_directories, local.yaml_strings_files, local.model_strings))
+ model = yamldecode(local.model_string)
+ user_defaults = { "defaults" : try(local.model["defaults"], {}) }
+ defaults_string = provider::utils::yaml_merge([file("${path.module}/defaults/defaults.yaml"), yamlencode(local.user_defaults)])
+ defaults = yamldecode(local.defaults_string)["defaults"]
+}
+
+resource "terraform_data" "validation" {
+ lifecycle {
+ precondition {
+ condition = length(var.yaml_directories) != 0 || length(var.yaml_files) != 0 || length(keys(var.model)) != 0
+ error_message = "Either `yaml_directories`, `yaml_files`, or a non-empty `model` value must be provided."
+ }
+ }
+}
+
+resource "local_sensitive_file" "defaults" {
+ count = var.write_default_values_file != "" ? 1 : 0
+ content = local.defaults_string
+ filename = var.write_default_values_file
+}
\ No newline at end of file
diff --git a/outputs.tf b/outputs.tf
new file mode 100644
index 0000000..8331c73
--- /dev/null
+++ b/outputs.tf
@@ -0,0 +1,10 @@
+
+output "default_values" {
+ description = "All default values."
+ value = local.defaults
+}
+
+output "model" {
+ description = "Full model."
+ value = local.model
+}
diff --git a/variables.tf b/variables.tf
new file mode 100644
index 0000000..ac749f3
--- /dev/null
+++ b/variables.tf
@@ -0,0 +1,23 @@
+variable "yaml_directories" {
+ description = "List of paths to YAML directories."
+ type = list(string)
+ default = []
+}
+
+variable "yaml_files" {
+ description = "List of paths to YAML files."
+ type = list(string)
+ default = []
+}
+
+variable "model" {
+ description = "As an alternative to YAML files, a native Terraform data structure can be provided as well."
+ type = map(any)
+ default = {}
+}
+
+variable "write_default_values_file" {
+ description = "Write all default values to a YAML file. Value is a path pointing to the file to be created."
+ type = string
+ default = ""
+}
diff --git a/versions.tf b/versions.tf
new file mode 100644
index 0000000..84fa000
--- /dev/null
+++ b/versions.tf
@@ -0,0 +1,18 @@
+terraform {
+ required_version = ">= 1.8.0"
+
+ required_providers {
+ hyperfabric = {
+ source = "cisco-open/hyperfabric"
+ version = ">= 0.1.0"
+ }
+ utils = {
+ source = "netascode/utils"
+ version = ">= 0.2.5"
+ }
+ local = {
+ source = "hashicorp/local"
+ version = ">= 2.3.0"
+ }
+ }
+}