Merge pull request #473 from boozallen/469-bug-fixes

[#469] fix breaking bugs before release
ewilkins-csi authored Nov 18, 2024
2 parents 127164e + c47fbaa, commit 87e5f11
Showing 23 changed files with 114 additions and 176 deletions.
20 changes: 20 additions & 0 deletions bom/aissemble-quarkus-bom/pom.xml
@@ -28,8 +28,28 @@
<groupId>io.smallrye.reactive</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.scala-lang</groupId>
<artifactId>*</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- Downgrade Scala 2.13 (from Quarkus) to Scala 2.12 -->
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
<version>${version.scala}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${version.scala}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-compiler</artifactId>
<version>${version.scala}</version>
</dependency>
</dependencies>
</dependencyManagement>

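All three downgraded artifacts resolve their version from the shared ${version.scala} property. As a minimal sketch, assuming the property is declared in a parent POM — this diff does not show the declaration, so the 2.12.x patch value below is illustrative, presumably matching the Scala 2.12 Spark artifacts used elsewhere in the build:

    <properties>
        <!-- Assumed declaration: pins every org.scala-lang artifact to a single
             Scala 2.12.x release; the real value is defined elsewhere in the
             aiSSEMBLE build and may differ. -->
        <version.scala>2.12.18</version.scala>
    </properties>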
1 change: 1 addition & 0 deletions cucumber-report-aggregator/pom.xml
@@ -49,6 +49,7 @@
<plugin>
<groupId>net.masterthought</groupId>
<artifactId>maven-cucumber-reporting</artifactId>
<version>${version.cucumber.reporting.plugin}</version>
<executions>
<execution>
<id>execution</id>
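The newly pinned plugin version resolves the same way. A sketch of the assumed property declaration backing it (the version shown is illustrative, not taken from this repository):

    <properties>
        <!-- Assumed property for the maven-cucumber-reporting plugin version. -->
        <version.cucumber.reporting.plugin>5.8.2</version.cucumber.reporting.plugin>
    </properties>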
17 changes: 0 additions & 17 deletions extensions/extensions-metadata-service/pom.xml
@@ -112,23 +112,6 @@
<artifactId>extensions-data-delivery-spark</artifactId>
<version>${project.version}</version>
</dependency>

<!-- Downgrade Scala 2.13 (from Quarkus) to Scala 2.12 -->
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
<version>${version.scala}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-compiler</artifactId>
<version>${version.scala}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${version.scala}</version>
</dependency>
<dependency>
<!-- Spark hasn't migrated to Jakarta packages yet. Will need to shade it if we want to fully migrate. -->
<!-- See https://github.com/apache/incubator-hugegraph-toolchain/issues/464 -->
@@ -64,30 +64,15 @@
<include>.tiltignore</include>
<include>devops/**</include>
<include>jenkinsPipelineSteps.groovy</include>
<include>.mvn/**</include>
</includes>
<excludes>
<exclude>mvnw</exclude>
<exclude>mvnw.cmd</exclude>
</excludes>
</fileSet>
<fileSet>
<directory>.mvn</directory>
<includes/>
</fileSet>
<fileSet filtered="false" encoding="UTF-8">
<directory/>
<includes>
<include>mvnw</include>
<include>mvnw.cmd</include>
<include>.mvn/**</include>
</includes>
<excludes>
<exclude>Tiltfile</exclude>
<exclude>.tiltignore</exclude>
<exclude>devops/**</exclude>
<exclude>jenkinsPipelineSteps.groovy</exclude>
<exclude>.mvn/**</exclude>
</excludes>
</fileSet>
</fileSets>

@@ -102,7 +87,7 @@
<fileSet filtered="true" encoding="UTF-8">
<directory>src/main/resources</directory>
<includes>
<include>**</include>
<include>*/**</include>
</includes>
</fileSet>
</fileSets>
@@ -116,7 +101,7 @@
<fileSet filtered="true" encoding="UTF-8">
<directory>src/main/resources</directory>
<includes>
<include>**</include>
<include>*/**</include>
</includes>
</fileSet>
</fileSets>
@@ -130,8 +115,11 @@
<fileSet filtered="true" encoding="UTF-8">
<directory/>
<includes>
<include>**</include>
<include>*/**</include>
</includes>
<excludes>
<exclude>pom.xml</exclude>
</excludes>
</fileSet>
</fileSets>
</module>
@@ -1,6 +1,4 @@
# Convert this to a SealedSecret using kubeseal and replace it here
### See our guide for using SealedSecret's in your project to learn more:
## TODO: LINK-TO-GUIDE-HERE

apiVersion: v1
kind: Secret
@@ -62,7 +62,6 @@ public class KubernetesGenerator extends AbstractKubernetesGenerator {
* | mlflowArgoCDV2 | deployment/argocd/v2/mlflow-ui.yaml.vm | templates/mlflow-ui.yaml |
* | pipelineInvocationServiceArgoCD-v2 | deployment/argocd/v2/pipeline.invocation.service.yaml.vm | templates/pipeline-invocation-service.yaml |
* | policyDecisionPointArgoCDV2 | deployment/argocd/v2/policy-decision-point.yaml.vm | templates/policy-decision-point.yaml |
* | sealedSecretArgoCDFile | deployment/argocd/v2/sealed.secret.yaml.vm | templates/sealed-secret.yaml |
* | sparkInfrastructureArgoCDFileV2 | deployment/argocd/v2/spark.infrastructure.yaml.vm | templates/spark-infrastructure.yaml |
* | sparkOperatorArgoCDV2 | deployment/argocd/v2/spark.operator.yaml.vm | templates/spark-operator.yaml |
* | versioningArgoCDV2 | deployment/argocd/v2/versioning.yaml.vm | templates/versioning.yaml |
9 changes: 0 additions & 9 deletions foundation/foundation-mda/src/main/resources/profiles.json
@@ -316,9 +316,6 @@
},
{
"name": "sparkInfrastructureArgoCDFileV2"
},
{
"name": "sealedSecretArgoCDFile"
}
]
},
@@ -1206,9 +1203,6 @@
},
{
"name": "sparkInfrastructureArgoCD"
},
{
"name": "sealedSecretArgoCDFile"
}
]
},
@@ -2691,9 +2685,6 @@
},
{
"name": "mlflowArgoCDV2"
},
{
"name": "sealedSecretArgoCDFile"
}
]
}
8 changes: 0 additions & 8 deletions foundation/foundation-mda/src/main/resources/targets.json
@@ -3029,14 +3029,6 @@
"metadataContext": "targeted",
"overwritable": false
},
{
"name": "sealedSecretArgoCDFile",
"templateName": "templates/deployment/argocd/v2/sealed.secret.yaml.vm",
"outputFile": "templates/sealed-secret.yaml",
"generator": "com.boozallen.aiops.mda.generator.KubernetesGenerator",
"metadataContext": "targeted",
"overwritable": false
},
{
"name": "sparkInfrastructureHelmChartFileV2",
"templateName": "templates/deployment/spark-infrastructure/v2/spark.infrastructure.chart.yaml.vm",
@@ -31,18 +31,20 @@ import static org.apache.spark.sql.functions.lit;
*/
public abstract class ${record.capitalizedName}SchemaBase extends SparkSchema {

#set($columnVars = {})
#foreach ($field in $record.fields)
public static final String ${field.upperSnakecaseName}_COLUMN = "${field.sparkAttributes.columnName}";
#set ($columnVars[$field.name] = "${field.upperSnakecaseName}_COLUMN")
public static final String ${columnVars[$field.name]} = "${field.sparkAttributes.columnName}";
#end

public ${record.capitalizedName}SchemaBase() {
super();

#foreach ($field in $record.fields)
#if ($field.sparkAttributes.isDecimalType())
add(${field.upperSnakecaseName}_COLUMN, new ${field.shortType}(${field.sparkAttributes.defaultDecimalPrecision}, ${field.sparkAttributes.decimalScale}), ${field.sparkAttributes.isNullable()}, "${field.description}");
add(${columnVars[$field.name]}, new ${field.shortType}(${field.sparkAttributes.defaultDecimalPrecision}, ${field.sparkAttributes.decimalScale}), ${field.sparkAttributes.isNullable()}, "${field.description}");
#else
add(${field.upperSnakecaseName}_COLUMN, ${field.shortType}, ${field.sparkAttributes.isNullable()}, "${field.description}");
add(${columnVars[$field.name]}, ${field.shortType}, ${field.sparkAttributes.isNullable()}, "${field.description}");
#end
#end
}
@@ -56,12 +58,12 @@
*/
public Dataset<Row> cast(Dataset<Row> dataset) {
#foreach ($field in $record.fields)
DataType ${field.name}Type = getDataType(${field.upperSnakecaseName}_COLUMN);
DataType ${field.name}Type = getDataType(${columnVars[$field.name]});
#end

return dataset
#foreach ($field in $record.fields)
.withColumn(${field.upperSnakecaseName}_COLUMN, col(${field.upperSnakecaseName}_COLUMN).cast(${field.name}Type))#if (!$foreach.hasNext);#end
.withColumn(${columnVars[$field.name]}, col(${columnVars[$field.name]}).cast(${field.name}Type))#if (!$foreach.hasNext);#end
#end
}

@@ -90,30 +92,29 @@
public Dataset<Row> validateDataFrame(Dataset<Row> data) {
Dataset<Row> dataWithValidations = data
#foreach ($field in $record.fields)
#set ( $columnName = "#if($field.column)$field.column#{else}$field.upperSnakecaseName#end" )
#if (${field.isRequired()})
.withColumn("${columnName}_IS_NOT_NULL", col(${columnName}_COLUMN).isNotNull())
.withColumn(${columnVars[$field.name]} + "_IS_NOT_NULL", col(${columnVars[$field.name]}).isNotNull())
#end
#if (${field.getValidation().getMinValue()})
.withColumn("${columnName}_GREATER_THAN_MIN", col(${columnName}_COLUMN).gt(lit(${field.getValidation().getMinValue()})).or(col(${columnName}_COLUMN).equalTo(lit(${field.getValidation().getMinValue()}))))
.withColumn(${columnVars[$field.name]} + "_GREATER_THAN_MIN", col(${columnVars[$field.name]}).gt(lit(${field.getValidation().getMinValue()})).or(col(${columnVars[$field.name]}).equalTo(lit(${field.getValidation().getMinValue()}))))
#end
#if (${field.getValidation().getMaxValue()})
.withColumn("${columnName}_LESS_THAN_MAX", col(${columnName}_COLUMN).lt(lit(${field.getValidation().getMaxValue()})).or(col(${columnName}_COLUMN).equalTo(lit(${field.getValidation().getMaxValue()}))))
.withColumn(${columnVars[$field.name]} + "_LESS_THAN_MAX", col(${columnVars[$field.name]}).lt(lit(${field.getValidation().getMaxValue()})).or(col(${columnVars[$field.name]}).equalTo(lit(${field.getValidation().getMaxValue()}))))
#end
#if (${field.getValidation().getScale()})
.withColumn("${columnName}_MATCHES_SCALE", col(${columnName}_COLUMN).rlike(("^[0-9]*(?:\\.[0-9]{0,${field.getValidation().getScale()}})?$")))
.withColumn(${columnVars[$field.name]} + "_MATCHES_SCALE", col(${columnVars[$field.name]}).rlike(("^[0-9]*(?:\\.[0-9]{0,${field.getValidation().getScale()}})?$")))
#end
#if (${field.getValidation().getMinLength()})
.withColumn("${columnName}_GREATER_THAN_MAX_LENGTH", col(${columnName}_COLUMN).rlike(("^.{${field.getValidation().getMinLength()},}")))
.withColumn(${columnVars[$field.name]} + "_GREATER_THAN_MAX_LENGTH", col(${columnVars[$field.name]}).rlike(("^.{${field.getValidation().getMinLength()},}")))
#end
#if (${field.getValidation().getMaxLength()})
.withColumn("${columnName}_LESS_THAN_MAX_LENGTH", col(${columnName}_COLUMN).rlike(("^.{${field.getValidation().getMaxLength()},}")).equalTo(lit(false)))
.withColumn(${columnVars[$field.name]} + "_LESS_THAN_MAX_LENGTH", col(${columnVars[$field.name]}).rlike(("^.{${field.getValidation().getMaxLength()},}")).equalTo(lit(false)))
#end
#foreach ($format in $field.getValidation().getFormats())
#if ($foreach.first)
.withColumn("${columnName}_MATCHES_FORMAT", col(${columnName}_COLUMN).rlike(("$format.replace("\","\\")"))
.withColumn(${columnVars[$field.name]} + "_MATCHES_FORMAT", col(${columnVars[$field.name]}).rlike(("$format.replace("\","\\")"))
#else
.or(col(${columnName}_COLUMN).rlike(("$format.replace("\","\\")")))
.or(col(${columnVars[$field.name]}).rlike(("$format.replace("\","\\")")))
#end
#if ($foreach.last)
)
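Taken together, the template change records each field's column constant once in the $columnVars map and reuses it for the constant declaration, the constructor, cast(), and every generated validation column. The old template re-derived the name at each use site (sometimes from $field.column, sometimes from $field.upperSnakecaseName), so validation code could reference constants that were never declared. A sketch of the Java the revised template would emit for a hypothetical record named Taxi with one required decimal field fare — all names and values are illustrative, and SparkSchema is the aiSSEMBLE base class supplying add():

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.types.DecimalType;
    import static org.apache.spark.sql.functions.col;

    public abstract class TaxiSchemaBase extends SparkSchema {

        // One constant per field, reused everywhere below.
        public static final String FARE_COLUMN = "fare";

        public TaxiSchemaBase() {
            super();
            add(FARE_COLUMN, new DecimalType(10, 2), false, "Total fare");
        }

        // Simplified: the generated method chains one such call per validation rule.
        public Dataset<Row> validateDataFrame(Dataset<Row> data) {
            // The validation column name is derived from the same constant at
            // runtime, so it can no longer drift from the real column name.
            return data.withColumn(FARE_COLUMN + "_IS_NOT_NULL",
                    col(FARE_COLUMN).isNotNull());
        }
    }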
@@ -55,10 +55,4 @@ public void the_deployment_generated(String profileName) throws Exception {
new Slf4jDelegate(logger),
projectDir.toFile());
}

@Then("the placeholder SealedSecret is created")
public void the_placeholder_sealed_secret_is_created() {
Path sealedSecret = projectDir.resolve("main/resources/templates/").resolve("sealed-secret.yaml");
assertTrue("File not created", Files.exists(sealedSecret));
}
}
@@ -22,12 +22,3 @@ Feature: Generating deployments
When the MDA generation is run
Then the user is notified to add a spark worker image deployment to tilt
And the tilt deployment will handle building and reloading the spark worker image

Scenario Outline: Generate a placeholder SealedSecret for ArgoCD
When the deployment "<profile>" is generated
Then the placeholder SealedSecret is created

Examples:
| profile |
| mlflow-deploy-v2 |
| aissemble-spark-infrastructure-deploy-v2 |
5 changes: 0 additions & 5 deletions pom.xml
@@ -106,11 +106,6 @@
<artifactId>commons-lang3</artifactId>
<version>3.17.0</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-math3</artifactId>
<version>3.6.1</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-exec</artifactId>
@@ -252,7 +252,7 @@
"package": "com.boozallen.aiops.mda.pattern.dictionary"
},
"recordType": {
"name": "CustomRecord",
"name": "CustomData",
"package": "com.boozallen.aiops.mda.pattern.record"
}
}
@@ -267,7 +267,7 @@
"package": "com.boozallen.aiops.mda.pattern.dictionary"
},
"recordType": {
"name": "CustomRecord",
"name": "CustomData",
"package": "com.boozallen.aiops.mda.pattern.record"
}
}
@@ -282,7 +282,7 @@
"package": "com.boozallen.aiops.mda.pattern.dictionary"
},
"recordType": {
"name": "CustomRecord",
"name": "CustomData",
"package": "com.boozallen.aiops.mda.pattern.record"
}
}
@@ -310,23 +310,23 @@
}
},
{
"name": "NativeInboundWithCustomRecordType",
"name": "NativeInboundWithCustomDataType",
"type": "synchronous",
"inbound": {
"type": "native",
"recordType": {
"name": "CustomRecord",
"name": "CustomData",
"package": "com.boozallen.aiops.mda.pattern.record"
}
}
},
{
"name": "NativeInboundWithCustomRecordTypeAsync",
"name": "NativeInboundWithCustomDataTypeAsync",
"type": "asynchronous",
"inbound": {
"type": "native",
"recordType": {
"name": "CustomRecord",
"name": "CustomData",
"package": "com.boozallen.aiops.mda.pattern.record"
}
}
@@ -341,7 +341,7 @@
"package": "com.boozallen.aiops.mda.pattern.dictionary"
},
"recordType": {
"name": "CustomRecord",
"name": "CustomData",
"package": "com.boozallen.aiops.mda.pattern.record"
}
}

This file was deleted.

@@ -1,7 +1,7 @@
{
"name": "CustomRecord",
"name": "CustomData",
"package": "com.boozallen.aiops.mda.pattern.record",
"description": "Example custom record for Spark Java Data Delivery Patterns",
"description": "Example custom record for Pyspark Data Delivery Patterns",
"fields": [
{
"name": "customField",