diff --git a/plugins/nf-validation/src/main/nextflow/validation/SamplesheetConverter.groovy b/plugins/nf-validation/src/main/nextflow/validation/SamplesheetConverter.groovy index 3118bd80..566153ac 100644 --- a/plugins/nf-validation/src/main/nextflow/validation/SamplesheetConverter.groovy +++ b/plugins/nf-validation/src/main/nextflow/validation/SamplesheetConverter.groovy @@ -97,18 +97,19 @@ class SamplesheetConverter { def Boolean headerCheck = true this.rows = [] resetCount() + def List outputs = samplesheetList.collect { Map fullRow -> increaseCount() - Map row = fullRow.findAll { it.value != "" } + Map row = fullRow.findAll { it.value != "" } def Set rowKeys = containsHeader ? row.keySet() : ["empty"].toSet() - def String yamlInfo = fileType == "yaml" ? " for entry ${this.getCount()}." : "" + def String entryInfo = fileType in ["yaml", "json"] ? " for entry ${this.getCount()}." : "" // Check the header (CSV/TSV) or present fields (YAML) if(headerCheck) { def unexpectedFields = containsHeader ? rowKeys - allFields : [] if(unexpectedFields.size() > 0) { - this.warnings << "The samplesheet contains following unchecked field(s): ${unexpectedFields}${yamlInfo}".toString() + this.warnings << "The samplesheet contains following unchecked field(s): ${unexpectedFields}${entryInfo}".toString() } if(fileType != 'yaml'){ @@ -128,7 +129,7 @@ class SamplesheetConverter { for( Map.Entry field : schemaFields ){ def String key = containsHeader ? field.key : "empty" - def String input = row[key] + def Object input = row[key] // Check if the field is deprecated if(field['value']['deprecated']){ @@ -159,7 +160,7 @@ class SamplesheetConverter { if(input in booleanUniques[key] && input){ this.errors << addSample("The '${key}' value needs to be unique. 
'${input}' was found at least twice in the samplesheet.".toString()) } - booleanUniques[key].add(input) + booleanUniques[key].add(input as String) } else if(unique && uniqueIsList) { def Map newMap = (Map) row.subMap((List) [key] + (List) unique) @@ -176,20 +177,20 @@ class SamplesheetConverter { def List metaNames = field['value']['meta'] as List if(metaNames) { for(name : metaNames) { - meta[name] = (input != '' && input) ? - castToType(input, field) : - field['value']['default'] != null ? - castToType(field['value']['default'] as String, field) : + meta[name] = (input != '' && input != null) ? + castToNFType(input, field) : + field['value']['default'] != null ? + castToNFType(field['value']['default'], field) : null } } else { - def inputFile = (input != '' && input) ? - castToType(input, field) : - field['value']['default'] != null ? - castToType(field['value']['default'] as String, field) : + def inputVal = (input != '' && input != null) ? + castToNFType(input, field) : + field['value']['default'] != null ? 
+ castToNFType(field['value']['default'], field) : [] - output.add(inputFile) + output.add(inputVal) } } // Add meta to the output when a meta field has been created @@ -253,26 +254,36 @@ class SamplesheetConverter { } // Function to transform an input field from the samplesheet to its desired type - private static castToType( - String input, + private static castToNFType( + Object input, Map.Entry field ) { def String type = field['value']['type'] def String key = field.key + // Recursively call this function for each item in the array if the field is an array-type + // The returned values are collected into a single array + if (type == "array") { + def Map.Entry subfield = (Map.Entry) Map.entry(field.key, field['value']['items']) + log.debug "subfield = $subfield" + def ArrayList result = input.collect{ castToNFType(it, subfield) } as ArrayList + return result + } + + def String inputStr = input as String // Convert string values if(type == "string" || !type) { - def String result = input as String + def String result = inputStr as String // Check and convert to the desired format def String format = field['value']['format'] if(format) { if(format == "file-path-pattern") { - def ArrayList inputFiles = Nextflow.file(input) as ArrayList + def ArrayList inputFiles = Nextflow.file(inputStr) as ArrayList return inputFiles } if(format.contains("path")) { - def Path inputFile = Nextflow.file(input) as Path + def Path inputFile = Nextflow.file(inputStr) as Path return inputFile } } @@ -285,7 +296,7 @@ class SamplesheetConverter { // Convert number values else if(type == "number") { try { - def int result = input as int + def int result = inputStr as int return result } catch (NumberFormatException e) { @@ -293,28 +304,28 @@ class SamplesheetConverter { } try { - def float result = input as float + def float result = inputStr as float return result } catch (NumberFormatException e) { - log.debug("Could not convert ${input} to a float. 
Trying to convert to a double.") + log.debug("Could not convert ${inputStr} to a float. Trying to convert to a double.") } - def double result = input as double + def double result = inputStr as double return result } // Convert integer values else if(type == "integer") { - def int result = input as int + def int result = inputStr as int return result } // Convert boolean values else if(type == "boolean") { - if(input.toLowerCase() == "true") { + if(inputStr.toLowerCase() == "true") { return true } return false diff --git a/plugins/nf-validation/src/main/nextflow/validation/SchemaValidator.groovy b/plugins/nf-validation/src/main/nextflow/validation/SchemaValidator.groovy index a19715b8..0ba33227 100644 --- a/plugins/nf-validation/src/main/nextflow/validation/SchemaValidator.groovy +++ b/plugins/nf-validation/src/main/nextflow/validation/SchemaValidator.groovy @@ -184,15 +184,21 @@ class SchemaValidator extends PluginExtensionPoint { def String fileType = SamplesheetConverter.getFileType(samplesheetFile) def String delimiter = fileType == "csv" ? "," : fileType == "tsv" ? "\t" : null def List> fileContent + def List> fileContentCasted def Boolean s3PathCheck = params.validationS3PathCheck ? 
params.validationS3PathCheck : false def Map types = variableTypes(schemaFile.toString(), baseDir) + if (types.find{ it.value == "array" } as Boolean && fileType in ["csv", "tsv"]){ + def msg = "Using {\"type\": \"array\"} in schema with a \".$fileType\" samplesheet is not supported\n" + log.error("ERROR: Validation of pipeline parameters failed!") + throw new SchemaValidationException(msg, []) + } def Boolean containsHeader = !(types.keySet().size() == 1 && types.keySet()[0] == "") if(!containsHeader){ types = ["empty": types[""]] } if(fileType == "yaml"){ - fileContent = new Yaml().load((samplesheetFile.text)).collect { + fileContentCasted = new Yaml().load((samplesheetFile.text)).collect { if(containsHeader) { return it as Map } @@ -200,7 +206,7 @@ } } else if(fileType == "json"){ - fileContent = new JsonSlurper().parseText(samplesheetFile.text).collect { + fileContentCasted = new JsonSlurper().parseText(samplesheetFile.text).collect { if(containsHeader) { return it as Map } @@ -209,8 +215,8 @@ } else { fileContent = samplesheetFile.splitCsv(header:containsHeader ?: ["empty"], strip:true, sep:delimiter, quote:'\"') + fileContentCasted = castToType(fileContent, types) } - def List> fileContentCasted = castToType(fileContent, types) if (validateFile(false, samplesheetFile.toString(), fileContentCasted, schemaFile.toString(), baseDir, s3PathCheck)) { log.debug "Validation passed: '$samplesheetFile' with '$schemaFile'" } @@ -430,7 +436,13 @@ def String fileType = SamplesheetConverter.getFileType(file_path) def String delimiter = fileType == "csv" ? "," : fileType == "tsv" ?
"\t" : null def List> fileContent + def List> fileContentCasted def Map types = variableTypes(schema_name, baseDir) + if (types.find{ it.value == "array" } as Boolean && fileType in ["csv", "tsv"]){ + def msg = "Using \"type\": \"array\" in schema with a \".$fileType\" samplesheet is not supported\n" + log.error("ERROR: Validation of pipeline parameters failed!") + throw new SchemaValidationException(msg, []) + } def Boolean containsHeader = !(types.keySet().size() == 1 && types.keySet()[0] == "") if(!containsHeader){ @@ -438,7 +450,7 @@ } if(fileType == "yaml"){ - fileContent = new Yaml().load(file_path.text).collect { + fileContentCasted = new Yaml().load(file_path.text).collect { if(containsHeader) { return it as Map } @@ -446,7 +458,7 @@ } } else if(fileType == "json"){ - fileContent = new JsonSlurper().parseText(file_path.text).collect { + fileContentCasted = new JsonSlurper().parseText(file_path.text).collect { if(containsHeader) { return it as Map } @@ -455,8 +467,8 @@ } else { fileContent = file_path.splitCsv(header:containsHeader ?: ["empty"], strip:true, sep:delimiter, quote:'\"') + fileContentCasted = castToType(fileContent, types) } - def List> fileContentCasted = castToType(fileContent, types) if (validateFile(useMonochromeLogs, key, fileContentCasted, schema_name, baseDir, s3PathCheck)) { log.debug "Validation passed: '$key': '$file_path' with '$schema_name'" } @@ -554,6 +566,8 @@ class SchemaValidator extends PluginExtensionPoint { Boolean monochrome_logs, String paramName, Object fileContent, String schemaFilename, String baseDir, Boolean s3PathCheck = false ) { + // declare this once for the method + def colors = logColours(monochrome_logs) // Load the schema def String schema_string = Files.readString( Path.of(getSchemaPath(baseDir, schemaFilename)) ) @@ -591,7
+605,10 @@ class SchemaValidator extends PluginExtensionPoint { pathsToCheck.each { String fieldName -> for (int i=0; i < arrayJSON.size(); i++) { def JSONObject entry = arrayJSON.getJSONObject(i) - if ( entry.has(fieldName) ) { + if ( entry.has(fieldName) && entry[fieldName] instanceof JSONArray ) { + entry[fieldName].collect{ pathExists(it.toString(), " Entry ${(i+1).toString()} - ${fieldName.toString()}", s3PathCheck) } + } + else if ( entry.has(fieldName) ) { pathExists(entry[fieldName].toString(), " Entry ${(i+1).toString()} - ${fieldName.toString()}", s3PathCheck) } } @@ -607,13 +624,11 @@ class SchemaValidator extends PluginExtensionPoint { validator.performValidation(schema, arrayJSON); if (this.hasErrors()) { // Needed for custom errors such as pathExists() errors - def colors = logColours(monochrome_logs) def msg = "${colors.red}The following errors have been detected:\n\n" + this.getErrors().join('\n').trim() + "\n${colors.reset}\n" log.error("ERROR: Validation of '$paramName' file failed!") throw new SchemaValidationException(msg, this.getErrors()) } } catch (ValidationException e) { - def colors = logColours(monochrome_logs) JSONObject exceptionJSON = (JSONObject) e.toJSON() JSONObject objectJSON = new JSONObject(); objectJSON.put("objects",arrayJSON); @@ -651,7 +666,10 @@ class SchemaValidator extends PluginExtensionPoint { def Map properties = (Map) group.value['properties'] for (p in properties) { def String key = (String) p.key - def Map property = properties[key] as Map + def Map property = properties[key] as Map + if(property.containsKey('items')){ + property = property.items as Map + } if (property.containsKey('exists') && property.containsKey('format')) { if (property['exists'] && (property['format'] == 'file-path' || property['format'] == 'directory-path' || property['format'] == 'path') ) { exists.push(key) diff --git a/plugins/nf-validation/src/test/nextflow/validation/PluginExtensionMethodsTest.groovy 
b/plugins/nf-validation/src/test/nextflow/validation/PluginExtensionMethodsTest.groovy index 5942d951..774f0d98 100644 --- a/plugins/nf-validation/src/test/nextflow/validation/PluginExtensionMethodsTest.groovy +++ b/plugins/nf-validation/src/test/nextflow/validation/PluginExtensionMethodsTest.groovy @@ -867,4 +867,28 @@ class PluginExtensionMethodsTest extends Dsl2Spec{ error.message == '''The following errors have been detected:\n\n* -- Entry 1: Missing required value: sample\n* -- Entry 2: Missing required value: sample\n\n''' !stdout } -} + + def 'should fail because of arrays with csv' () { + given: + def schema = Path.of('src/testResources/nextflow_schema_with_samplesheet_converter_arrays.json').toAbsolutePath().toString() + def SCRIPT_TEXT = """ + params.monochrome_logs = true + params.input = 'src/testResources/correct.csv' + include { validateParameters } from 'plugin/nf-validation' + + validateParameters(parameters_schema: '$schema', monochrome_logs: params.monochrome_logs) + """ + + when: + dsl_eval(SCRIPT_TEXT) + def stdout = capture + .toString() + .readLines() + .findResults {it.contains('WARN nextflow.validation.SchemaValidator') || it.startsWith('* --') ? 
it : null } + + then: + def error = thrown(SchemaValidationException) + error.message == '''Using {"type": "array"} in schema with a ".csv" samplesheet is not supported\n''' + !stdout + } +} \ No newline at end of file diff --git a/plugins/nf-validation/src/test/nextflow/validation/SamplesheetConverterTest.groovy b/plugins/nf-validation/src/test/nextflow/validation/SamplesheetConverterTest.groovy index 444ca15b..3d309cc7 100644 --- a/plugins/nf-validation/src/test/nextflow/validation/SamplesheetConverterTest.groovy +++ b/plugins/nf-validation/src/test/nextflow/validation/SamplesheetConverterTest.groovy @@ -115,7 +115,7 @@ class SamplesheetConverterTest extends Dsl2Spec{ def SCRIPT_TEXT = ''' include { fromSamplesheet } from 'plugin/nf-validation' - params.input = 'src/testResources/correct.csv' + params.input = 'src/testResources/correct.tsv' workflow { Channel.fromSamplesheet("input", parameters_schema:"src/testResources/nextflow_schema_with_samplesheet_converter.json").view() @@ -142,7 +142,7 @@ class SamplesheetConverterTest extends Dsl2Spec{ def SCRIPT_TEXT = ''' include { fromSamplesheet } from 'plugin/nf-validation' - params.input = 'src/testResources/correct.csv' + params.input = 'src/testResources/correct.yaml' workflow { Channel.fromSamplesheet("input", parameters_schema:"src/testResources/nextflow_schema_with_samplesheet_converter.json").view() @@ -191,6 +191,149 @@ class SamplesheetConverterTest extends Dsl2Spec{ stdout.contains("[[string1:extraField, string2:extraField, integer1:10, integer2:10, boolean1:true, boolean2:true], string1, 25, false, ${this.getRootString()}/src/testResources/test.txt, ${this.getRootString()}/src/testResources/testDir, ${this.getRootString()}/src/testResources/testDir, unique3, 1, itDoesExist]" as String) } + def 'arrays should work fine - YAML' () { + given: + def SCRIPT_TEXT = ''' + include { fromSamplesheet } from 'plugin/nf-validation' + + params.input = 'src/testResources/correct_arrays.yaml' + + workflow { + 
Channel.fromSamplesheet("input", parameters_schema:"src/testResources/nextflow_schema_with_samplesheet_converter_arrays.json").view() + } + ''' + + when: + dsl_eval(SCRIPT_TEXT) + def stdout = capture + .toString() + .readLines() + .findResults {it.startsWith('[[') ? it : null } + + then: + noExceptionThrown() + stdout.contains("[[array_meta:null], [${this.getRootString()}/src/testResources/testDir/testFile.txt, ${this.getRootString()}/src/testResources/testDir2/testFile2.txt], [${this.getRootString()}/src/testResources/testDir, ${this.getRootString()}/src/testResources/testDir2], [${this.getRootString()}/src/testResources/testDir, ${this.getRootString()}/src/testResources/testDir2/testFile2.txt], [string1, string2], [25, 26], [25, 26.5], [false, true], [1, 2, 3], [true], [${this.getRootString()}/src/testResources/testDir/testFile.txt], [[${this.getRootString()}/src/testResources/testDir/testFile.txt]]]" as String) + stdout.contains("[[array_meta:[look, an, array, in, meta]], [], [], [], [string1, string2], [25, 26], [25, 26.5], [], [1, 2, 3], [false, true, false], [${this.getRootString()}/src/testResources/testDir/testFile.txt], [[${this.getRootString()}/src/testResources/testDir/testFile.txt]]]" as String) + stdout.contains("[[array_meta:null], [], [], [], [string1, string2], [25, 26], [25, 26.5], [], [1, 2, 3], [false, true, false], [${this.getRootString()}/src/testResources/testDir/testFile.txt], [[${this.getRootString()}/src/testResources/testDir/testFile.txt], [${this.getRootString()}/src/testResources/testDir/testFile.txt, ${this.getRootString()}/src/testResources/testDir2/testFile2.txt]]]" as String) + } + + def 'arrays should work fine - JSON' () { + given: + def SCRIPT_TEXT = ''' + include { fromSamplesheet } from 'plugin/nf-validation' + + params.input = 'src/testResources/correct_arrays.json' + + workflow { + Channel.fromSamplesheet("input", parameters_schema:"src/testResources/nextflow_schema_with_samplesheet_converter_arrays.json").view() + } + ''' + 
+ when: + dsl_eval(SCRIPT_TEXT) + def stdout = capture + .toString() + .readLines() + .findResults {it.startsWith('[[') ? it : null } + + then: + noExceptionThrown() + stdout.contains("[[array_meta:null], [${this.getRootString()}/src/testResources/testDir/testFile.txt, ${this.getRootString()}/src/testResources/testDir2/testFile2.txt], [${this.getRootString()}/src/testResources/testDir, ${this.getRootString()}/src/testResources/testDir2], [${this.getRootString()}/src/testResources/testDir, ${this.getRootString()}/src/testResources/testDir2/testFile2.txt], [string1, string2], [25, 26], [25, 26.5], [false, true], [1, 2, 3], [true], [${this.getRootString()}/src/testResources/testDir/testFile.txt], [[${this.getRootString()}/src/testResources/testDir/testFile.txt]]]" as String) + stdout.contains("[[array_meta:[look, an, array, in, meta]], [], [], [], [string1, string2], [25, 26], [25, 26.5], [], [1, 2, 3], [false, true, false], [${this.getRootString()}/src/testResources/testDir/testFile.txt], [[${this.getRootString()}/src/testResources/testDir/testFile.txt]]]" as String) + stdout.contains("[[array_meta:null], [], [], [], [string1, string2], [25, 26], [25, 26.5], [], [1, 2, 3], [false, true, false], [${this.getRootString()}/src/testResources/testDir/testFile.txt], [[${this.getRootString()}/src/testResources/testDir/testFile.txt], [${this.getRootString()}/src/testResources/testDir/testFile.txt, ${this.getRootString()}/src/testResources/testDir2/testFile2.txt]]]" as String) + } + + def 'array errors before channel conversion - YAML' () { + given: + def SCRIPT_TEXT = ''' + include { fromSamplesheet } from 'plugin/nf-validation' + + params.input = 'src/testResources/error_arrays.yaml' + + workflow { + Channel.fromSamplesheet("input", parameters_schema:"src/testResources/nextflow_schema_with_samplesheet_converter_arrays.json").view() + } + ''' + + when: + dsl_eval(SCRIPT_TEXT) + def stdout = capture + .toString() + .readLines() + .findResults {it.startsWith('[[') ? 
it : null } + + then: + def error = thrown(SchemaValidationException) + def errorMessages = error.message.readLines() + errorMessages[0] == "\033[0;31mThe following errors have been detected:" + errorMessages[2] == "* -- Entry 1 - field_3: the file or directory 'src/testResources/testDir3' does not exist." + errorMessages[3] == "* -- Entry 1 - field_3: the file or directory 'src/testResources/testDir2/testFile3.txt' does not exist." + errorMessages[4] == "* -- Entry 1 - field_2: the file or directory 'src/testResources/testDir3' does not exist." + errorMessages[5] == "* -- Entry 1 - field_1: the file or directory 'src/testResources/testDir/testFile.fasta' does not exist." + errorMessages[6] == "* -- Entry 1 - field_1: the file or directory 'src/testResources/testDir2/testFile3.txt' does not exist." + errorMessages[7] == '* -- Entry 1 - field_4: array items are not unique (["string2","string2","string1"])' + errorMessages[8] == '* -- Entry 1 - field_1: string [src/testResources/testDir/testFile.fasta] does not match pattern ^.*\\.txt$ (["src/testResources/testDir/testFile.fasta","src/testResources/testDir2/testFile3.txt"])' + errorMessages[9] == "* -- Entry 1 - field_5: expected maximum item count: 3, found: 4 ([25,25,27,28])" + errorMessages[10] == "* -- Entry 1 - field_6: array items are not unique ([25,25])" + errorMessages[11] == "* -- Entry 2: Missing required value: field_4" + errorMessages[12] == "* -- Entry 2 - field_5: expected minimum item count: 2, found: 1 ([25])" + errorMessages[13] == "* -- Entry 3 - field_4: expected type: JSONArray, found: String (abc)" + !stdout + } + + def 'array errors samplesheet format - CSV' () { + given: + def SCRIPT_TEXT = ''' + include { fromSamplesheet } from 'plugin/nf-validation' + + params.input = 'src/testResources/correct.csv' + + workflow { + Channel.fromSamplesheet("input", parameters_schema:"src/testResources/nextflow_schema_with_samplesheet_converter_arrays.json").view() + } + ''' + + when: + dsl_eval(SCRIPT_TEXT) 
+ def stdout = capture + .toString() + .readLines() + .findResults {it.startsWith('[[') ? it : null } + + then: + def error = thrown(SchemaValidationException) + def errorMessages = error.message.readLines() + errorMessages[0] == 'Using "type": "array" in schema with a ".csv" samplesheet is not supported' + !stdout + } + + def 'array errors samplesheet format - TSV' () { + given: + def SCRIPT_TEXT = ''' + include { fromSamplesheet } from 'plugin/nf-validation' + + params.input = 'src/testResources/correct.tsv' + + workflow { + Channel.fromSamplesheet("input", parameters_schema:"src/testResources/nextflow_schema_with_samplesheet_converter_arrays.json").view() + } + ''' + + when: + dsl_eval(SCRIPT_TEXT) + def stdout = capture + .toString() + .readLines() + .findResults {it.startsWith('[[') ? it : null } + + then: + def error = thrown(SchemaValidationException) + def errorMessages = error.message.readLines() + errorMessages[0] == 'Using "type": "array" in schema with a ".tsv" samplesheet is not supported' + !stdout + } + + def 'no header - CSV' () { given: def SCRIPT_TEXT = ''' @@ -266,7 +409,6 @@ class SamplesheetConverterTest extends Dsl2Spec{ stdout.contains("[test_2]") } - def 'extra field' () { given: def SCRIPT_TEXT = ''' diff --git a/plugins/nf-validation/src/testResources/correct_arrays.json b/plugins/nf-validation/src/testResources/correct_arrays.json new file mode 100644 index 00000000..d7ab9a23 --- /dev/null +++ b/plugins/nf-validation/src/testResources/correct_arrays.json @@ -0,0 +1,79 @@ +[ + { + "field_1": [ + "src/testResources/testDir/testFile.txt", + "src/testResources/testDir2/testFile2.txt" + ], + "field_2": [ + "src/testResources/testDir", + "src/testResources/testDir2" + ], + "field_3": [ + "src/testResources/testDir", + "src/testResources/testDir2/testFile2.txt" + ], + "field_4": [ + "string1", + "string2" + ], + "field_5": [ + 25, + 26 + ], + "field_6": [ + 25, + 26.5 + ], + "field_7": [ + false, + true + ], + "field_9": [ + true + ] + }, + { + 
"field_4": [ + "string1", + "string2" + ], + "field_5": [ + 25, + 26 + ], + "field_6": [ + 25, + 26.5 + ], + "field_12": [ + "look", + "an", + "array", + "in", + "meta" + ] + }, + { + "field_4": [ + "string1", + "string2" + ], + "field_5": [ + 25, + 26 + ], + "field_6": [ + 25, + 26.5 + ], + "field_11": [ + [ + "src/testResources/testDir/testFile.txt" + ], + [ + "src/testResources/testDir/testFile.txt", + "src/testResources/testDir2/testFile2.txt" + ] + ] + } +] \ No newline at end of file diff --git a/plugins/nf-validation/src/testResources/correct_arrays.yaml b/plugins/nf-validation/src/testResources/correct_arrays.yaml new file mode 100644 index 00000000..5ffc5a3d --- /dev/null +++ b/plugins/nf-validation/src/testResources/correct_arrays.yaml @@ -0,0 +1,51 @@ +- field_1: + - src/testResources/testDir/testFile.txt + - src/testResources/testDir2/testFile2.txt + field_2: + - src/testResources/testDir + - src/testResources/testDir2 + field_3: + - src/testResources/testDir + - src/testResources/testDir2/testFile2.txt + field_4: + - string1 + - string2 + field_5: + - 25 + - 26 + field_6: + - 25 + - 26.5 + field_7: + - false + - true + field_9: + - true +- field_4: + - string1 + - string2 + field_5: + - 25 + - 26 + field_6: + - 25 + - 26.5 + field_12: + - look + - an + - array + - in + - meta +- field_4: + - string1 + - string2 + field_5: + - 25 + - 26 + field_6: + - 25 + - 26.5 + field_11: + - - src/testResources/testDir/testFile.txt + - - src/testResources/testDir/testFile.txt + - src/testResources/testDir2/testFile2.txt \ No newline at end of file diff --git a/plugins/nf-validation/src/testResources/error_arrays.yaml b/plugins/nf-validation/src/testResources/error_arrays.yaml new file mode 100644 index 00000000..78309dab --- /dev/null +++ b/plugins/nf-validation/src/testResources/error_arrays.yaml @@ -0,0 +1,35 @@ +- field_1: + - src/testResources/testDir/testFile.fasta + - src/testResources/testDir2/testFile3.txt + field_2: + - src/testResources/testDir + - 
src/testResources/testDir3 + field_3: + - src/testResources/testDir3 + - src/testResources/testDir2/testFile3.txt + field_4: + - string2 + - string2 + - string1 + field_5: + - 25 + - 25 + - 27 + - 28 + field_6: + - 25 + - 25.0 + field_7: + - null + - true + field_9: + - true +- field_5: + - 25 + field_6: + - 25 + - 26.5 +- field_4: 'abc' + field_6: + - 25 + - 26.5 \ No newline at end of file diff --git a/plugins/nf-validation/src/testResources/nextflow_schema_with_samplesheet_converter_arrays.json b/plugins/nf-validation/src/testResources/nextflow_schema_with_samplesheet_converter_arrays.json new file mode 100644 index 00000000..c1e90205 --- /dev/null +++ b/plugins/nf-validation/src/testResources/nextflow_schema_with_samplesheet_converter_arrays.json @@ -0,0 +1,28 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://raw.githubusercontent.com/nf-core/testpipeline/master/nextflow_schema.json", + "title": "nf-core/testpipeline pipeline parameters", + "description": "this is a test", + "type": "object", + "definitions": { + "input_output_options": { + "title": "Input/output options", + "type": "object", + "fa_icon": "fas fa-terminal", + "description": "Define where the pipeline should find input data and save output data.", + "required": ["input"], + "properties": { + "input": { + "type": "string", + "format": "file-path", + "mimetype": "text/csv", + "pattern": "^\\S+\\.csv$", + "schema": "src/testResources/schema_input_with_arrays.json", + "description": "Path to comma-separated file containing information about the samples in the experiment.", + "help_text": "You will need to create a design file with information about the samples in your experiment before running the pipeline. Use this parameter to specify its location. It has to be a comma-separated file with 3 columns, and a header row. 
See [usage docs](https://nf-co.re/testpipeline/usage#samplesheet-input).", + "fa_icon": "fas fa-file-csv" + } + } + } + } +} diff --git a/plugins/nf-validation/src/testResources/schema_input.json b/plugins/nf-validation/src/testResources/schema_input.json index d955d512..b896f136 100644 --- a/plugins/nf-validation/src/testResources/schema_input.json +++ b/plugins/nf-validation/src/testResources/schema_input.json @@ -47,7 +47,7 @@ "type": "string", "format": "path", "exists": true - }, + }, "field_10": { "type": "string", "unique": true diff --git a/plugins/nf-validation/src/testResources/schema_input_with_arrays.json b/plugins/nf-validation/src/testResources/schema_input_with_arrays.json new file mode 100644 index 00000000..06ebba89 --- /dev/null +++ b/plugins/nf-validation/src/testResources/schema_input_with_arrays.json @@ -0,0 +1,109 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://raw.githubusercontent.com/nextflow-io/nf-validation/master/plugins/nf-validation/src/testResources/schema_input.json", + "title": "Samplesheet validation schema", + "description": "Schema for the samplesheet used in this pipeline", + "type": "array", + "items": { + "type": "object", + "properties": { + "field_1": { + "type": "array", + "items": { + "type": "string", + "format": "file-path", + "pattern": "^.*\\.txt$", + "exists": true + } + }, + "field_2": { + "type": "array", + "items": { + "type": "string", + "format": "directory-path", + "exists": true + } + }, + "field_3": { + "type": "array", + "items": { + "type": "string", + "format": "path", + "exists": true + } + }, + "field_4": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "field_5": { + "type": "array", + "items": { + "type": "integer" + }, + "minItems": 2, + "maxItems": 3 + }, + "field_6": { + "type": "array", + "items": { + "type": "number" + }, + "uniqueItems": true + }, + "field_7": { + "type": "array", + "items": { + "type": "boolean" + } + }, + 
"field_8": { + "type": "array", + "items": { + "type": "string" + }, + "default": ["1", "2", "3"] + }, + "field_9": { + "type": "array", + "items": { + "type": "boolean" + }, + "default": [false, true, false] + }, + "field_10": { + "type": "array", + "items": { + "type": "string", + "format": "file-path", + "pattern": "^.*\\.txt$", + "exists": true + }, + "default": ["src/testResources/testDir/testFile.txt"] + }, + "field_11": { + "type": "array", + "items": { + "type": "array", + "items": { + "format": "file-path", + "pattern": "^.*\\.txt$", + "exists": true + } + }, + "default": [["src/testResources/testDir/testFile.txt"]] + }, + "field_12": { + "type": "array", + "items": { + "type": "string" + }, + "meta": ["array_meta"] + } + }, + "required": ["field_4", "field_6"] + } +} \ No newline at end of file diff --git a/plugins/nf-validation/src/testResources/testDir2/testFile2.txt b/plugins/nf-validation/src/testResources/testDir2/testFile2.txt new file mode 100644 index 00000000..e69de29b