From 1e11ad2f57f60b8de93e31f04a9fd9509469211c Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Thu, 23 Jan 2025 07:05:53 +0000 Subject: [PATCH 01/48] init backend mock for limitless prover --- prover/backend/execution/craft.go | 2 +- prover/backend/execution/limitless/mock.go | 110 +++++++++++++++++++++ prover/backend/execution/prove.go | 2 +- prover/go.mod | 4 +- prover/go.sum | 4 +- 5 files changed, 116 insertions(+), 6 deletions(-) create mode 100644 prover/backend/execution/limitless/mock.go diff --git a/prover/backend/execution/craft.go b/prover/backend/execution/craft.go index 65c894e05..9943cb395 100644 --- a/prover/backend/execution/craft.go +++ b/prover/backend/execution/craft.go @@ -73,7 +73,7 @@ func CraftProverOutput( l2l1MessageHashes = bridge.L2L1MessageHashes(logs, l2BridgeAddress) ) - // This encodes the block as it will be by the compressor before running + // This encodes the block as it will be used by the compressor before running // the compression algorithm. blob.EncodeBlockForCompression(block, execDataBuf) diff --git a/prover/backend/execution/limitless/mock.go b/prover/backend/execution/limitless/mock.go new file mode 100644 index 000000000..eee46cb03 --- /dev/null +++ b/prover/backend/execution/limitless/mock.go @@ -0,0 +1,110 @@ +// THIS FILE IS MEANT ONLY TO BE A PLACEHOLDER AND SERVE AS A MOCK DEFINING THE COMPONENTS REQUIRED +// FOR LIMITLESS PROVER. THERE ARE NO INTERACTIONS WITH THE ACTUAL CRYPTOGRAPHIC COMPONENTS. +// EACH COMPONENTS PREFIX BEGINS WITH 'M' SIGNIFYING MOCK. 
+// SEE https://app.diagrams.net/#G1U6S4MTrt7lsipc3TZrL4xXjvvVeghc8k#%7B%22pageId%22%3A%2206rcgNj9AqHDneUgptMC%22%7D + +package mock + +import "github.com/consensys/linea-monorepo/prover/backend/execution" + +// Specifies the number of segments +var segments int + +// MBootStrapper initializes the prover with the necessary data +type MBootStrapper struct { +} + +// MDistMetadata handles metadata about the distribution of module ID/segment ID pairs +type MDistMetadata struct { + // Map from Module ID to Segment ID + ModSegMap map[int]int `json:"modSegMap"` + + // Request ID + ReqId string `json:"reqId"` +} + +// MSubmoduleGLProver handles global-local proof generation +type MSubmoduleGLProver struct { +} + +// MGLReq represents a request to the global-local prover +type MGLReq struct { + ReqId string `json:"reqId"` + ModuleId string `json:"moduleId"` + SegmentId int `json:"segmentId"` + ConflatedExecutionTracesFile string `json:"conflatedExecutionTracesFile"` +} + +// Mocked Public Inputs +type MPublicInputs struct { +} + +// MGLResp represents a response from the global-local prover +type MGLResp struct { + ModId string `json:"modId"` + SegmentID int `json:"segmentId"` + ModProof string `json:"modProof"` + QueryResult string `json:"queryResult"` + Auxilliary []MPublicInputs `json:"auxilliary"` +} + +// initBootstrap initializes the bootstrapping process +func (b MBootStrapper) initBootstrap(req execution.Request) ([]MGLResp, MDistMetadata, error) { + resps := make([]MGLResp, segments) + return resps, MDistMetadata{}, nil +} + +// Beacon provides randomness for the proof generation process +type Beacon struct { +} + +// MLPPBeaconReq represents a request for LPP beacon data +type MLPPBeaconReq struct { + LPPColumns []string `json:"lppColumns"` + LPPCommitments []string `json:"lppCommitments"` + ModuleID string `json:"moduleId"` +} + +// generateRandomness generates randomness for the proof generation process +func (b Beacon) generateRandomness(req MLPPBeaconReq, 
metadata MDistMetadata) (MLPPRequest, error) { + return MLPPRequest{}, nil +} + +// proveGL generates a mock GL proof +func (gl *MSubmoduleGLProver) proveGL(req MGLReq) (MGLResp, error) { + dummyProof := MGLResp{} + return dummyProof, nil +} + +// MSubmoduleLPPProver handles LPP proof generation +type MSubmoduleLPPProver struct { +} + +// MLPPRequest represents a request for LPP proof data +type MLPPRequest struct { + LPPReq MLPPBeaconReq `json:"lppReq"` + Randomseed string `json:"randomseed"` +} + +// MLPPResponse represents a response from the LPP prover +type MLPPResponse struct { + ModuleID string `json:"moduleId"` + SegmentID string `json:"segmentId"` + ModuleProof string `json:"moduleProof"` + QueryPartialResults []string `json:"queryPartialResults"` +} + +// proveLPP generates a mock LPP proof +func (lpp *MSubmoduleLPPProver) proveLPP(req MLPPRequest) (MLPPResponse, error) { + dummyproof := MLPPResponse{} + return dummyproof, nil +} + +// MExecConglomerator combines various proofs into a final execution proof +type MExecConglomerator struct { +} + +// prove combines GL and LPP responses into a final execution proof +func (cong *MExecConglomerator) prove(glresp MGLResp, lppresp MLPPResponse) (execution.Response, error) { + return execution.Response{}, nil +} diff --git a/prover/backend/execution/prove.go b/prover/backend/execution/prove.go index 4970e344f..be3c58335 100644 --- a/prover/backend/execution/prove.go +++ b/prover/backend/execution/prove.go @@ -158,7 +158,7 @@ func mustProveAndPass( utils.Panic("traces checksum in the setup manifest does not match the one in the config") } - // TODO: implements the collection of the functional inputs from the prover response + // TODO: implement the collection of the functional inputs from the prover response return execution.MakeProof(traces, setup, fullZkEvm.WizardIOP, proof, *w.FuncInp), setup.VerifyingKeyDigest() case config.ProverModeBench: diff --git a/prover/go.mod b/prover/go.mod index c14ce8408..a0c04e775 
100644 --- a/prover/go.mod +++ b/prover/go.mod @@ -18,6 +18,7 @@ require ( github.com/iancoleman/strcase v0.3.0 github.com/icza/bitio v1.1.0 github.com/leanovate/gopter v0.2.11 + github.com/pierrec/lz4/v4 v4.1.21 github.com/pkg/errors v0.9.1 github.com/prometheus/client_golang v1.19.1 github.com/rs/zerolog v1.33.0 @@ -26,7 +27,7 @@ require ( github.com/spf13/viper v1.19.0 github.com/stretchr/testify v1.9.0 golang.org/x/crypto v0.31.0 - golang.org/x/net v0.27.0 + golang.org/x/net v0.28.0 golang.org/x/sync v0.10.0 golang.org/x/time v0.5.0 ) @@ -76,7 +77,6 @@ require ( github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/olekukonko/tablewriter v0.0.5 // indirect github.com/pelletier/go-toml/v2 v2.2.2 // indirect - github.com/pierrec/lz4/v4 v4.1.21 // indirect github.com/prometheus/client_model v0.6.1 // indirect github.com/prometheus/common v0.55.0 // indirect github.com/prometheus/procfs v0.15.1 // indirect diff --git a/prover/go.sum b/prover/go.sum index f4e8d6435..633cd1045 100644 --- a/prover/go.sum +++ b/prover/go.sum @@ -588,8 +588,8 @@ golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96b golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= -golang.org/x/net v0.27.0 h1:5K3Njcw06/l2y9vpGCSdcxWOYHOUk3dVNGDXN+FvAys= -golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE= +golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE= +golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 
v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= From 6ff0acb2923005ca0f0f509b68c0dccfa327cea2 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Thu, 23 Jan 2025 10:22:49 +0000 Subject: [PATCH 02/48] adjust mock func. --- prover/backend/execution/limitless/mock.go | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/prover/backend/execution/limitless/mock.go b/prover/backend/execution/limitless/mock.go index eee46cb03..e5030b268 100644 --- a/prover/backend/execution/limitless/mock.go +++ b/prover/backend/execution/limitless/mock.go @@ -7,7 +7,7 @@ package mock import "github.com/consensys/linea-monorepo/prover/backend/execution" -// Specifies the number of segments +// Specifies the number of segments ideally set in the config. file var segments int // MBootStrapper initializes the prover with the necessary data @@ -49,13 +49,13 @@ type MGLResp struct { } // initBootstrap initializes the bootstrapping process -func (b MBootStrapper) initBootstrap(req execution.Request) ([]MGLResp, MDistMetadata, error) { - resps := make([]MGLResp, segments) - return resps, MDistMetadata{}, nil +// Outputs the submodule request for global-local prover for round 0 +func (b MBootStrapper) initBootstrap(req execution.Request) (MGLReq, MDistMetadata, error) { + return MGLReq{}, MDistMetadata{}, nil } -// Beacon provides randomness for the proof generation process -type Beacon struct { +// RandomnessBeacon provides randomness for the proof generation process +type RandomnessBeacon struct { } // MLPPBeaconReq represents a request for LPP beacon data @@ -66,7 +66,7 @@ type MLPPBeaconReq struct { } // generateRandomness generates randomness for the proof generation process -func (b Beacon) generateRandomness(req MLPPBeaconReq, metadata MDistMetadata) (MLPPRequest, error) { +func (b RandomnessBeacon) generateLPPProofReq(req MLPPBeaconReq, metadata MDistMetadata) (MLPPRequest, error) { return MLPPRequest{}, nil } From 
d749a6ba0b4c190b3bd526fdd344bd021c405ee6 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Tue, 28 Jan 2025 14:17:29 +0000 Subject: [PATCH 03/48] init limitless job definition --- .../backend/blobsubmission/craft_eip4844.go | 2 +- prover/backend/execution/limitless/mock.go | 2 +- .../controller/controller/job_definition.go | 90 +++++++++---------- .../controller/job_definition_limitless.go | 54 +++++++++++ .../controller/job_definition_test.go | 8 +- 5 files changed, 102 insertions(+), 54 deletions(-) create mode 100644 prover/cmd/controller/controller/job_definition_limitless.go diff --git a/prover/backend/blobsubmission/craft_eip4844.go b/prover/backend/blobsubmission/craft_eip4844.go index 741104aec..65ea4c462 100644 --- a/prover/backend/blobsubmission/craft_eip4844.go +++ b/prover/backend/blobsubmission/craft_eip4844.go @@ -4,6 +4,7 @@ import ( "crypto/sha256" "errors" "fmt" + "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/encode" blob "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1" @@ -171,5 +172,4 @@ func compressedStreamToBlob(compressedStream []byte) (blob kzg4844.Blob, err err } } return blob, nil - } diff --git a/prover/backend/execution/limitless/mock.go b/prover/backend/execution/limitless/mock.go index e5030b268..868dcf8f2 100644 --- a/prover/backend/execution/limitless/mock.go +++ b/prover/backend/execution/limitless/mock.go @@ -105,6 +105,6 @@ type MExecConglomerator struct { } // prove combines GL and LPP responses into a final execution proof -func (cong *MExecConglomerator) prove(glresp MGLResp, lppresp MLPPResponse) (execution.Response, error) { +func (cong *MExecConglomerator) prove(glresp MGLResp, lppresp MLPPResponse, dmetadata MDistMetadata) (execution.Response, error) { return execution.Response{}, nil } diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index 610b1808b..de63577ec 100644 --- 
a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -69,61 +69,31 @@ type JobDefinition struct { // Definition of an execution prover job. The function panics on any error since // it is called at start up. func ExecutionDefinition(conf *config.Config) JobDefinition { - - // format the extension part of the regexp if provided + // Format the extension part of the regexp if provided inpFileExt := "" if conf.Execution.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } - return JobDefinition{ - RequestsRootDir: conf.Execution.RequestsRootDir, + jobDef := commonExecJobDef(conf, jobNameExecution, 0) - // Name of the job - Name: jobNameExecution, - - // This will panic at startup if the regexp is invalid - InputFileRegexp: regexp2.MustCompile( - fmt.Sprintf( - `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$`, - inpFileExt, - config.FailSuffix, - ), - regexp2.None, + // Set the InputFileRegexp specific to ExecutionDefinition + jobDef.InputFileRegexp = regexp2.MustCompile( + fmt.Sprintf( + `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$`, + inpFileExt, + config.FailSuffix, ), + regexp2.None, + ) - // This will panic at startup if the template is invalid - OutputFileTmpl: tmplMustCompile( - "exec-output-file", - "{{.Start}}-{{.End}}-getZkProof.json", - ), - - // Execution job are at utmost priority - Priority: 0, - - // Parameters of the regexp, they can loose in the sense that these regexp - // are only called if the `InputFileRegexp` is matched. 
- ParamsRegexp: struct { - Start *regexp2.Regexp - End *regexp2.Regexp - Stv *regexp2.Regexp - Etv *regexp2.Regexp - Cv *regexp2.Regexp - ContentHash *regexp2.Regexp - }{ - // Match a string of digit at the beginning of the line - Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), - // Match a string of digit coming after the first string of digits that - // initiate the line and followed by a "-" - End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), - // Match a sequence of digits and "." comining after (resp.) "etv" and - // "cv" - Etv: matchVersionWithPrefix("etv"), - Stv: matchVersionWithPrefix("stv"), - }, + // Set the OutputFileTmpl specific to ExecutionDefinition + jobDef.OutputFileTmpl = tmplMustCompile( + "exec-output-file", + "{{.Start}}-{{.End}}-getZkProof.json", + ) - FailureSuffix: matchFailureSuffix(config.FailSuffix), - } + return jobDef } // Definition of an execution prover job. @@ -202,8 +172,8 @@ func AggregatedDefinition(conf *config.Config) JobDefinition { "{{.Start}}-{{.End}}-{{.ContentHash}}-getZkAggregatedProof.json", ), - // Execution job are at utmost priority - Priority: 1, + // Aggregation job are at lowest priority + Priority: 2, // Parameters of the regexp, they can loose in the sense that these // regexp are only called if the `InputFileRegexp` is matched. 
@@ -228,6 +198,30 @@ func AggregatedDefinition(conf *config.Config) JobDefinition { } } +// Helper function to set up common parts of execution JobDefinition +// Used in Execution Definition and for Limitless prover Bootstrap and Conglomeration +func commonExecJobDef(conf *config.Config, jobName string, priority int) JobDefinition { + return JobDefinition{ + RequestsRootDir: conf.Execution.RequestsRootDir, + Name: jobName, + Priority: priority, + ParamsRegexp: struct { + Start *regexp2.Regexp + End *regexp2.Regexp + Stv *regexp2.Regexp + Etv *regexp2.Regexp + Cv *regexp2.Regexp + ContentHash *regexp2.Regexp + }{ + Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), + End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), + Etv: matchVersionWithPrefix("etv"), + Stv: matchVersionWithPrefix("stv"), + }, + FailureSuffix: matchFailureSuffix(config.FailSuffix), + } +} + // Version prefix template func matchVersionWithPrefix(pre string) *regexp2.Regexp { return regexp2.MustCompile( diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go new file mode 100644 index 000000000..19c802188 --- /dev/null +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -0,0 +1,54 @@ +package controller + +import ( + "fmt" + + "github.com/consensys/linea-monorepo/prover/config" + "github.com/dlclark/regexp2" +) + +const ( + jobNameBootstrap = "bootstrap-execution" + jobNameGLExecution = "gl-execution" + jobNameRandomBeacon = "randomness-beacon-execution" + jobNameLPPExecution = "lpp-execution" + jobNameConglomeration = "conglomeration-execution" +) + +// BootstrapDefinition: Defines the "one-at-all" bootstrap job +func BootstrapDefinition(conf *config.Config) JobDefinition { + jobDef := commonExecJobDef(conf, jobNameBootstrap, 0) + + // Format the extension part of the regexp if provided + inpFileExt := "" + if conf.Execution.CanRunFullLarge { + inpFileExt = fmt.Sprintf(`\.%v`, 
config.LargeSuffix) + } + + // Set the InputFileRegexp specific to ExecutionDefinition + jobDef.InputFileRegexp = regexp2.MustCompile( + fmt.Sprintf( + `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$`, + inpFileExt, + config.FailSuffix, + ), + regexp2.None, + ) + + return jobDef +} + +// Function to define Conglomeration job +func ConglomerationDefinition(conf *config.Config) JobDefinition { + jobDef := commonExecJobDef(conf, jobNameConglomeration, 4) + + // Set the OutputFileTmpl specific to ConglomerationDefinition + jobDef.OutputFileTmpl = tmplMustCompile( + "exec-output-file", + "{{.Start}}-{{.End}}-getZkProof.json", + ) + + return jobDef +} + +// TODO: Define other jobs diff --git a/prover/cmd/controller/controller/job_definition_test.go b/prover/cmd/controller/controller/job_definition_test.go index b71296508..7fcb51d2c 100644 --- a/prover/cmd/controller/controller/job_definition_test.go +++ b/prover/cmd/controller/controller/job_definition_test.go @@ -133,7 +133,7 @@ func TestExecutionInFileRegexp(t *testing.T) { def := ExecutionDefinition(&conf) t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCase(t, &conf, &def, c) + runInpFileTestCase(t, &def, c) }) } } @@ -196,7 +196,7 @@ func TestCompressionInFileRegexp(t *testing.T) { def := CompressionDefinition(&conf) t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCase(t, &conf, &def, c) + runInpFileTestCase(t, &def, c) }) } } @@ -244,12 +244,12 @@ func TestAggregatedInFileRegexp(t *testing.T) { def := AggregatedDefinition(&conf) t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCase(t, &conf, &def, c) + runInpFileTestCase(t, &def, c) }) } } -func runInpFileTestCase(t *testing.T, conf *config.Config, def *JobDefinition, c inpFileNamesCases) { +func runInpFileTestCase(t *testing.T, def *JobDefinition, c inpFileNamesCases) { for i, fname := range c.Fnames { From 7b0706c9607d43631ec62f7bdf68397bcf54b438 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Tue, 
28 Jan 2025 16:50:20 +0000 Subject: [PATCH 04/48] init limitless job definitions --- .../controller/controller/job_definition.go | 106 +++++++++++------- .../controller/job_definition_limitless.go | 82 ++++++++++---- prover/config/config.go | 44 ++++++++ 3 files changed, 170 insertions(+), 62 deletions(-) diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index de63577ec..e42038b39 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -69,31 +69,61 @@ type JobDefinition struct { // Definition of an execution prover job. The function panics on any error since // it is called at start up. func ExecutionDefinition(conf *config.Config) JobDefinition { - // Format the extension part of the regexp if provided + + // format the extension part of the regexp if provided inpFileExt := "" if conf.Execution.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } - jobDef := commonExecJobDef(conf, jobNameExecution, 0) + return JobDefinition{ + RequestsRootDir: conf.Execution.RequestsRootDir, - // Set the InputFileRegexp specific to ExecutionDefinition - jobDef.InputFileRegexp = regexp2.MustCompile( - fmt.Sprintf( - `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$`, - inpFileExt, - config.FailSuffix, + // Name of the job + Name: jobNameExecution, + + // This will panic at startup if the regexp is invalid + InputFileRegexp: regexp2.MustCompile( + fmt.Sprintf( + `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$`, + inpFileExt, + config.FailSuffix, + ), + regexp2.None, ), - regexp2.None, - ) - // Set the OutputFileTmpl specific to ExecutionDefinition - jobDef.OutputFileTmpl = tmplMustCompile( - "exec-output-file", - "{{.Start}}-{{.End}}-getZkProof.json", - ) + // This will panic at startup if the template is invalid + OutputFileTmpl: tmplMustCompile( + 
"exec-output-file", + "{{.Start}}-{{.End}}-getZkProof.json", + ), + + // Execution job are at utmost priority + Priority: 0, - return jobDef + // Parameters of the regexp, they can loose in the sense that these regexp + // are only called if the `InputFileRegexp` is matched. + ParamsRegexp: struct { + Start *regexp2.Regexp + End *regexp2.Regexp + Stv *regexp2.Regexp + Etv *regexp2.Regexp + Cv *regexp2.Regexp + ContentHash *regexp2.Regexp + }{ + // Match a string of digit at the beginning of the line + Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), + // Match a string of digit coming after the first string of digits that + // initiate the line and followed by a "-" + End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), + // Match a sequence of digits and "." comining after (resp.) "etv" and + // "cv" + Etv: matchVersionWithPrefix("etv"), + Stv: matchVersionWithPrefix("stv"), + }, + + FailureSuffix: matchFailureSuffix(config.FailSuffix), + } } // Definition of an execution prover job. @@ -148,7 +178,7 @@ func CompressionDefinition(conf *config.Config) JobDefinition { } } -// Definition of an execution prover job. +// Definition of an aggregated prover job. 
func AggregatedDefinition(conf *config.Config) JobDefinition { return JobDefinition{ @@ -200,27 +230,27 @@ func AggregatedDefinition(conf *config.Config) JobDefinition { // Helper function to set up common parts of execution JobDefinition // Used in Execution Definition and for Limitless prover Bootstrap and Conglomeration -func commonExecJobDef(conf *config.Config, jobName string, priority int) JobDefinition { - return JobDefinition{ - RequestsRootDir: conf.Execution.RequestsRootDir, - Name: jobName, - Priority: priority, - ParamsRegexp: struct { - Start *regexp2.Regexp - End *regexp2.Regexp - Stv *regexp2.Regexp - Etv *regexp2.Regexp - Cv *regexp2.Regexp - ContentHash *regexp2.Regexp - }{ - Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), - End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), - Etv: matchVersionWithPrefix("etv"), - Stv: matchVersionWithPrefix("stv"), - }, - FailureSuffix: matchFailureSuffix(config.FailSuffix), - } -} +// func commonExecJobDef(reqRootDir string, jobName string, priority int) JobDefinition { +// return JobDefinition{ +// RequestsRootDir: reqRootDir, +// Name: jobName, +// Priority: priority, +// ParamsRegexp: struct { +// Start *regexp2.Regexp +// End *regexp2.Regexp +// Stv *regexp2.Regexp +// Etv *regexp2.Regexp +// Cv *regexp2.Regexp +// ContentHash *regexp2.Regexp +// }{ +// Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), +// End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), +// Etv: matchVersionWithPrefix("etv"), +// Stv: matchVersionWithPrefix("stv"), +// }, +// FailureSuffix: matchFailureSuffix(config.FailSuffix), +// } +// } // Version prefix template func matchVersionWithPrefix(pre string) *regexp2.Regexp { diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index 19c802188..f36e83677 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ 
b/prover/cmd/controller/controller/job_definition_limitless.go @@ -15,40 +15,74 @@ const ( jobNameConglomeration = "conglomeration-execution" ) -// BootstrapDefinition: Defines the "one-at-all" bootstrap job -func BootstrapDefinition(conf *config.Config) JobDefinition { - jobDef := commonExecJobDef(conf, jobNameBootstrap, 0) +func createJobDefinition(conf *config.Config, jobName, inputFilePattern, outputTmpl, outputFileName string, priority int, requestsRootDir string) JobDefinition { + return JobDefinition{ + RequestsRootDir: requestsRootDir, + Name: jobName, + InputFileRegexp: regexp2.MustCompile( + inputFilePattern, + regexp2.None, + ), + OutputFileTmpl: tmplMustCompile( + outputTmpl, + outputFileName, + ), + Priority: priority, + ParamsRegexp: struct { + Start *regexp2.Regexp + End *regexp2.Regexp + Stv *regexp2.Regexp + Etv *regexp2.Regexp + Cv *regexp2.Regexp + ContentHash *regexp2.Regexp + }{ + Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), + End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), + Etv: matchVersionWithPrefix("etv"), + Stv: matchVersionWithPrefix("stv"), + }, + FailureSuffix: matchFailureSuffix(config.FailSuffix), + } +} - // Format the extension part of the regexp if provided +func BootstrapDefinition(conf *config.Config) JobDefinition { inpFileExt := "" if conf.Execution.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } - - // Set the InputFileRegexp specific to ExecutionDefinition - jobDef.InputFileRegexp = regexp2.MustCompile( - fmt.Sprintf( - `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$`, - inpFileExt, - config.FailSuffix, - ), - regexp2.None, + inputFilePattern := fmt.Sprintf( + `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$`, + inpFileExt, + config.FailSuffix, ) - - return jobDef + return createJobDefinition(conf, jobNameBootstrap, inputFilePattern, "bootstrap-exec-output-file", 
"{{.Start}}-{{.End}}-.getZKExecBootstrap.json", 0, conf.Bootstrap.RequestsRootDir) } -// Function to define Conglomeration job -func ConglomerationDefinition(conf *config.Config) JobDefinition { - jobDef := commonExecJobDef(conf, jobNameConglomeration, 4) +func GLExecutionDefinition(conf *config.Config) JobDefinition { + inputFilePattern := fmt.Sprintf( + `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKExecBootstrap\.json(\.failure\.%v_[0-9]+)*$`, + config.FailSuffix, + ) + return createJobDefinition(conf, jobNameGLExecution, inputFilePattern, "gl-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecGLProof.json", 1, conf.GLExecution.RequestsRootDir) +} - // Set the OutputFileTmpl specific to ConglomerationDefinition - jobDef.OutputFileTmpl = tmplMustCompile( - "exec-output-file", - "{{.Start}}-{{.End}}-getZkProof.json", +func RandomBeaconDefinition(conf *config.Config) JobDefinition { + inputFilePattern := fmt.Sprintf( + `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKExecGLProof\.json(\.failure\.%v_[0-9]+)*$`, + config.FailSuffix, ) + return createJobDefinition(conf, jobNameRandomBeacon, inputFilePattern, "rnd-beacon-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecRandBeacon.json", 2, conf.RandomBeacon.RequestsRootDir) +} - return jobDef +func LPPExecutionDefinition(conf *config.Config) JobDefinition { + inputFilePattern := fmt.Sprintf( + `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKExecRandBeacon\.json(\.failure\.%v_[0-9]+)*$`, + config.FailSuffix, + ) + return createJobDefinition(conf, jobNameLPPExecution, inputFilePattern, "lpp-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecLPPProof.json", 3, conf.LPPExecution.RequestsRootDir) } -// TODO: Define other jobs +// // TODO: Figure out: How will the request dir. work here? +// // Are we combining the responses from GL and LPP in to one file +// // Or We can set optional req dir. 
in the JobDefinition struct +// func ConglomerationDefinition(conf *config.Config) JobDefinition { // return JobDefinition{} // } diff --git a/prover/config/config.go b/prover/config/config.go index d13f56b11..8c02dace2 100644 --- a/prover/config/config.go +++ b/prover/config/config.go @@ -111,6 +111,13 @@ type Config struct { Aggregation Aggregation PublicInputInterconnection PublicInput `mapstructure:"public_input_interconnection"` // TODO add wizard compilation params + // LIMITLESS PROVER STUFF + Bootstrap Bootstrap + GLExecution GLExecution + RandomBeacon RandomBeacon + LPPExecution LPPExecution + Conglomeration Conglomeration + Debug struct { // Profiling indicates whether we want to generate profiles using the [runtime/pprof] pkg. // Profiles can later be read using the `go tool pprof` command. @@ -212,6 +219,43 @@ type Execution struct { ConflatedTracesDir string `mapstructure:"conflated_traces_dir" validate:"required"` } +// TODO: Add and define Limitless prover components +type Bootstrap struct { + WithRequestDir `mapstructure:",squash"` + + // ConflatedTracesDir stores the directory where the conflation traces are stored. + ConflatedTracesDir string `mapstructure:"conflated_traces_dir" validate:"required"` +} + +type GLExecution struct { + // Directory where the submodule request is stored + WithRequestDir `mapstructure:",squash"` + + // ConflatedTracesDir stores the directory where the conflation traces are stored. 
+ ConflatedTracesDir string `mapstructure:"conflated_traces_dir" validate:"required"` +} + +type RandomBeacon struct { + // Directory where LPP Beacon request is stored + WithRequestDir `mapstructure:",squash"` + + // DistMetaData points to the directory where distributed metadata is stored + DistMetaData string `mapstructure:"distributed_metadata" validate:"required"` +} + +type LPPExecution struct { + // Directory where LPP Proof request is stored + WithRequestDir `mapstructure:",squash"` +} + +type Conglomeration struct { + // Directory where GL-sub prover response is stored + GLResp WithRequestDir `mapstructure:",squash"` + + // Directory where LPP-sub prover response is stored + LPPResp WithRequestDir `mapstructure:",squash"` +} + type BlobDecompression struct { WithRequestDir `mapstructure:",squash"` From 4b1da482247d40dda5d77f222fdc64c9b9c36f55 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Wed, 29 Jan 2025 09:42:39 +0000 Subject: [PATCH 05/48] add congolomeration job definition --- .../controller/controller/job_definition.go | 51 +++++++------------ .../controller/job_definition_limitless.go | 49 +++++++++++++----- prover/config/config.go | 2 +- 3 files changed, 54 insertions(+), 48 deletions(-) diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index e42038b39..79f91384a 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -20,12 +20,19 @@ const ( // a job. type JobDefinition struct { - // Parameters for the job definition provided by the user - RequestsRootDir string - // Name of the job Name string + // Priority at which this type of job should be processed. The lower the + // more of a priority. + // + // Typically 0 for execution, 1 for compression and 2 for aggregation. 
+ // + Priority int + + // Parameters for the job definition provided by the user + RequestsRootDir string + // The regexp to use to match input files. For instance, // // `^\d+-\d+-etv0.1.2-stv\d.\d.\d-getZkProof.json$` @@ -36,6 +43,13 @@ type JobDefinition struct { // InputFileRegexp *regexp2.Regexp + // Optional Request Root Dir. Some jobs might have multiple request files + // For eg: Conglomeration will have two requests files - responses from GL and LPP sub-prover + OptReqRootDir string + + // Regex check for optional input files + OptInputFileRegexp *regexp2.Regexp + // Template to use to generate the output file. The template should have the // form of a go template. For instance, // @@ -43,13 +57,6 @@ type JobDefinition struct { // OutputFileTmpl *template.Template - // Priority at which this type of job should be processed. The lower the - // more of a priority. - // - // Typically 0 for execution, 1 for compression and 2 for aggregation. - // - Priority int - // The associated compiled regexp, this saves on recompiling the regexps // everytime we want to use them. If a field is not needed, it can be left // at zero. 
@@ -228,30 +235,6 @@ func AggregatedDefinition(conf *config.Config) JobDefinition { } } -// Helper function to set up common parts of execution JobDefinition -// Used in Execution Definition and for Limitless prover Bootstrap and Conglomeration -// func commonExecJobDef(reqRootDir string, jobName string, priority int) JobDefinition { -// return JobDefinition{ -// RequestsRootDir: reqRootDir, -// Name: jobName, -// Priority: priority, -// ParamsRegexp: struct { -// Start *regexp2.Regexp -// End *regexp2.Regexp -// Stv *regexp2.Regexp -// Etv *regexp2.Regexp -// Cv *regexp2.Regexp -// ContentHash *regexp2.Regexp -// }{ -// Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), -// End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), -// Etv: matchVersionWithPrefix("etv"), -// Stv: matchVersionWithPrefix("stv"), -// }, -// FailureSuffix: matchFailureSuffix(config.FailSuffix), -// } -// } - // Version prefix template func matchVersionWithPrefix(pre string) *regexp2.Regexp { return regexp2.MustCompile( diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index f36e83677..7b51490b1 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -15,10 +15,11 @@ const ( jobNameConglomeration = "conglomeration-execution" ) -func createJobDefinition(conf *config.Config, jobName, inputFilePattern, outputTmpl, outputFileName string, priority int, requestsRootDir string) JobDefinition { - return JobDefinition{ - RequestsRootDir: requestsRootDir, - Name: jobName, +func createJobDefinition(name string, priority int, reqRootDir string, inputFilePattern string, optReqRootDir string, optInputFilePattern string, outputTmpl string, outputFileName string) JobDefinition { + jd := JobDefinition{ + Name: name, + Priority: priority, + RequestsRootDir: reqRootDir, InputFileRegexp: regexp2.MustCompile( inputFilePattern, 
regexp2.None, @@ -27,7 +28,6 @@ func createJobDefinition(conf *config.Config, jobName, inputFilePattern, outputT outputTmpl, outputFileName, ), - Priority: priority, ParamsRegexp: struct { Start *regexp2.Regexp End *regexp2.Regexp @@ -43,6 +43,14 @@ func createJobDefinition(conf *config.Config, jobName, inputFilePattern, outputT }, FailureSuffix: matchFailureSuffix(config.FailSuffix), } + + // Additional check for optional dirs => Congolomeration + if optInputFilePattern != "" && optReqRootDir != "" { + jd.OptReqRootDir = optReqRootDir + jd.OptInputFileRegexp = regexp2.MustCompile(optInputFilePattern, regexp2.None) + } + + return jd } func BootstrapDefinition(conf *config.Config) JobDefinition { @@ -55,7 +63,8 @@ func BootstrapDefinition(conf *config.Config) JobDefinition { inpFileExt, config.FailSuffix, ) - return createJobDefinition(conf, jobNameBootstrap, inputFilePattern, "bootstrap-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecBootstrap.json", 0, conf.Bootstrap.RequestsRootDir) + return createJobDefinition(jobNameBootstrap, 0, conf.Bootstrap.RequestsRootDir, inputFilePattern, "", + "", "bootstrap-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecBootstrap.json") } func GLExecutionDefinition(conf *config.Config) JobDefinition { @@ -63,7 +72,8 @@ func GLExecutionDefinition(conf *config.Config) JobDefinition { `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKExecBootstrap\.json(\.failure\.%v_[0-9]+)*$`, config.FailSuffix, ) - return createJobDefinition(conf, jobNameGLExecution, inputFilePattern, "gl-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecGLProof.json", 1, conf.GLExecution.RequestsRootDir) + return createJobDefinition(jobNameGLExecution, 1, conf.GLExecution.RequestsRootDir, inputFilePattern, "", + "", "gl-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecGLProof.json") } func RandomBeaconDefinition(conf *config.Config) JobDefinition { @@ -71,7 +81,8 @@ func RandomBeaconDefinition(conf *config.Config) JobDefinition { 
`^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKExecGLProof\.json(\.failure\.%v_[0-9]+)*$`, config.FailSuffix, ) - return createJobDefinition(conf, jobNameRandomBeacon, inputFilePattern, "rnd-beacon-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecRandBeacon.json", 2, conf.RandomBeacon.RequestsRootDir) + return createJobDefinition(jobNameRandomBeacon, 2, conf.RandomBeacon.RequestsRootDir, inputFilePattern, "", + "", "rnd-beacon-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecRandBeacon.json") } func LPPExecutionDefinition(conf *config.Config) JobDefinition { @@ -79,10 +90,22 @@ func LPPExecutionDefinition(conf *config.Config) JobDefinition { `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKExecRandBeacon\.json(\.failure\.%v_[0-9]+)*$`, config.FailSuffix, ) - return createJobDefinition(conf, jobNameLPPExecution, inputFilePattern, "lpp-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecLPPProof.json", 3, conf.LPPExecution.RequestsRootDir) + return createJobDefinition(jobNameLPPExecution, 3, conf.LPPExecution.RequestsRootDir, inputFilePattern, "", + "", "lpp-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecLPPProof.json") } -// // TODO: Figure out: How will the request dir. work here? -// // Are we combining the responses from GL and LPP in to one file -// // Or We can set optional req dir. 
in the JobDefinition struct -// func ConglomerationDefinition(conf *config.Config) JobDefinition { // return JobDefinition{} // } +func ConglomerationDefinition(conf *config.Config) JobDefinition { + inputFilePattern := fmt.Sprintf( + `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKExecGLProof\.json(\.failure\.%v_[0-9]+)*$`, + config.FailSuffix, + ) + + optFilePattern := fmt.Sprintf( + `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKExecLPPProof\.json(\.failure\.%v_[0-9]+)*$`, + config.FailSuffix, + ) + + return createJobDefinition(jobNameConglomeration, 4, conf.Conglomeration.GLResp.RequestsRootDir, + inputFilePattern, conf.Conglomeration.LPPResp.RequestsRootDir, optFilePattern, + "exec-output-file", "{{.Start}}-{{.End}}-.getZKProof.json") +} diff --git a/prover/config/config.go b/prover/config/config.go index 8c02dace2..e3bb5a4d7 100644 --- a/prover/config/config.go +++ b/prover/config/config.go @@ -111,7 +111,7 @@ type Config struct { Aggregation Aggregation PublicInputInterconnection PublicInput `mapstructure:"public_input_interconnection"` // TODO add wizard compilation params - // LIMITLESS PROVER STUFF + // LIMITLESS PROVER Components Bootstrap Bootstrap GLExecution GLExecution RandomBeacon RandomBeacon From 3b23278ec9814c93442046012fd8c9fa768e595e Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Wed, 29 Jan 2025 14:38:29 +0000 Subject: [PATCH 06/48] enable multiple req-resp files for diff. 
job definitions --- .../controller/controller/job_definition.go | 7 - .../controller/job_definition_limitless.go | 266 +++++++++++++----- prover/config/config.go | 38 +-- prover/config/config_default.go | 4 + 4 files changed, 217 insertions(+), 98 deletions(-) diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index 79f91384a..9e16399bc 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -43,13 +43,6 @@ type JobDefinition struct { // InputFileRegexp *regexp2.Regexp - // Optional Request Root Dir. Some jobs might have multiple request files - // For eg: Conglomeration will have two requests files - responses from GL and LPP sub-prover - OptReqRootDir string - - // Regex check for optional input files - OptInputFileRegexp *regexp2.Regexp - // Template to use to generate the output file. The template should have the // form of a go template. For instance, // diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index 7b51490b1..459619163 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -2,6 +2,7 @@ package controller import ( "fmt" + "text/template" "github.com/consensys/linea-monorepo/prover/config" "github.com/dlclark/regexp2" @@ -15,19 +16,114 @@ const ( jobNameConglomeration = "conglomeration-execution" ) -func createJobDefinition(name string, priority int, reqRootDir string, inputFilePattern string, optReqRootDir string, optInputFilePattern string, outputTmpl string, outputFileName string) JobDefinition { - jd := JobDefinition{ - Name: name, - Priority: priority, - RequestsRootDir: reqRootDir, - InputFileRegexp: regexp2.MustCompile( - inputFilePattern, - regexp2.None, - ), - OutputFileTmpl: tmplMustCompile( - outputTmpl, - outputFileName, - ), +const ( 
+ priorityBootstrap = 0 + priorityGLExecution = 1 + priorityRandomBeacon = 2 + priorityLPPExecution = 3 + priorityConglomeration = 4 +) + +const ( + bootstrapInputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` + glInputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_Bootstrap_Submodule\.json%v(\.failure\.%v_[0-9]+)*$` + randomBeaconInputPattern1 = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` + randomBeaconInputPattern2 = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_GL_Beacon\.json%v(\.failure\.%v_[0-9]+)*$` + lppInputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` + conglomerationInputPattern1 = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_GL\.json%v(\.failure\.%v_[0-9]+)*$` + conglomerationInputPattern2 = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_LPP\.json%v(\.failure\.%v_[0-9]+)*$` +) + +const ( + bootstrapSubmoduleFile = "{{.Start}}-{{.End}}-.getZKProof_Bootstrap_Submodule.json" + bootstrapDistMetadataFile = "{{.Start}}-{{.End}}-.getZKProof_Bootstrap_DistMetadata.json" + glBeaconFile = "{{.Start}}-{{.End}}-.getZKProof_GL_Beacon.json" + glOutputFile = "{{.Start}}-{{.End}}-.getZKProof_GL.json" + randomBeaconDistMetadataFile = "{{.Start}}-{{.End}}-.getZKProof_Bootstrap_DistMetadata.json" + randomBeaconGLFile = "{{.Start}}-{{.End}}-.getZKProof_GL_Beacon.json" + randomBeaconOutputFile = "{{.Start}}-{{.End}}-.getZKProof_RndBeacon.json" + lppOutputFile = "{{.Start}}-{{.End}}-.getZKProof_LPP.json" + conglomerationGLFile = "{{.Start}}-{{.End}}-.getZKProof_GL.json" + conglomerationLPPFile = "{{.Start}}-{{.End}}-.getZKProof_LPP.json" + conglomerationOutputFile = "{{.Start}}-{{.End}}-.getZKProof.json" +) + +type JobDefinition_Limitless struct { + // Name of the job + Name string + + // Priority at which this type of job should be 
processed. The lower the more of a priority. + // Typically 0 for bootstrap, 1 for Gl execution, 2 for Random Beacon, 3 for LPP Execution, 4 for Conglomeration. + Priority int + + // Parameters for the job definition provided by the user + // There can be multiple i/p request files for a job eg: conglomeration + ReqRootDir []string + + // The regexp to use to match input files. For instance, + // + // `^\d+-\d+-etv0.1.2-stv\d.\d.\d-getZkProof.json$` + // + // Will tell the controller to accept any version of the state-manager + // but to only accept execution trace. The regexp should always start "^" + // and end with "$" otherwise you are going to match in-progress files. + // + InputFilesRegexp []*regexp2.Regexp + + // Template to use to generate the output file. The template should have the + // form of a go template. For instance, + // + // `{{.From}}-{{.To}}-pv{{.Version}}-stv{{.Stv}}-etv{{.Etv}}-zkProof.json` + // There can be multiple output files for a job. Eg: GL-Execution + OutputFileTmpl []*template.Template + + // The associated compiled regexp, this saves on recompiling the regexps + // everytime we want to use them. If a field is not needed, it can be left + // at zero. + ParamsRegexp struct { + Start *regexp2.Regexp + End *regexp2.Regexp + Stv *regexp2.Regexp + Etv *regexp2.Regexp + Cv *regexp2.Regexp + ContentHash *regexp2.Regexp + } + + // Regexp of the failure code so that we can trim it if we want to retry. 
+ FailureSuffix *regexp2.Regexp +} + +// Function to create a JobDefinition_Limitless +func createJobDefinition(name string, priority int, + reqRootDir, inputFilePattern []string, + outputTmpl, outputFileName []string) (*JobDefinition_Limitless, error) { + + numReqs, numIPs, numOPTmpl, numOPFileName := len(reqRootDir), len(inputFilePattern), len(outputTmpl), len(outputFileName) + if numReqs != numIPs || numOPTmpl != numOPFileName { + return nil, fmt.Errorf(`length mismatch: reqRootDir:%d, inputFilePattern:%d, + outputTmpl:%d, and outputFileName:%d must have the same length`, numReqs, numIPs, numOPTmpl, numOPFileName) + } + + var inputFileRegexps []*regexp2.Regexp + for _, pattern := range inputFilePattern { + re, err := regexp2.Compile(pattern, regexp2.None) + if err != nil { + return nil, fmt.Errorf("invalid input file pattern: %v", err) + } + inputFileRegexps = append(inputFileRegexps, re) + } + + var outputFileTemplates []*template.Template + for i, tmpl := range outputTmpl { + outputFileTemplates = append(outputFileTemplates, tmplMustCompile(tmpl, outputFileName[i])) + } + + return &JobDefinition_Limitless{ + Name: name, + Priority: priority, + ReqRootDir: reqRootDir, + InputFilesRegexp: inputFileRegexps, + OutputFileTmpl: outputFileTemplates, ParamsRegexp: struct { Start *regexp2.Regexp End *regexp2.Regexp @@ -41,71 +137,111 @@ func createJobDefinition(name string, priority int, reqRootDir string, inputFile Etv: matchVersionWithPrefix("etv"), Stv: matchVersionWithPrefix("stv"), }, - FailureSuffix: matchFailureSuffix(config.FailSuffix), - } + FailureSuffix: matchFailureSuffix("fail"), + }, nil +} - // Additional check for optional dirs => Congolomeration - if optInputFilePattern != "" && optReqRootDir != "" { - jd.OptReqRootDir = optReqRootDir - jd.OptInputFileRegexp = regexp2.MustCompile(optInputFilePattern, regexp2.None) +func BootstrapDefinition(conf *config.Config) (*JobDefinition_Limitless, error) { + inpFileExt := "" + if conf.Bootstrap.CanRunFullLarge { 
+ inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } - - return jd + inputFilePattern := []string{ + fmt.Sprintf( + bootstrapInputPattern, + inpFileExt, + config.FailSuffix, + ), + } + reqRootDir := []string{conf.Bootstrap.RequestsRootDir} + outputTmpl := []string{"exec-bootstrap-submodule-req-file", "exec-bootstrap-submodule-distmetadata-file"} + outputFileName := []string{bootstrapSubmoduleFile, bootstrapDistMetadataFile} + return createJobDefinition(jobNameBootstrap, priorityBootstrap, reqRootDir, inputFilePattern, outputTmpl, outputFileName) } -func BootstrapDefinition(conf *config.Config) JobDefinition { +func GLExecutionDefinition(conf *config.Config) (*JobDefinition_Limitless, error) { inpFileExt := "" - if conf.Execution.CanRunFullLarge { + if conf.GLExecution.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } - inputFilePattern := fmt.Sprintf( - `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$`, - inpFileExt, - config.FailSuffix, - ) - return createJobDefinition(jobNameBootstrap, 0, conf.Bootstrap.RequestsRootDir, inputFilePattern, "", - "", "bootstrap-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecBootstrap.json") -} + inputFilePattern := []string{ + fmt.Sprintf( + glInputPattern, + inpFileExt, + config.FailSuffix, + ), + } + reqRootDir := []string{conf.GLExecution.RequestsRootDir} + outputTmpl := []string{"exec-GL-Beacon-file", "exec-GL-output-file"} + outputFileName := []string{glBeaconFile, glOutputFile} -func GLExecutionDefinition(conf *config.Config) JobDefinition { - inputFilePattern := fmt.Sprintf( - `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKExecBootstrap\.json(\.failure\.%v_[0-9]+)*$`, - config.FailSuffix, - ) - return createJobDefinition(jobNameGLExecution, 1, conf.GLExecution.RequestsRootDir, inputFilePattern, "", - "", "gl-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecGLProof.json") + return createJobDefinition(jobNameGLExecution, priorityGLExecution, reqRootDir, 
inputFilePattern, outputTmpl, outputFileName) } -func RandomBeaconDefinition(conf *config.Config) JobDefinition { - inputFilePattern := fmt.Sprintf( - `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKExecGLProof\.json(\.failure\.%v_[0-9]+)*$`, - config.FailSuffix, - ) - return createJobDefinition(jobNameRandomBeacon, 2, conf.RandomBeacon.RequestsRootDir, inputFilePattern, "", - "", "rnd-beacon-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecRandBeacon.json") +func RandomBeaconDefinition(conf *config.Config) (*JobDefinition_Limitless, error) { + inpFile1Ext, inpFile2Ext := "", "" + if conf.RandomBeacon.Bootstrap.CanRunFullLarge && conf.RandomBeacon.GL.CanRunFullLarge { + inpFile1Ext, inpFile2Ext = fmt.Sprintf(`\.%v`, config.LargeSuffix), fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + inputFilePattern := []string{ + fmt.Sprintf( + randomBeaconInputPattern1, + inpFile1Ext, + config.FailSuffix, + ), + fmt.Sprintf( + randomBeaconInputPattern2, + inpFile2Ext, + config.FailSuffix, + ), + } + reqRootDir := []string{conf.RandomBeacon.Bootstrap.RequestsRootDir, conf.RandomBeacon.GL.RequestsRootDir} + outputTmpl := []string{"exec-rndbeacon-output-file"} + outputFileName := []string{randomBeaconOutputFile} + return createJobDefinition(jobNameRandomBeacon, priorityRandomBeacon, reqRootDir, inputFilePattern, outputTmpl, outputFileName) } -func LPPExecutionDefinition(conf *config.Config) JobDefinition { - inputFilePattern := fmt.Sprintf( - `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKExecRandBeacon\.json(\.failure\.%v_[0-9]+)*$`, - config.FailSuffix, - ) - return createJobDefinition(jobNameLPPExecution, 3, conf.LPPExecution.RequestsRootDir, inputFilePattern, "", - "", "lpp-exec-output-file", "{{.Start}}-{{.End}}-.getZKExecLPPProof.json") -} +func LPPExecutionDefinition(conf *config.Config) (*JobDefinition_Limitless, error) { + inpFileExt := "" + if conf.LPPExecution.CanRunFullLarge { + inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + inputFilePattern := 
[]string{ + fmt.Sprintf( + lppInputPattern, + inpFileExt, + config.FailSuffix, + ), + } + reqRootDir := []string{conf.LPPExecution.RequestsRootDir} + outputTmpl := []string{"exec-LPP-output-file"} + outputFileName := []string{lppOutputFile} -func ConglomerationDefinition(conf *config.Config) JobDefinition { - inputFilePattern := fmt.Sprintf( - `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKExecGLProof\.json(\.failure\.%v_[0-9]+)*$`, - config.FailSuffix, - ) + return createJobDefinition(jobNameLPPExecution, priorityLPPExecution, reqRootDir, inputFilePattern, outputTmpl, outputFileName) +} - optFilePattern := fmt.Sprintf( - `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKExecLPPProof\.json(\.failure\.%v_[0-9]+)*$`, - config.FailSuffix, - ) +func ConglomerationDefinition(conf *config.Config) (*JobDefinition_Limitless, error) { + inpFile1Ext, inpFile2Ext := "", "" - return createJobDefinition(jobNameConglomeration, 4, conf.Conglomeration.GLResp.RequestsRootDir, - inputFilePattern, conf.Conglomeration.LPPResp.RequestsRootDir, optFilePattern, - "exec-output-file", "{{.Start}}-{{.End}}-.getZKProof.json") + // TODO: Clairfy @linea-prover Can be have multiple limitless prover component running in different modes? + // For eg. Bootstraper - "full-large" and GL-subprover in "full". If so? 
how would the file be formated + if conf.Conglomeration.GL.CanRunFullLarge && conf.Conglomeration.LPP.CanRunFullLarge { + inpFile1Ext, inpFile2Ext = fmt.Sprintf(`\.%v`, config.LargeSuffix), fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + inputFilePattern := []string{ + fmt.Sprintf( + conglomerationInputPattern1, + inpFile1Ext, + config.FailSuffix, + ), + fmt.Sprintf( + conglomerationInputPattern2, + inpFile2Ext, + config.FailSuffix, + ), + } + reqRootDir := []string{conf.Conglomeration.GL.RequestsRootDir, conf.Conglomeration.LPP.RequestsRootDir} + outputTmpl := []string{"exec-output-file"} + outputFileName := []string{conglomerationOutputFile} + return createJobDefinition(jobNameConglomeration, priorityConglomeration, reqRootDir, inputFilePattern, outputTmpl, outputFileName) } diff --git a/prover/config/config.go b/prover/config/config.go index e3bb5a4d7..f6426102f 100644 --- a/prover/config/config.go +++ b/prover/config/config.go @@ -112,11 +112,11 @@ type Config struct { PublicInputInterconnection PublicInput `mapstructure:"public_input_interconnection"` // TODO add wizard compilation params // LIMITLESS PROVER Components - Bootstrap Bootstrap - GLExecution GLExecution - RandomBeacon RandomBeacon - LPPExecution LPPExecution - Conglomeration Conglomeration + Bootstrap Bootstrap `mapstructure:"execution_bootstrap"` + GLExecution GLExecution `mapstructure:"execution_gl"` + RandomBeacon RandomBeacon `mapstructure:"execution_rndbeacon"` + LPPExecution LPPExecution `mapstructure:"execution_lpp"` + Conglomeration Conglomeration `mapstructure:"execution_conglomeration"` Debug struct { // Profiling indicates whether we want to generate profiles using the [runtime/pprof] pkg. @@ -221,39 +221,25 @@ type Execution struct { // TODO: Add and define Limitless prover components type Bootstrap struct { - WithRequestDir `mapstructure:",squash"` - - // ConflatedTracesDir stores the directory where the conflation traces are stored. 
- ConflatedTracesDir string `mapstructure:"conflated_traces_dir" validate:"required"` + Execution } type GLExecution struct { - // Directory where the submodule request is stored - WithRequestDir `mapstructure:",squash"` - - // ConflatedTracesDir stores the directory where the conflation traces are stored. - ConflatedTracesDir string `mapstructure:"conflated_traces_dir" validate:"required"` + Execution } +// Component with multiple input files type RandomBeacon struct { - // Directory where LPP Beacon request is stored - WithRequestDir `mapstructure:",squash"` - - // DistMetaData points to the directory where distributed metadata is stored - DistMetaData string `mapstructure:"distributed_metadata" validate:"required"` + Bootstrap, GL Execution } type LPPExecution struct { - // Directory where LPP Proof request is stored - WithRequestDir `mapstructure:",squash"` + Execution } +// Component with multiple input files type Conglomeration struct { - // Directory where GL-sub prover response is stored - GLResp WithRequestDir `mapstructure:",squash"` - - // Directory where LPP-sub prover response is stored - LPPResp WithRequestDir `mapstructure:",squash"` + GL, LPP Execution } type BlobDecompression struct { diff --git a/prover/config/config_default.go b/prover/config/config_default.go index fb5a20bc4..40a392a79 100644 --- a/prover/config/config_default.go +++ b/prover/config/config_default.go @@ -35,6 +35,10 @@ func setDefaultPaths() { viper.SetDefault("execution.requests_root_dir", "/shared/prover-execution") viper.SetDefault("blob_decompression.requests_root_dir", "/shared/prover-compression") viper.SetDefault("aggregation.requests_root_dir", "/shared/prover-aggregation") + + // TODO: @srinathLN7 Add Limitless prover default values + viper.SetDefault("execution_bootstrap.requests_root_dir", "/shared/prover-execution/limitless/bootstrap") + } func setDefaultTracesLimit() { From 39f7836b89f6c74ef8b0f4de5c8aa2c379aead07 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Thu, 
30 Jan 2025 09:31:30 +0000 Subject: [PATCH 07/48] refactor limitless prover job defintions --- .../controller/controller/job_definition.go | 12 +- .../controller/job_definition_limitless.go | 128 +++++---- .../job_definition_limitless_test.go | 249 ++++++++++++++++++ 3 files changed, 319 insertions(+), 70 deletions(-) create mode 100644 prover/cmd/controller/controller/job_definition_limitless_test.go diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index 9e16399bc..dfacf9e99 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -19,7 +19,6 @@ const ( // JobDefinition represents a collection of static parameters allowing to define // a job. type JobDefinition struct { - // Name of the job Name string @@ -43,6 +42,13 @@ type JobDefinition struct { // InputFileRegexp *regexp2.Regexp + // Secondary request root directory. Some jobs may have more than one input reqeust file + // Eg: Random Beacon and Conglomeration + SecRequestsRootDir string + + // Second input file pattern + SecInputFileRegexp *regexp2.Regexp + // Template to use to generate the output file. The template should have the // form of a go template. For instance, // @@ -50,6 +56,10 @@ type JobDefinition struct { // OutputFileTmpl *template.Template + // Some jobs may have more than one output file. + // Eg: Bootstraping => Submodule and distributed metadata + SecOutputFileTmpl *template.Template + // The associated compiled regexp, this saves on recompiling the regexps // everytime we want to use them. If a field is not needed, it can be left // at zero. 
diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index 459619163..2cfcd1d74 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -48,82 +48,72 @@ const ( conglomerationOutputFile = "{{.Start}}-{{.End}}-.getZKProof.json" ) -type JobDefinition_Limitless struct { - // Name of the job - Name string - - // Priority at which this type of job should be processed. The lower the more of a priority. - // Typically 0 for bootstrap, 1 for Gl execution, 2 for Random Beacon, 3 for LPP Execution, 4 for Conglomeration. - Priority int - - // Parameters for the job definition provided by the user - // There can be multiple i/p request files for a job eg: conglomeration - ReqRootDir []string - - // The regexp to use to match input files. For instance, - // - // `^\d+-\d+-etv0.1.2-stv\d.\d.\d-getZkProof.json$` - // - // Will tell the controller to accept any version of the state-manager - // but to only accept execution trace. The regexp should always start "^" - // and end with "$" otherwise you are going to match in-progress files. - // - InputFilesRegexp []*regexp2.Regexp - - // Template to use to generate the output file. The template should have the - // form of a go template. For instance, - // - // `{{.From}}-{{.To}}-pv{{.Version}}-stv{{.Stv}}-etv{{.Etv}}-zkProof.json` - // There can be multiple output files for a job. Eg: GL-Execution - OutputFileTmpl []*template.Template - - // The associated compiled regexp, this saves on recompiling the regexps - // everytime we want to use them. If a field is not needed, it can be left - // at zero. - ParamsRegexp struct { - Start *regexp2.Regexp - End *regexp2.Regexp - Stv *regexp2.Regexp - Etv *regexp2.Regexp - Cv *regexp2.Regexp - ContentHash *regexp2.Regexp - } - - // Regexp of the failure code so that we can trim it if we want to retry. 
- FailureSuffix *regexp2.Regexp -} +const ( + bootstrapOutputTmpl = "exec-bootstrap-submodule-req-file" + bootstrapDistMetadataTmpl = "exec-bootstrap-submodule-distmetadata-file" + glBeaconOutputTmpl = "exec-GL-Beacon-file" + glOutputTmpl = "exec-GL-output-file" + randomBeaconOutputTmpl = "exec-rndbeacon-output-file" + lppOutputTmpl = "exec-LPP-output-file" + conglomerationOutputTmpl = "exec-output-file" +) -// Function to create a JobDefinition_Limitless func createJobDefinition(name string, priority int, reqRootDir, inputFilePattern []string, - outputTmpl, outputFileName []string) (*JobDefinition_Limitless, error) { + outputTmpl, outputFileName []string) (*JobDefinition, error) { numReqs, numIPs, numOPTmpl, numOPFileName := len(reqRootDir), len(inputFilePattern), len(outputTmpl), len(outputFileName) + + // Currently JobDefinition supports only primary and secondary inputs/output files + // Length cannot exceed 2 + if numReqs > 2 || numIPs > 2 || numOPTmpl > 2 || numOPFileName > 2 { + return nil, fmt.Errorf("input and output parameters length cannot be greater than 2") + } + if numReqs != numIPs || numOPTmpl != numOPFileName { return nil, fmt.Errorf(`length mismatch: reqRootDir:%d, inputFilePattern:%d, outputTmpl:%d, and outputFileName:%d must have the same length`, numReqs, numIPs, numOPTmpl, numOPFileName) } - var inputFileRegexps []*regexp2.Regexp - for _, pattern := range inputFilePattern { - re, err := regexp2.Compile(pattern, regexp2.None) + // Set primary request root directory and compile primary input file regexps + primaryReqRootDir := reqRootDir[0] + inpReq1FileRegexp, err := regexp2.Compile(inputFilePattern[0], regexp2.None) + if err != nil { + return nil, fmt.Errorf("invalid input file pattern: %v", err) + } + + // Set secondary request root directory and compile secondary input file regexps + var inpReq2FileRegexp *regexp2.Regexp + var secReqRootDir string + if numReqs == 2 { + secReqRootDir = reqRootDir[1] + inpReq2FileRegexp, err = 
regexp2.Compile(inputFilePattern[1], regexp2.None) if err != nil { return nil, fmt.Errorf("invalid input file pattern: %v", err) } - inputFileRegexps = append(inputFileRegexps, re) } - var outputFileTemplates []*template.Template - for i, tmpl := range outputTmpl { - outputFileTemplates = append(outputFileTemplates, tmplMustCompile(tmpl, outputFileName[i])) + // Compile output file templates + opFile1Template := tmplMustCompile(outputTmpl[0], outputFileName[0]) + var opFile2Template *template.Template + if numOPTmpl == 2 { + opFile2Template = tmplMustCompile(outputTmpl[1], outputFileName[1]) } - return &JobDefinition_Limitless{ - Name: name, - Priority: priority, - ReqRootDir: reqRootDir, - InputFilesRegexp: inputFileRegexps, - OutputFileTmpl: outputFileTemplates, + return &JobDefinition{ + Name: name, + Priority: priority, + + // Primary and Secondary Request (Input) Files + RequestsRootDir: primaryReqRootDir, + InputFileRegexp: inpReq1FileRegexp, + SecRequestsRootDir: secReqRootDir, + SecInputFileRegexp: inpReq2FileRegexp, + + // Output Templates + OutputFileTmpl: opFile1Template, + SecOutputFileTmpl: opFile2Template, + ParamsRegexp: struct { Start *regexp2.Regexp End *regexp2.Regexp @@ -141,7 +131,7 @@ func createJobDefinition(name string, priority int, }, nil } -func BootstrapDefinition(conf *config.Config) (*JobDefinition_Limitless, error) { +func BootstrapDefinition(conf *config.Config) (*JobDefinition, error) { inpFileExt := "" if conf.Bootstrap.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) @@ -154,12 +144,12 @@ func BootstrapDefinition(conf *config.Config) (*JobDefinition_Limitless, error) ), } reqRootDir := []string{conf.Bootstrap.RequestsRootDir} - outputTmpl := []string{"exec-bootstrap-submodule-req-file", "exec-bootstrap-submodule-distmetadata-file"} + outputTmpl := []string{bootstrapOutputTmpl, bootstrapDistMetadataTmpl} outputFileName := []string{bootstrapSubmoduleFile, bootstrapDistMetadataFile} return 
createJobDefinition(jobNameBootstrap, priorityBootstrap, reqRootDir, inputFilePattern, outputTmpl, outputFileName) } -func GLExecutionDefinition(conf *config.Config) (*JobDefinition_Limitless, error) { +func GLExecutionDefinition(conf *config.Config) (*JobDefinition, error) { inpFileExt := "" if conf.GLExecution.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) @@ -172,13 +162,13 @@ func GLExecutionDefinition(conf *config.Config) (*JobDefinition_Limitless, error ), } reqRootDir := []string{conf.GLExecution.RequestsRootDir} - outputTmpl := []string{"exec-GL-Beacon-file", "exec-GL-output-file"} + outputTmpl := []string{glBeaconOutputTmpl, glOutputTmpl} outputFileName := []string{glBeaconFile, glOutputFile} return createJobDefinition(jobNameGLExecution, priorityGLExecution, reqRootDir, inputFilePattern, outputTmpl, outputFileName) } -func RandomBeaconDefinition(conf *config.Config) (*JobDefinition_Limitless, error) { +func RandomBeaconDefinition(conf *config.Config) (*JobDefinition, error) { inpFile1Ext, inpFile2Ext := "", "" if conf.RandomBeacon.Bootstrap.CanRunFullLarge && conf.RandomBeacon.GL.CanRunFullLarge { inpFile1Ext, inpFile2Ext = fmt.Sprintf(`\.%v`, config.LargeSuffix), fmt.Sprintf(`\.%v`, config.LargeSuffix) @@ -196,12 +186,12 @@ func RandomBeaconDefinition(conf *config.Config) (*JobDefinition_Limitless, erro ), } reqRootDir := []string{conf.RandomBeacon.Bootstrap.RequestsRootDir, conf.RandomBeacon.GL.RequestsRootDir} - outputTmpl := []string{"exec-rndbeacon-output-file"} + outputTmpl := []string{randomBeaconOutputTmpl} outputFileName := []string{randomBeaconOutputFile} return createJobDefinition(jobNameRandomBeacon, priorityRandomBeacon, reqRootDir, inputFilePattern, outputTmpl, outputFileName) } -func LPPExecutionDefinition(conf *config.Config) (*JobDefinition_Limitless, error) { +func LPPExecutionDefinition(conf *config.Config) (*JobDefinition, error) { inpFileExt := "" if conf.LPPExecution.CanRunFullLarge { inpFileExt = 
fmt.Sprintf(`\.%v`, config.LargeSuffix) @@ -214,13 +204,13 @@ func LPPExecutionDefinition(conf *config.Config) (*JobDefinition_Limitless, erro ), } reqRootDir := []string{conf.LPPExecution.RequestsRootDir} - outputTmpl := []string{"exec-LPP-output-file"} + outputTmpl := []string{lppOutputTmpl} outputFileName := []string{lppOutputFile} return createJobDefinition(jobNameLPPExecution, priorityLPPExecution, reqRootDir, inputFilePattern, outputTmpl, outputFileName) } -func ConglomerationDefinition(conf *config.Config) (*JobDefinition_Limitless, error) { +func ConglomerationDefinition(conf *config.Config) (*JobDefinition, error) { inpFile1Ext, inpFile2Ext := "", "" // TODO: Clairfy @linea-prover Can be have multiple limitless prover component running in different modes? @@ -241,7 +231,7 @@ func ConglomerationDefinition(conf *config.Config) (*JobDefinition_Limitless, er ), } reqRootDir := []string{conf.Conglomeration.GL.RequestsRootDir, conf.Conglomeration.LPP.RequestsRootDir} - outputTmpl := []string{"exec-output-file"} + outputTmpl := []string{conglomerationOutputTmpl} outputFileName := []string{conglomerationOutputFile} return createJobDefinition(jobNameConglomeration, priorityConglomeration, reqRootDir, inputFilePattern, outputTmpl, outputFileName) } diff --git a/prover/cmd/controller/controller/job_definition_limitless_test.go b/prover/cmd/controller/controller/job_definition_limitless_test.go new file mode 100644 index 000000000..307ed95cf --- /dev/null +++ b/prover/cmd/controller/controller/job_definition_limitless_test.go @@ -0,0 +1,249 @@ +package controller + +import ( + "testing" + + "github.com/consensys/linea-monorepo/prover/config" + "github.com/stretchr/testify/assert" +) + +func TestBootstrapDefinition(t *testing.T) { + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77" + correctWith2FailsM = 
"102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZkProof.json" + missingStv = "102-103-etv0.2.3-getZkProof.json" + notAPoint = "102-103-etv0.2.3-getZkProofAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + testcase := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Explainer: "happy path, case M", + ExpectedOutput: []string{bootstrapSubmoduleFile, bootstrapSubmoduleFile, bootstrapSubmoduleFile, bootstrapSubmoduleFile, bootstrapSubmoduleFile}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{notAPoint, badName}, + Explainer: "M does not pick obviously invalid files", + }, + } + + for _, c := range testcase { + conf := config.Config{} + conf.Version = "0.1.2" + conf.Bootstrap.CanRunFullLarge = c.Ext == "large" + + def, err := BootstrapDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCaseLimitless(t, def, c) + }) + } +} + +func TestGLExecutionDefinition(t *testing.T) { + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZKProof_Bootstrap_Submodule.json" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZKProof_Bootstrap_Submodule.json.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZKProof_Bootstrap_Submodule.json.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZKProof_Bootstrap_Submodule.json" + missingStv = "102-103-etv0.2.3-getZKProof_Bootstrap_Submodule.json" + notAPoint = "102-103-etv0.2.3-getZKProof_Bootstrap_SubmoduleAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + testcase := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Explainer: "happy path, case M", + ExpectedOutput: []string{glBeaconFile, 
glBeaconFile, glBeaconFile, glBeaconFile, glBeaconFile}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{notAPoint, badName}, + Explainer: "M does not pick obviously invalid files", + }, + } + + for _, c := range testcase { + conf := config.Config{} + conf.Version = "0.1.2" + conf.GLExecution.CanRunFullLarge = c.Ext == "large" + + def, err := GLExecutionDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCaseLimitless(t, def, c) + }) + } +} + +func TestRandomBeaconDefinition(t *testing.T) { + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZKProof_Bootstrap_DistMetadata.json" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZKProof_Bootstrap_DistMetadata.json.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZKProof_Bootstrap_DistMetadata.json.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZKProof_Bootstrap_DistMetadata.json" + missingStv = "102-103-etv0.2.3-getZKProof_Bootstrap_DistMetadata.json" + notAPoint = "102-103-etv0.2.3-getZKProof_Bootstrap_DistMetadataAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + testcase := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Explainer: "happy path, case M", + ExpectedOutput: []string{randomBeaconOutputFile, randomBeaconOutputFile, randomBeaconOutputFile, randomBeaconOutputFile, randomBeaconOutputFile}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{notAPoint, badName}, + Explainer: "M does not pick obviously invalid files", + }, + } + + for _, c := range testcase { + conf := config.Config{} + conf.Version = "0.1.2" + conf.RandomBeacon.Bootstrap.CanRunFullLarge = c.Ext == "large" + conf.RandomBeacon.GL.CanRunFullLarge = c.Ext == "large" + + def, err := RandomBeaconDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t 
*testing.T) { + runInpFileTestCaseLimitless(t, def, c) + }) + } +} + +func TestLPPExecutionDefinition(t *testing.T) { + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZKProof_RndBeacon.json" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZKProof_RndBeacon.json.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZKProof_RndBeacon.json.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZKProof_RndBeacon.json" + missingStv = "102-103-etv0.2.3-getZKProof_RndBeacon.json" + notAPoint = "102-103-etv0.2.3-getZKProof_RndBeaconAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + testcase := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Explainer: "happy path, case M", + ExpectedOutput: []string{lppOutputFile, lppOutputFile, lppOutputFile, lppOutputFile, lppOutputFile}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{notAPoint, badName}, + Explainer: "M does not pick obviously invalid files", + }, + } + + for _, c := range testcase { + conf := config.Config{} + conf.Version = "0.1.2" + conf.LPPExecution.CanRunFullLarge = c.Ext == "large" + + def, err := LPPExecutionDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCaseLimitless(t, def, c) + }) + } +} + +func TestConglomerationDefinition(t *testing.T) { + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZKProof_GL.json" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZKProof_GL.json.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZKProof_GL.json.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZKProof_GL.json" + missingStv = "102-103-etv0.2.3-getZKProof_GL.json" + notAPoint = "102-103-etv0.2.3-getZKProof_GLAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + testcase := []inpFileNamesCases{ + { + 
Ext: "", Fail: "code", ShouldMatch: true, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Explainer: "happy path, case M", + ExpectedOutput: []string{conglomerationOutputFile, conglomerationOutputFile, conglomerationOutputFile, conglomerationOutputFile, conglomerationOutputFile}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{notAPoint, badName}, + Explainer: "M does not pick obviously invalid files", + }, + } + + for _, c := range testcase { + conf := config.Config{} + conf.Version = "0.1.2" + conf.Conglomeration.GL.CanRunFullLarge = c.Ext == "large" + conf.Conglomeration.LPP.CanRunFullLarge = c.Ext == "large" + + def, err := ConglomerationDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCaseLimitless(t, def, c) + }) + } +} + +func runInpFileTestCaseLimitless(t *testing.T, def *JobDefinition, c inpFileNamesCases) { + for i, fname := range c.Fnames { + job, err := NewJob(def, fname) + + if c.ShouldMatch { + if !assert.NoError(t, err, fname) { + continue + } + + resp, err := job.ResponseFile() + if assert.NoErrorf(t, err, "cannot produce a response for job %s", fname) { + assert.Equal(t, c.ExpectedOutput[i], resp, "wrong output file") + } + + if len(c.ExpToLarge) > 0 { + toLarge, err := job.DeferToLargeFile( + Status{ExitCode: 137}, + ) + + if assert.NoError(t, err, "cannot produce name for the too large job") { + assert.Equal(t, c.ExpToLarge[i], toLarge) + } + } + + if len(c.ExpSuccess) > 0 { + toSuccess := job.DoneFile(Status{ExitCode: 0}) + assert.Equal(t, c.ExpSuccess[i], toSuccess) + } + + if len(c.ExpFailW2) > 0 { + toFail2 := job.DoneFile(Status{ExitCode: 2}) + assert.Equal(t, c.ExpFailW2[i], toFail2) + } + + } else { + assert.Errorf( + t, err, fname, + "%v should not match %s", + fname, def.InputFileRegexp.String(), + ) + } + } +} From 2597d7d352cd867c62cfd2e26d041af4d0ec7cdc Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Thu, 30 Jan 
2025 16:31:52 +0000 Subject: [PATCH 08/48] TestBootstrapSubmodule definition success --- .../controller/controller/job_definition.go | 11 - .../controller/job_definition_limitless.go | 272 +++++++----------- .../job_definition_limitless_test.go | 265 +++++------------ prover/cmd/controller/controller/jobs.go | 15 +- prover/config/config.go | 38 +-- 5 files changed, 186 insertions(+), 415 deletions(-) diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index dfacf9e99..97027992c 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -42,13 +42,6 @@ type JobDefinition struct { // InputFileRegexp *regexp2.Regexp - // Secondary request root directory. Some jobs may have more than one input reqeust file - // Eg: Random Beacon and Conglomeration - SecRequestsRootDir string - - // Second input file pattern - SecInputFileRegexp *regexp2.Regexp - // Template to use to generate the output file. The template should have the // form of a go template. For instance, // @@ -56,10 +49,6 @@ type JobDefinition struct { // OutputFileTmpl *template.Template - // Some jobs may have more than one output file. - // Eg: Bootstraping => Submodule and distributed metadata - SecOutputFileTmpl *template.Template - // The associated compiled regexp, this saves on recompiling the regexps // everytime we want to use them. If a field is not needed, it can be left // at zero. 
diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index 2cfcd1d74..a5cca7747 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -2,117 +2,122 @@ package controller import ( "fmt" - "text/template" "github.com/consensys/linea-monorepo/prover/config" "github.com/dlclark/regexp2" ) +// Job definitions are defined such that each job has a single request and response file +// These jobs will execute asynchronously based on their set priorities const ( - jobNameBootstrap = "bootstrap-execution" - jobNameGLExecution = "gl-execution" - jobNameRandomBeacon = "randomness-beacon-execution" - jobNameLPPExecution = "lpp-execution" - jobNameConglomeration = "conglomeration-execution" -) + // Bootstrap + job_Exec_Bootstrap_Submodule = "exec-bootstrap-submodule" + job_Exec_Bootstrap_Metadata = "exec-bootstrap-metadata" -const ( - priorityBootstrap = 0 - priorityGLExecution = 1 - priorityRandomBeacon = 2 - priorityLPPExecution = 3 - priorityConglomeration = 4 + // Global-Local subprovers + job_Exec_GL_RndBeacon = "exec-GL-rndbeacon" + job_Exec_GL = "exec-GL" + + // Random Beacon + job_Exec_RndBeacon_LPP = "exec-rndbeacon-LPP" + job_Exec_RndBeacon_Metadata = "exec-rndbeacon-metadata" + + // LPP-subprovers + job_Exec_LPP = "exec-LPP" + + // Conglomerator + job_Exec_Congolomerate_LPP = "exec-congolo-LPP" + job_Exec_Congolomerate_GL = "exec-congolo-GL" + job_Exec_Congolomerate_Metadata = "exec-congolo-metadata" ) +// Priorities const ( - bootstrapInputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` - glInputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_Bootstrap_Submodule\.json%v(\.failure\.%v_[0-9]+)*$` - randomBeaconInputPattern1 = 
`^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` - randomBeaconInputPattern2 = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_GL_Beacon\.json%v(\.failure\.%v_[0-9]+)*$` - lppInputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` - conglomerationInputPattern1 = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_GL\.json%v(\.failure\.%v_[0-9]+)*$` - conglomerationInputPattern2 = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_LPP\.json%v(\.failure\.%v_[0-9]+)*$` + priority_Exec_Bootstrap_Submodule = 0 + priority_Exec_Bootstrap_Metadata = 0 + + priority_Exec_GL_RndBeacon = 1 + priority_Exec_GL = 1 + + priority_Exec_RndBeacon_LPP = 2 + priority_Exec_RndBeacon_Metadata = 2 + + priority_Exec_LPP = 3 + + priority_Exec_Congolomerate_LPP = 4 + priority_Exec_Congolomerate_GL = 4 + priority_Exec_Congolomerate_Metadata = 4 ) +// Input file patterns const ( - bootstrapSubmoduleFile = "{{.Start}}-{{.End}}-.getZKProof_Bootstrap_Submodule.json" - bootstrapDistMetadataFile = "{{.Start}}-{{.End}}-.getZKProof_Bootstrap_DistMetadata.json" - glBeaconFile = "{{.Start}}-{{.End}}-.getZKProof_GL_Beacon.json" - glOutputFile = "{{.Start}}-{{.End}}-.getZKProof_GL.json" - randomBeaconDistMetadataFile = "{{.Start}}-{{.End}}-.getZKProof_Bootstrap_DistMetadata.json" - randomBeaconGLFile = "{{.Start}}-{{.End}}-.getZKProof_GL_Beacon.json" - randomBeaconOutputFile = "{{.Start}}-{{.End}}-.getZKProof_RndBeacon.json" - lppOutputFile = "{{.Start}}-{{.End}}-.getZKProof_LPP.json" - conglomerationGLFile = "{{.Start}}-{{.End}}-.getZKProof_GL.json" - conglomerationLPPFile = "{{.Start}}-{{.End}}-.getZKProof_LPP.json" - conglomerationOutputFile = "{{.Start}}-{{.End}}-.getZKProof.json" + exec_Bootstrap_Submodule_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` + exec_Bootstrap_MetaData_InputPattern = 
`^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` + + exec_GL_RndBeacon_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_Bootstrap_Submodule\.json%v(\.failure\.%v_[0-9]+)*$` + + exec_RndBeacon_Metadata_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` + exec_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_GL_Beacon\.json%v(\.failure\.%v_[0-9]+)*$` + + exec_LPP_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` + + exec_Congolomerate_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_GL\.json%v(\.failure\.%v_[0-9]+)*$` + exec_Congolomerate_LPP_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_LPP\.json%v(\.failure\.%v_[0-9]+)*$` + exec_Congolomerate_Metadata_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` ) +// Output file patterns and templates const ( - bootstrapOutputTmpl = "exec-bootstrap-submodule-req-file" - bootstrapDistMetadataTmpl = "exec-bootstrap-submodule-distmetadata-file" - glBeaconOutputTmpl = "exec-GL-Beacon-file" - glOutputTmpl = "exec-GL-output-file" - randomBeaconOutputTmpl = "exec-rndbeacon-output-file" - lppOutputTmpl = "exec-LPP-output-file" - conglomerationOutputTmpl = "exec-output-file" -) + exec_Bootstrap_Submodule_File = "{{.Start}}-{{.End}}-getZkProof_Bootstrap_Submodule.json" + exec_Bootstrap_Submodule_Tmpl = "exec-bootstrap-submodule-req-file" -func createJobDefinition(name string, priority int, - reqRootDir, inputFilePattern []string, - outputTmpl, outputFileName []string) (*JobDefinition, error) { + exec_Bootstrap_DistMetadata_File = "{{.Start}}-{{.End}}-getZKProof_Bootstrap_DistMetadata.json" + exec_Bootstrap_DistMetadata_Tmpl = "exec-bootstrap-submodule-distmetadata-file" - numReqs, 
numIPs, numOPTmpl, numOPFileName := len(reqRootDir), len(inputFilePattern), len(outputTmpl), len(outputFileName) + // Global-Local subprovers + exec_GL_Beacon_File = "{{.Start}}-{{.End}}-getZKProof_GL_Beacon.json" + exec_GL_Beacon_Tmpl = "exec-GL-Beacon-file" - // Currently JobDefinition supports only primary and secondary inputs/output files - // Length cannot exceed 2 - if numReqs > 2 || numIPs > 2 || numOPTmpl > 2 || numOPFileName > 2 { - return nil, fmt.Errorf("input and output parameters length cannot be greater than 2") - } + exec_GL_File = "{{.Start}}-{{.End}}-getZKProof_GL.json" + exec_GL_Tmpl = "exec-GL-output-file" - if numReqs != numIPs || numOPTmpl != numOPFileName { - return nil, fmt.Errorf(`length mismatch: reqRootDir:%d, inputFilePattern:%d, - outputTmpl:%d, and outputFileName:%d must have the same length`, numReqs, numIPs, numOPTmpl, numOPFileName) - } + // Random Beacon + exec_RndBeacon_DistMetadata_File = "{{.Start}}-{{.End}}-getZKProof_Bootstrap_DistMetadata.json" - // Set primary request root directory and compile primary input file regexps - primaryReqRootDir := reqRootDir[0] - inpReq1FileRegexp, err := regexp2.Compile(inputFilePattern[0], regexp2.None) - if err != nil { - return nil, fmt.Errorf("invalid input file pattern: %v", err) - } + exec_RndBeacon_GL_File = "{{.Start}}-{{.End}}-getZKProof_GL_Beacon.json" - // Set secondary request root directory and compile secondary input file regexps - var inpReq2FileRegexp *regexp2.Regexp - var secReqRootDir string - if numReqs == 2 { - secReqRootDir = reqRootDir[1] - inpReq2FileRegexp, err = regexp2.Compile(inputFilePattern[1], regexp2.None) - if err != nil { - return nil, fmt.Errorf("invalid input file pattern: %v", err) - } - } + exec_RndBeacon_File = "{{.Start}}-{{.End}}-getZKProof_RndBeacon.json" + exec_RndBeacon_Tmpl = "exec-rndbeacon-output-file" - // Compile output file templates - opFile1Template := tmplMustCompile(outputTmpl[0], outputFileName[0]) - var opFile2Template *template.Template - 
if numOPTmpl == 2 { - opFile2Template = tmplMustCompile(outputTmpl[1], outputFileName[1]) - } + // LPP-subprovers + exec_LPP_File = "{{.Start}}-{{.End}}-getZKProof_LPP.json" + exec_LPP_Tmpl = "exec-LPP-output-file" + + // Conglomerator + // exec_Congolomerate_GL_File = "{{.Start}}-{{.End}}-.getZKProof_GL.json" + + // exec_Congolomerate_LPP_File = "{{.Start}}-{{.End}}-.getZKProof_LPP.json" + + // exec_Congolomerate_Metadata_File = "{{.Start}}-{{.End}}-.getZKProof_Bootstrap_DistMetadata.json" + + exec_Congolomerate_File = "{{.Start}}-{{.End}}-.getZKProof.json" + exec_Congolomerate_Tmpl = "exec-output-file" +) + +func createJobDefinition(name string, priority int, + reqRootDir, inputFilePattern string, + outputTmpl, outputFileName string) (*JobDefinition, error) { return &JobDefinition{ Name: name, Priority: priority, // Primary and Secondary Request (Input) Files - RequestsRootDir: primaryReqRootDir, - InputFileRegexp: inpReq1FileRegexp, - SecRequestsRootDir: secReqRootDir, - SecInputFileRegexp: inpReq2FileRegexp, + RequestsRootDir: reqRootDir, + InputFileRegexp: regexp2.MustCompile(inputFilePattern, regexp2.None), // Output Templates - OutputFileTmpl: opFile1Template, - SecOutputFileTmpl: opFile2Template, + OutputFileTmpl: tmplMustCompile(outputTmpl, outputFileName), ParamsRegexp: struct { Start *regexp2.Regexp @@ -127,111 +132,26 @@ func createJobDefinition(name string, priority int, Etv: matchVersionWithPrefix("etv"), Stv: matchVersionWithPrefix("stv"), }, - FailureSuffix: matchFailureSuffix("fail"), + FailureSuffix: matchFailureSuffix(config.FailSuffix), }, nil } -func BootstrapDefinition(conf *config.Config) (*JobDefinition, error) { +func BootstrapSubModDefinition(conf *config.Config) (*JobDefinition, error) { inpFileExt := "" - if conf.Bootstrap.CanRunFullLarge { + if conf.Bootstrap_Submodule.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } - inputFilePattern := []string{ - fmt.Sprintf( - bootstrapInputPattern, - inpFileExt, - 
config.FailSuffix, - ), - } - reqRootDir := []string{conf.Bootstrap.RequestsRootDir} - outputTmpl := []string{bootstrapOutputTmpl, bootstrapDistMetadataTmpl} - outputFileName := []string{bootstrapSubmoduleFile, bootstrapDistMetadataFile} - return createJobDefinition(jobNameBootstrap, priorityBootstrap, reqRootDir, inputFilePattern, outputTmpl, outputFileName) + inputFilePattern := fmt.Sprintf(exec_Bootstrap_Submodule_InputPattern, inpFileExt, config.FailSuffix) + return createJobDefinition(job_Exec_Bootstrap_Submodule, priority_Exec_Bootstrap_Submodule, + conf.Bootstrap_Submodule.RequestsRootDir, inputFilePattern, exec_Bootstrap_Submodule_Tmpl, exec_Bootstrap_Submodule_File) } -func GLExecutionDefinition(conf *config.Config) (*JobDefinition, error) { +func BootstrapMetadataDefinition(conf *config.Config) (*JobDefinition, error) { inpFileExt := "" - if conf.GLExecution.CanRunFullLarge { + if conf.Bootstrap_Metadata.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } - inputFilePattern := []string{ - fmt.Sprintf( - glInputPattern, - inpFileExt, - config.FailSuffix, - ), - } - reqRootDir := []string{conf.GLExecution.RequestsRootDir} - outputTmpl := []string{glBeaconOutputTmpl, glOutputTmpl} - outputFileName := []string{glBeaconFile, glOutputFile} - - return createJobDefinition(jobNameGLExecution, priorityGLExecution, reqRootDir, inputFilePattern, outputTmpl, outputFileName) -} - -func RandomBeaconDefinition(conf *config.Config) (*JobDefinition, error) { - inpFile1Ext, inpFile2Ext := "", "" - if conf.RandomBeacon.Bootstrap.CanRunFullLarge && conf.RandomBeacon.GL.CanRunFullLarge { - inpFile1Ext, inpFile2Ext = fmt.Sprintf(`\.%v`, config.LargeSuffix), fmt.Sprintf(`\.%v`, config.LargeSuffix) - } - inputFilePattern := []string{ - fmt.Sprintf( - randomBeaconInputPattern1, - inpFile1Ext, - config.FailSuffix, - ), - fmt.Sprintf( - randomBeaconInputPattern2, - inpFile2Ext, - config.FailSuffix, - ), - } - reqRootDir := 
[]string{conf.RandomBeacon.Bootstrap.RequestsRootDir, conf.RandomBeacon.GL.RequestsRootDir} - outputTmpl := []string{randomBeaconOutputTmpl} - outputFileName := []string{randomBeaconOutputFile} - return createJobDefinition(jobNameRandomBeacon, priorityRandomBeacon, reqRootDir, inputFilePattern, outputTmpl, outputFileName) -} - -func LPPExecutionDefinition(conf *config.Config) (*JobDefinition, error) { - inpFileExt := "" - if conf.LPPExecution.CanRunFullLarge { - inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) - } - inputFilePattern := []string{ - fmt.Sprintf( - lppInputPattern, - inpFileExt, - config.FailSuffix, - ), - } - reqRootDir := []string{conf.LPPExecution.RequestsRootDir} - outputTmpl := []string{lppOutputTmpl} - outputFileName := []string{lppOutputFile} - - return createJobDefinition(jobNameLPPExecution, priorityLPPExecution, reqRootDir, inputFilePattern, outputTmpl, outputFileName) -} - -func ConglomerationDefinition(conf *config.Config) (*JobDefinition, error) { - inpFile1Ext, inpFile2Ext := "", "" - - // TODO: Clairfy @linea-prover Can be have multiple limitless prover component running in different modes? - // For eg. Bootstraper - "full-large" and GL-subprover in "full". If so? 
how would the file be formated - if conf.Conglomeration.GL.CanRunFullLarge && conf.Conglomeration.LPP.CanRunFullLarge { - inpFile1Ext, inpFile2Ext = fmt.Sprintf(`\.%v`, config.LargeSuffix), fmt.Sprintf(`\.%v`, config.LargeSuffix) - } - inputFilePattern := []string{ - fmt.Sprintf( - conglomerationInputPattern1, - inpFile1Ext, - config.FailSuffix, - ), - fmt.Sprintf( - conglomerationInputPattern2, - inpFile2Ext, - config.FailSuffix, - ), - } - reqRootDir := []string{conf.Conglomeration.GL.RequestsRootDir, conf.Conglomeration.LPP.RequestsRootDir} - outputTmpl := []string{conglomerationOutputTmpl} - outputFileName := []string{conglomerationOutputFile} - return createJobDefinition(jobNameConglomeration, priorityConglomeration, reqRootDir, inputFilePattern, outputTmpl, outputFileName) + inputFilePattern := fmt.Sprintf(exec_Bootstrap_MetaData_InputPattern, inpFileExt, config.FailSuffix) + return createJobDefinition(job_Exec_Bootstrap_Metadata, priority_Exec_Bootstrap_Metadata, + conf.Bootstrap_Metadata.RequestsRootDir, inputFilePattern, exec_Bootstrap_DistMetadata_Tmpl, exec_Bootstrap_DistMetadata_File) } diff --git a/prover/cmd/controller/controller/job_definition_limitless_test.go b/prover/cmd/controller/controller/job_definition_limitless_test.go index 307ed95cf..278eba137 100644 --- a/prover/cmd/controller/controller/job_definition_limitless_test.go +++ b/prover/cmd/controller/controller/job_definition_limitless_test.go @@ -7,133 +7,69 @@ import ( "github.com/stretchr/testify/assert" ) -func TestBootstrapDefinition(t *testing.T) { +// This test ensures that the naming convention is respected by the file-watcher +// i.e., files with the right naming only are recognized. And the corresponding +// output files are also recognized. 
+func TestBootstrapSubModInFileRegexp(t *testing.T) { + var ( correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json" + correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large" correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77" + correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77" correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77.failure.code_77" + correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77.failure.code_77" missingEtv = "102-103-stv1.2.3-getZkProof.json" missingStv = "102-103-etv0.2.3-getZkProof.json" notAPoint = "102-103-etv0.2.3-getZkProofAjson" badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" ) - testcase := []inpFileNamesCases{ - { - Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, - Explainer: "happy path, case M", - ExpectedOutput: []string{bootstrapSubmoduleFile, bootstrapSubmoduleFile, bootstrapSubmoduleFile, bootstrapSubmoduleFile, bootstrapSubmoduleFile}, - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, - Explainer: "M does not pick obviously invalid files", - }, - } - - for _, c := range testcase { - conf := config.Config{} - conf.Version = "0.1.2" - conf.Bootstrap.CanRunFullLarge = c.Ext == "large" - - def, err := BootstrapDefinition(&conf) - assert.NoError(t, err) - - t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCaseLimitless(t, def, c) - }) - } -} - -func TestGLExecutionDefinition(t *testing.T) { + // The responses in case of success var ( - correctM = "102-103-etv0.2.3-stv1.2.3-getZKProof_Bootstrap_Submodule.json" - correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZKProof_Bootstrap_Submodule.json.failure.code_77" - correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZKProof_Bootstrap_Submodule.json.failure.code_77.failure.code_77" - missingEtv = 
"102-103-stv1.2.3-getZKProof_Bootstrap_Submodule.json" - missingStv = "102-103-etv0.2.3-getZKProof_Bootstrap_Submodule.json" - notAPoint = "102-103-etv0.2.3-getZKProof_Bootstrap_SubmoduleAjson" - badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + respM = "responses/102-103-getZkProof_Bootstrap_Submodule.json" + respL = "responses/102-103-getZkProof_Bootstrap_Submodule.json" + // #nosec G101 -- Not a credential + respWithFailM = "responses/102-103-getZkProof_Bootstrap_Submodule.json" + // #nosec G101 -- Not a credential + respWithFailL = "responses/102-103-getZkProof_Bootstrap_Submodule.json" + // #nosec G101 -- Not a credential + respWith2FailsM = "responses/102-103-getZkProof_Bootstrap_Submodule.json" + // #nosec G101 -- Not a credential + respWith2FailsL = "responses/102-103-getZkProof_Bootstrap_Submodule.json" ) - testcase := []inpFileNamesCases{ - { - Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, - Explainer: "happy path, case M", - ExpectedOutput: []string{glBeaconFile, glBeaconFile, glBeaconFile, glBeaconFile, glBeaconFile}, - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, - Explainer: "M does not pick obviously invalid files", - }, - } - - for _, c := range testcase { - conf := config.Config{} - conf.Version = "0.1.2" - conf.GLExecution.CanRunFullLarge = c.Ext == "large" - - def, err := GLExecutionDefinition(&conf) - assert.NoError(t, err) - - t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCaseLimitless(t, def, c) - }) - } -} - -func TestRandomBeaconDefinition(t *testing.T) { + // The rename in case it is deferred to the large prover var ( - correctM = "102-103-etv0.2.3-stv1.2.3-getZKProof_Bootstrap_DistMetadata.json" - correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZKProof_Bootstrap_DistMetadata.json.failure.code_77" - correctWith2FailsM = 
"102-103-etv0.2.3-stv1.2.3-getZKProof_Bootstrap_DistMetadata.json.failure.code_77.failure.code_77" - missingEtv = "102-103-stv1.2.3-getZKProof_Bootstrap_DistMetadata.json" - missingStv = "102-103-etv0.2.3-getZKProof_Bootstrap_DistMetadata.json" - notAPoint = "102-103-etv0.2.3-getZKProof_Bootstrap_DistMetadataAjson" - badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" + toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" + toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" + toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof.json.large.failure.code_137" + toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof.json.large.failure.code_137" ) - testcase := []inpFileNamesCases{ - { - Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, - Explainer: "happy path, case M", - ExpectedOutput: []string{randomBeaconOutputFile, randomBeaconOutputFile, randomBeaconOutputFile, randomBeaconOutputFile, randomBeaconOutputFile}, - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, - Explainer: "M does not pick obviously invalid files", - }, - } - - for _, c := range testcase { - conf := config.Config{} - conf.Version = "0.1.2" - conf.RandomBeacon.Bootstrap.CanRunFullLarge = c.Ext == "large" - conf.RandomBeacon.GL.CanRunFullLarge = c.Ext == "large" - - def, err := RandomBeaconDefinition(&conf) - assert.NoError(t, err) - - t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCaseLimitless(t, def, c) - }) - } -} + // The rename in case it is a success + var ( + successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" + successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.success" + successtWoEtv = 
"requests-done/102-103-stv1.2.3-getZkProof.json.success" + successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" + successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" + successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" + successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" + successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" + ) -func TestLPPExecutionDefinition(t *testing.T) { + // The rename in case it is a panic (code = 2) var ( - correctM = "102-103-etv0.2.3-stv1.2.3-getZKProof_RndBeacon.json" - correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZKProof_RndBeacon.json.failure.code_77" - correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZKProof_RndBeacon.json.failure.code_77.failure.code_77" - missingEtv = "102-103-stv1.2.3-getZKProof_RndBeacon.json" - missingStv = "102-103-etv0.2.3-getZKProof_RndBeacon.json" - notAPoint = "102-103-etv0.2.3-getZKProof_RndBeaconAjson" - badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" + failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.failure.code_2" + failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.failure.code_2" + failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" + failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" + failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" + failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" + failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" ) testcase := []inpFileNamesCases{ @@ -141,109 +77,54 @@ func TestLPPExecutionDefinition(t *testing.T) { Ext: "", Fail: "code", ShouldMatch: true, Fnames: 
[]string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, Explainer: "happy path, case M", - ExpectedOutput: []string{lppOutputFile, lppOutputFile, lppOutputFile, lppOutputFile, lppOutputFile}, + ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, + ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, + ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, + ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, + }, + { + Ext: "large", Fail: "code", ShouldMatch: true, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "happy path, case L", + ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, + ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, + ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, }, { Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, - Explainer: "M does not pick obviously invalid files", + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "M does not pick the files reserved for L", }, - } - - for _, c := range testcase { - conf := config.Config{} - conf.Version = "0.1.2" - conf.LPPExecution.CanRunFullLarge = c.Ext == "large" - - def, err := LPPExecutionDefinition(&conf) - assert.NoError(t, err) - - t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCaseLimitless(t, def, c) - }) - } -} - -func TestConglomerationDefinition(t *testing.T) { - var ( - correctM = "102-103-etv0.2.3-stv1.2.3-getZKProof_GL.json" - correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZKProof_GL.json.failure.code_77" - correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZKProof_GL.json.failure.code_77.failure.code_77" - missingEtv = "102-103-stv1.2.3-getZKProof_GL.json" - missingStv = "102-103-etv0.2.3-getZKProof_GL.json" - notAPoint = "102-103-etv0.2.3-getZKProof_GLAjson" - 
badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" - ) - - testcase := []inpFileNamesCases{ { - Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, - Explainer: "happy path, case M", - ExpectedOutput: []string{conglomerationOutputFile, conglomerationOutputFile, conglomerationOutputFile, conglomerationOutputFile, conglomerationOutputFile}, + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, + Explainer: "L does not pick the files reserved for M", }, { Ext: "", Fail: "code", ShouldMatch: false, Fnames: []string{notAPoint, badName}, Explainer: "M does not pick obviously invalid files", }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{missingEtv, missingStv, notAPoint, badName}, + Explainer: "L does not pick obviously invalid files", + }, } for _, c := range testcase { + + //log.Printf("Running testcase:%s \n", c.Explainer) + conf := config.Config{} conf.Version = "0.1.2" - conf.Conglomeration.GL.CanRunFullLarge = c.Ext == "large" - conf.Conglomeration.LPP.CanRunFullLarge = c.Ext == "large" + conf.Bootstrap_Submodule.CanRunFullLarge = c.Ext == "large" - def, err := ConglomerationDefinition(&conf) + def, err := BootstrapSubModDefinition(&conf) assert.NoError(t, err) t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCaseLimitless(t, def, c) + runInpFileTestCase(t, def, c) }) } } - -func runInpFileTestCaseLimitless(t *testing.T, def *JobDefinition, c inpFileNamesCases) { - for i, fname := range c.Fnames { - job, err := NewJob(def, fname) - - if c.ShouldMatch { - if !assert.NoError(t, err, fname) { - continue - } - - resp, err := job.ResponseFile() - if assert.NoErrorf(t, err, "cannot produce a response for job %s", fname) { - assert.Equal(t, c.ExpectedOutput[i], resp, "wrong output file") - } - - if len(c.ExpToLarge) > 0 { - toLarge, err := job.DeferToLargeFile( - Status{ExitCode: 
137}, - ) - - if assert.NoError(t, err, "cannot produce name for the too large job") { - assert.Equal(t, c.ExpToLarge[i], toLarge) - } - } - - if len(c.ExpSuccess) > 0 { - toSuccess := job.DoneFile(Status{ExitCode: 0}) - assert.Equal(t, c.ExpSuccess[i], toSuccess) - } - - if len(c.ExpFailW2) > 0 { - toFail2 := job.DoneFile(Status{ExitCode: 2}) - assert.Equal(t, c.ExpFailW2[i], toFail2) - } - - } else { - assert.Errorf( - t, err, fname, - "%v should not match %s", - fname, def.InputFileRegexp.String(), - ) - } - } -} diff --git a/prover/cmd/controller/controller/jobs.go b/prover/cmd/controller/controller/jobs.go index e39a1e367..79e3f4bb6 100644 --- a/prover/cmd/controller/controller/jobs.go +++ b/prover/cmd/controller/controller/jobs.go @@ -103,12 +103,11 @@ func (j *Job) TmpResponseFile(c *config.Config) (s string) { return path.Join(j.Def.dirTo(), "tmp-response-file."+c.Controller.LocalID+".json") } -// Returns the name of the input file modified so that it is retried in -// large mode. It fails if the job's definition does not provide a suffix to -// retry in large mode. This is still unexpected because the configuration -// validation ensures that if there is an exit code amenable to defer the job to -// a larger machine, then the suffix must be set. If the status code of the -// prover is zero it will return an error. +// This function returns the name of the input file, modified to indicate that it should be retried in "large mode". +// It will fail if the job's configuration does not include a suffix for retrying in large mode. +// However, this situation is unexpected because the configuration validation ensures that if an exit code requires +// deferring the job to a larger machine, the suffix must be set. +// Additionally, if the prover's status code is zero (indicating success), the function will return an error. 
func (j *Job) DeferToLargeFile(status Status) (s string, err error) { // It's an invariant of the executor to not forget to set the status @@ -131,8 +130,8 @@ func (j *Job) DeferToLargeFile(status Status) (s string, err error) { logrus.Warnf( "Deferring the large machine but the input file `%v` already has"+ " the suffix %v. Still renaming it to %v, but it will likely"+ - " not be picked up again", - j.OriginalFile, suffixLarge, s, + // Returns the name of the input file modified so that it is retried in " not be picked up again", + j.OriginalFile, suffixLarge, s, ) } diff --git a/prover/config/config.go b/prover/config/config.go index f6426102f..c51c1ba4f 100644 --- a/prover/config/config.go +++ b/prover/config/config.go @@ -112,11 +112,16 @@ type Config struct { PublicInputInterconnection PublicInput `mapstructure:"public_input_interconnection"` // TODO add wizard compilation params // LIMITLESS PROVER Components - Bootstrap Bootstrap `mapstructure:"execution_bootstrap"` - GLExecution GLExecution `mapstructure:"execution_gl"` - RandomBeacon RandomBeacon `mapstructure:"execution_rndbeacon"` - LPPExecution LPPExecution `mapstructure:"execution_lpp"` - Conglomeration Conglomeration `mapstructure:"execution_conglomeration"` + Bootstrap_Submodule Execution `mapstructure:"execution_bootstrap_submodule"` + Bootstrap_Metadata Execution `mapstructure:"execution_bootstrap_submodule"` + + GLExecution Execution `mapstructure:"execution_gl"` + + RandomBeacon Execution `mapstructure:"execution_rndbeacon"` + + LPPExecution Execution `mapstructure:"execution_lpp"` + + Conglomeration Execution `mapstructure:"execution_conglomeration"` Debug struct { // Profiling indicates whether we want to generate profiles using the [runtime/pprof] pkg. 
@@ -219,29 +224,6 @@ type Execution struct { ConflatedTracesDir string `mapstructure:"conflated_traces_dir" validate:"required"` } -// TODO: Add and define Limitless prover components -type Bootstrap struct { - Execution -} - -type GLExecution struct { - Execution -} - -// Component with multiple input files -type RandomBeacon struct { - Bootstrap, GL Execution -} - -type LPPExecution struct { - Execution -} - -// Component with multiple input files -type Conglomeration struct { - GL, LPP Execution -} - type BlobDecompression struct { WithRequestDir `mapstructure:",squash"` From 497a5b2dc7631669dcac19bb2f212dfb8ff8daa8 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Fri, 31 Jan 2025 09:44:56 +0000 Subject: [PATCH 09/48] redefine resp outputfile patterns --- .../controller/job_definition_limitless.go | 75 ++++++---- .../job_definition_limitless_test.go | 140 ++++++++++++++++-- prover/cmd/controller/controller/jobs.go | 15 +- 3 files changed, 184 insertions(+), 46 deletions(-) diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index a5cca7747..ee151b089 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -11,8 +11,8 @@ import ( // These jobs will execute asynchronously based on their set priorities const ( // Bootstrap - job_Exec_Bootstrap_Submodule = "exec-bootstrap-submodule" - job_Exec_Bootstrap_Metadata = "exec-bootstrap-metadata" + job_Exec_Bootstrap_GLSubmodule = "exec-bootstrap-GLsubmodule" + job_Exec_Bootstrap_DistMetadata = "exec-bootstrap-metadata" // Global-Local subprovers job_Exec_GL_RndBeacon = "exec-GL-rndbeacon" @@ -33,8 +33,8 @@ const ( // Priorities const ( - priority_Exec_Bootstrap_Submodule = 0 - priority_Exec_Bootstrap_Metadata = 0 + priority_Exec_Bootstrap_GLSubmodule = 0 + priority_Exec_Bootstrap_DistMetadata = 0 priority_Exec_GL_RndBeacon = 1 priority_Exec_GL = 1 @@ 
-51,10 +51,10 @@ const ( // Input file patterns const ( - exec_Bootstrap_Submodule_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` - exec_Bootstrap_MetaData_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` + exec_Bootstrap_GLSubmodule_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` + exec_Bootstrap_DistMetaData_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` - exec_GL_RndBeacon_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_Bootstrap_Submodule\.json%v(\.failure\.%v_[0-9]+)*$` + exec_GL_RndBeacon_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_Bootstrap_GLSubmodule\.json%v(\.failure\.%v_[0-9]+)*$` exec_RndBeacon_Metadata_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` exec_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_GL_Beacon\.json%v(\.failure\.%v_[0-9]+)*$` @@ -68,42 +68,39 @@ const ( // Ouput File patterns and templates const ( - exec_Bootstrap_Submodule_File = "{{.Start}}-{{.End}}-getZkProof_Bootstrap_Submodule.json" - exec_Bootstrap_Submodule_Tmpl = "exec-bootstrap-submodule-req-file" + exec_Bootstrap_GLSubmodule_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_Bootstrap_GLSubmodule.json" + exec_Bootstrap_Submodule_Tmpl = "exec-bootstrap-GLsubmodule-req-file" - exec_Bootstrap_DistMetadata_File = "{{.Start}}-{{.End}}-getZKProof_Bootstrap_DistMetadata.json" + exec_Bootstrap_DistMetadata_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_Bootstrap_DistMetadata.json" exec_Bootstrap_DistMetadata_Tmpl = "exec-bootstrap-submodule-distmetadata-file" // Global-Local subprovers - exec_GL_Beacon_File = "{{.Start}}-{{.End}}-getZKProof_GL_Beacon.json" - 
exec_GL_Beacon_Tmpl = "exec-GL-Beacon-file" + exec_GL_RndBeacon_File = "{{.Start}}-{{.End}}-getZkProof_GL_Beacon.json" + exec_GL_RndBeacon_Tmpl = "exec-GL-Beacon-file" - exec_GL_File = "{{.Start}}-{{.End}}-getZKProof_GL.json" + exec_GL_File = "{{.Start}}-{{.End}}-getZkProof_GL.json" exec_GL_Tmpl = "exec-GL-output-file" // Random Beacon - exec_RndBeacon_DistMetadata_File = "{{.Start}}-{{.End}}-getZKProof_Bootstrap_DistMetadata.json" + exec_RndBeacon_DistMetadata_File = "{{.Start}}-{{.End}}-getZkProof_Bootstrap_DistMetadata.json" - exec_RndBeacon_GL_File = "{{.Start}}-{{.End}}-getZKProof_GL_Beacon.json" + exec_RndBeacon_GL_File = "{{.Start}}-{{.End}}-getZkProof_GL_Beacon.json" - exec_RndBeacon_File = "{{.Start}}-{{.End}}-getZKProof_RndBeacon.json" + exec_RndBeacon_File = "{{.Start}}-{{.End}}-getZkProof_RndBeacon.json" exec_RndBeacon_Tmpl = "exec-rndbeacon-output-file" // LPP-subprovers - exec_LPP_File = "{{.Start}}-{{.End}}-getZKProof_LPP.json" + exec_LPP_File = "{{.Start}}-{{.End}}-getZkProof_LPP.json" exec_LPP_Tmpl = "exec-LPP-output-file" // Conglomerator - // exec_Congolomerate_GL_File = "{{.Start}}-{{.End}}-.getZKProof_GL.json" - - // exec_Congolomerate_LPP_File = "{{.Start}}-{{.End}}-.getZKProof_LPP.json" - - // exec_Congolomerate_Metadata_File = "{{.Start}}-{{.End}}-.getZKProof_Bootstrap_DistMetadata.json" - - exec_Congolomerate_File = "{{.Start}}-{{.End}}-.getZKProof.json" + exec_Congolomerate_File = "{{.Start}}-{{.End}}-.getZkProof.json" exec_Congolomerate_Tmpl = "exec-output-file" ) +// createJobDefinition creates a new JobDefinition with the provided parameters. +// It sets up the job's name, priority, request directory, input file pattern, and output template. +// The function returns a pointer to the JobDefinition and an error if any occurs during the setup. 
func createJobDefinition(name string, priority int, reqRootDir, inputFilePattern string, outputTmpl, outputFileName string) (*JobDefinition, error) { @@ -136,22 +133,38 @@ func createJobDefinition(name string, priority int, }, nil } -func BootstrapSubModDefinition(conf *config.Config) (*JobDefinition, error) { +// BootstrapGLSubModDefinition creates a job definition for the Bootstrap GL Submodule job. +// It sets the input file pattern based on the configuration and creates the job definition +// with the appropriate parameters. +func BootstrapGLSubModDefinition(conf *config.Config) (*JobDefinition, error) { inpFileExt := "" if conf.Bootstrap_Submodule.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } - inputFilePattern := fmt.Sprintf(exec_Bootstrap_Submodule_InputPattern, inpFileExt, config.FailSuffix) - return createJobDefinition(job_Exec_Bootstrap_Submodule, priority_Exec_Bootstrap_Submodule, - conf.Bootstrap_Submodule.RequestsRootDir, inputFilePattern, exec_Bootstrap_Submodule_Tmpl, exec_Bootstrap_Submodule_File) + inputFilePattern := fmt.Sprintf(exec_Bootstrap_GLSubmodule_InputPattern, inpFileExt, config.FailSuffix) + return createJobDefinition(job_Exec_Bootstrap_GLSubmodule, priority_Exec_Bootstrap_GLSubmodule, + conf.Bootstrap_Submodule.RequestsRootDir, inputFilePattern, exec_Bootstrap_Submodule_Tmpl, exec_Bootstrap_GLSubmodule_File) } -func BootstrapMetadataDefinition(conf *config.Config) (*JobDefinition, error) { +// BootstrapDistMetadataDefinition creates a job definition for the Bootstrap Metadata job. +// It sets the input file pattern based on the configuration and creates the job definition +// with the appropriate parameters. 
+func BootstrapDistMetadataDefinition(conf *config.Config) (*JobDefinition, error) { inpFileExt := "" if conf.Bootstrap_Metadata.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } - inputFilePattern := fmt.Sprintf(exec_Bootstrap_MetaData_InputPattern, inpFileExt, config.FailSuffix) - return createJobDefinition(job_Exec_Bootstrap_Metadata, priority_Exec_Bootstrap_Metadata, + inputFilePattern := fmt.Sprintf(exec_Bootstrap_DistMetaData_InputPattern, inpFileExt, config.FailSuffix) + return createJobDefinition(job_Exec_Bootstrap_DistMetadata, priority_Exec_Bootstrap_DistMetadata, conf.Bootstrap_Metadata.RequestsRootDir, inputFilePattern, exec_Bootstrap_DistMetadata_Tmpl, exec_Bootstrap_DistMetadata_File) } + +func GLRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { + inpFileExt := "" + if conf.GLExecution.CanRunFullLarge { + inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + inputFilePattern := fmt.Sprintf(exec_GL_RndBeacon_InputPattern, inpFileExt, config.FailSuffix) + return createJobDefinition(job_Exec_GL_RndBeacon, priority_Exec_GL_RndBeacon, + conf.GLExecution.RequestsRootDir, inputFilePattern, exec_GL_RndBeacon_Tmpl, exec_GL_RndBeacon_File) +} diff --git a/prover/cmd/controller/controller/job_definition_limitless_test.go b/prover/cmd/controller/controller/job_definition_limitless_test.go index 278eba137..d081fb8c3 100644 --- a/prover/cmd/controller/controller/job_definition_limitless_test.go +++ b/prover/cmd/controller/controller/job_definition_limitless_test.go @@ -27,16 +27,18 @@ func TestBootstrapSubModInFileRegexp(t *testing.T) { // The responses in case of success var ( - respM = "responses/102-103-getZkProof_Bootstrap_Submodule.json" - respL = "responses/102-103-getZkProof_Bootstrap_Submodule.json" + respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" // #nosec G101 -- Not a credential - 
respWithFailM = "responses/102-103-getZkProof_Bootstrap_Submodule.json" + respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" // #nosec G101 -- Not a credential - respWithFailL = "responses/102-103-getZkProof_Bootstrap_Submodule.json" + respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" // #nosec G101 -- Not a credential - respWith2FailsM = "responses/102-103-getZkProof_Bootstrap_Submodule.json" + respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" // #nosec G101 -- Not a credential - respWith2FailsL = "responses/102-103-getZkProof_Bootstrap_Submodule.json" + respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_Bootstrap_GLSubmodule.json" ) // The rename in case it is deferred to the large prover @@ -77,7 +79,7 @@ func TestBootstrapSubModInFileRegexp(t *testing.T) { Ext: "", Fail: "code", ShouldMatch: true, Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, Explainer: "happy path, case M", - ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, + ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, @@ -113,14 +115,132 @@ func TestBootstrapSubModInFileRegexp(t *testing.T) { } for _, c := range testcase { + conf := config.Config{} + conf.Version = "0.1.2" + conf.Bootstrap_Submodule.CanRunFullLarge = c.Ext == "large" + + def, err := BootstrapGLSubModDefinition(&conf) + assert.NoError(t, err) + + 
t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCase(t, def, c) + }) + } +} + +// This tests ensures that the naming convention is respected by the file-watcher +// i.e., files with the right naming only are recognized. And the corresponding +// output files are also recognized. +func TestBootstrapMetaDataInFileRegexp(t *testing.T) { - //log.Printf("Running testcase:%s \n", c.Explainer) + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json" + correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77" + correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77.failure.code_77" + correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZkProof.json" + missingStv = "102-103-etv0.2.3-getZkProof.json" + notAPoint = "102-103-etv0.2.3-getZkProofAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + // The responses in case of success + var ( + respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + // #nosec G101 -- Not a credential + respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + // #nosec G101 -- Not a credential + respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + // #nosec G101 -- Not a credential + respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + // #nosec G101 -- Not a credential + respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + respWoStv = 
"responses/102-103-etv0.2.3-stv-getZkProof_Bootstrap_DistMetadata.json" + ) + + // The rename in case it is deferred to the large prover + var ( + toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" + toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" + toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" + toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof.json.large.failure.code_137" + toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof.json.large.failure.code_137" + ) + + // The rename in case it is a success + var ( + successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" + successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.success" + successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.success" + successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" + successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" + successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" + successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" + successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" + ) + + // The rename in case it is a panic (code = 2) + var ( + failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" + failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.failure.code_2" + failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.failure.code_2" + failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" + failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" + failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" + failWith2FailsM = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" + failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" + ) + + testcase := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Explainer: "happy path, case M", + ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, + ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, + ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, + ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, + }, + { + Ext: "large", Fail: "code", ShouldMatch: true, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "happy path, case L", + ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, + ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, + ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "M does not pick the files reserved for L", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, + Explainer: "L does not pick the files reserved for M", + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{notAPoint, badName}, + Explainer: "M does not pick obviously invalid files", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{missingEtv, missingStv, notAPoint, badName}, + Explainer: "L does not pick obviously invalid files", + }, + } + + for _, c := range testcase { conf := config.Config{} conf.Version = "0.1.2" - conf.Bootstrap_Submodule.CanRunFullLarge = c.Ext == "large" + 
conf.Bootstrap_Metadata.CanRunFullLarge = c.Ext == "large" - def, err := BootstrapSubModDefinition(&conf) + def, err := BootstrapDistMetadataDefinition(&conf) assert.NoError(t, err) t.Run(c.Explainer, func(t *testing.T) { diff --git a/prover/cmd/controller/controller/jobs.go b/prover/cmd/controller/controller/jobs.go index 79e3f4bb6..2d7e110a2 100644 --- a/prover/cmd/controller/controller/jobs.go +++ b/prover/cmd/controller/controller/jobs.go @@ -25,9 +25,14 @@ type Job struct { Start int End int - VersionExecutionTracer string - VersionStateManager string - VersionCompressor string + // Execution Trace version + Etv string + + // State Manager Trace version + Stv string + + // Compressor version ccv + VersionCompressor string // The hex string of the content hash ContentHash string @@ -62,8 +67,8 @@ func NewJob(jdef *JobDefinition, filename string) (j *Job, err error) { j.Start = intIfRegexpNotNil(regs.Start, filename) j.End = intIfRegexpNotNil(regs.End, filename) j.VersionCompressor = stringIfRegexpNotNil(regs.Cv, filename) - j.VersionExecutionTracer = stringIfRegexpNotNil(regs.Etv, filename) - j.VersionStateManager = stringIfRegexpNotNil(regs.Stv, filename) + j.Etv = stringIfRegexpNotNil(regs.Etv, filename) + j.Stv = stringIfRegexpNotNil(regs.Stv, filename) j.ContentHash = stringIfRegexpNotNil(regs.ContentHash, filename) return j, nil From 722ad2b3a46c33ade70fce6bab124ee6ed291f7c Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Fri, 31 Jan 2025 14:05:36 +0000 Subject: [PATCH 10/48] limitless prover job definitions with unit tests --- .../controller/job_definition_limitless.go | 96 ++- .../job_definition_limitless_test.go | 594 +++++++++++++++++- prover/config/config.go | 20 +- 3 files changed, 677 insertions(+), 33 deletions(-) diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index ee151b089..8cadf02be 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go 
+++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -19,8 +19,8 @@ const ( job_Exec_GL = "exec-GL" // Random Beacon - job_Exec_RndBeacon_LPP = "exec-rndbeacon-LPP" - job_Exec_RndBeacon_Metadata = "exec-rndbeacon-metadata" + job_Exec_RndBeacon_LPP = "exec-rndbeacon" + job_Exec_Bootstrap_RndBeacon = "exec-bootstrap-rndbeacon" // LPP-subprovers job_Exec_LPP = "exec-LPP" @@ -39,8 +39,8 @@ const ( priority_Exec_GL_RndBeacon = 1 priority_Exec_GL = 1 - priority_Exec_RndBeacon_LPP = 2 - priority_Exec_RndBeacon_Metadata = 2 + priority_Exec_RndBeacon_LPP = 2 + priority_Exec_Bootstrap_RndBeacon = 2 priority_Exec_LPP = 3 @@ -51,19 +51,23 @@ const ( // Input file patterns const ( - exec_Bootstrap_GLSubmodule_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` - exec_Bootstrap_DistMetaData_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` + // Bootstrap I/p file is the usual execution req. 
file + exec_Bootstrap_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` - exec_GL_RndBeacon_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_Bootstrap_GLSubmodule\.json%v(\.failure\.%v_[0-9]+)*$` + // GL input + exec_Bootstrap_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_GLSubmodule\.json%v(\.failure\.%v_[0-9]+)*$` - exec_RndBeacon_Metadata_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` - exec_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_GL_Beacon\.json%v(\.failure\.%v_[0-9]+)*$` + // Rnd Beacon I/p + exec_Bootstrap_RndBeacon_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` + exec_GL_RndBeacon_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_GL_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` - exec_LPP_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` + // LPP Input + exec_LPP_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` - exec_Congolomerate_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_GL\.json%v(\.failure\.%v_[0-9]+)*$` - exec_Congolomerate_LPP_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_LPP\.json%v(\.failure\.%v_[0-9]+)*$` - exec_Congolomerate_Metadata_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZKProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` + // Conglomerator Input + exec_Conglomerate_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_GL\.json%v(\.failure\.%v_[0-9]+)*$` + exec_Conglomerate_LPP_InputPattern = 
`^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_LPP\.json%v(\.failure\.%v_[0-9]+)*$` + exec_Conglomerate_DistMetadata_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` ) // Ouput File patterns and templates @@ -75,22 +79,18 @@ const ( exec_Bootstrap_DistMetadata_Tmpl = "exec-bootstrap-submodule-distmetadata-file" // Global-Local subprovers - exec_GL_RndBeacon_File = "{{.Start}}-{{.End}}-getZkProof_GL_Beacon.json" + exec_GL_RndBeacon_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_GL_RndBeacon.json" exec_GL_RndBeacon_Tmpl = "exec-GL-Beacon-file" - exec_GL_File = "{{.Start}}-{{.End}}-getZkProof_GL.json" + exec_GL_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_GL.json" exec_GL_Tmpl = "exec-GL-output-file" // Random Beacon - exec_RndBeacon_DistMetadata_File = "{{.Start}}-{{.End}}-getZkProof_Bootstrap_DistMetadata.json" - - exec_RndBeacon_GL_File = "{{.Start}}-{{.End}}-getZkProof_GL_Beacon.json" - - exec_RndBeacon_File = "{{.Start}}-{{.End}}-getZkProof_RndBeacon.json" + exec_RndBeacon_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_RndBeacon.json" exec_RndBeacon_Tmpl = "exec-rndbeacon-output-file" // LPP-subprovers - exec_LPP_File = "{{.Start}}-{{.End}}-getZkProof_LPP.json" + exec_LPP_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_LPP.json" exec_LPP_Tmpl = "exec-LPP-output-file" // Conglomerator @@ -138,12 +138,12 @@ func createJobDefinition(name string, priority int, // with the appropriate parameters. 
func BootstrapGLSubModDefinition(conf *config.Config) (*JobDefinition, error) { inpFileExt := "" - if conf.Bootstrap_Submodule.CanRunFullLarge { + if conf.Bootstrap.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } - inputFilePattern := fmt.Sprintf(exec_Bootstrap_GLSubmodule_InputPattern, inpFileExt, config.FailSuffix) + inputFilePattern := fmt.Sprintf(exec_Bootstrap_InputPattern, inpFileExt, config.FailSuffix) return createJobDefinition(job_Exec_Bootstrap_GLSubmodule, priority_Exec_Bootstrap_GLSubmodule, - conf.Bootstrap_Submodule.RequestsRootDir, inputFilePattern, exec_Bootstrap_Submodule_Tmpl, exec_Bootstrap_GLSubmodule_File) + conf.Bootstrap.RequestsRootDir, inputFilePattern, exec_Bootstrap_Submodule_Tmpl, exec_Bootstrap_GLSubmodule_File) } // BootstrapDistMetadataDefinition creates a job definition for the Bootstrap Metadata job. @@ -151,12 +151,12 @@ func BootstrapGLSubModDefinition(conf *config.Config) (*JobDefinition, error) { // with the appropriate parameters. 
func BootstrapDistMetadataDefinition(conf *config.Config) (*JobDefinition, error) { inpFileExt := "" - if conf.Bootstrap_Metadata.CanRunFullLarge { + if conf.Bootstrap.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } - inputFilePattern := fmt.Sprintf(exec_Bootstrap_DistMetaData_InputPattern, inpFileExt, config.FailSuffix) + inputFilePattern := fmt.Sprintf(exec_Bootstrap_InputPattern, inpFileExt, config.FailSuffix) return createJobDefinition(job_Exec_Bootstrap_DistMetadata, priority_Exec_Bootstrap_DistMetadata, - conf.Bootstrap_Metadata.RequestsRootDir, inputFilePattern, exec_Bootstrap_DistMetadata_Tmpl, exec_Bootstrap_DistMetadata_File) + conf.Bootstrap.RequestsRootDir, inputFilePattern, exec_Bootstrap_DistMetadata_Tmpl, exec_Bootstrap_DistMetadata_File) } func GLRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { @@ -164,7 +164,47 @@ func GLRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { if conf.GLExecution.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } - inputFilePattern := fmt.Sprintf(exec_GL_RndBeacon_InputPattern, inpFileExt, config.FailSuffix) + inputFilePattern := fmt.Sprintf(exec_Bootstrap_GL_InputPattern, inpFileExt, config.FailSuffix) return createJobDefinition(job_Exec_GL_RndBeacon, priority_Exec_GL_RndBeacon, conf.GLExecution.RequestsRootDir, inputFilePattern, exec_GL_RndBeacon_Tmpl, exec_GL_RndBeacon_File) } + +func GLDefinition(conf *config.Config) (*JobDefinition, error) { + inpFileExt := "" + if conf.GLExecution.CanRunFullLarge { + inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + inputFilePattern := fmt.Sprintf(exec_Bootstrap_GL_InputPattern, inpFileExt, config.FailSuffix) + return createJobDefinition(job_Exec_GL, priority_Exec_GL, + conf.GLExecution.RequestsRootDir, inputFilePattern, exec_GL_Tmpl, exec_GL_File) +} + +func BootstrapRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { + inpFileExt := "" + if conf.RndBeacon.CanRunFullLarge { + 
inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + inputFilePattern := fmt.Sprintf(exec_Bootstrap_RndBeacon_InputPattern, inpFileExt, config.FailSuffix) + return createJobDefinition(job_Exec_Bootstrap_RndBeacon, priority_Exec_Bootstrap_RndBeacon, + conf.RndBeacon.MetaData.RequestsRootDir, inputFilePattern, exec_RndBeacon_Tmpl, exec_RndBeacon_File) +} + +func RndBeaconLPPDefinition(conf *config.Config) (*JobDefinition, error) { + inpFileExt := "" + if conf.RndBeacon.CanRunFullLarge { + inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + inputFilePattern := fmt.Sprintf(exec_GL_RndBeacon_InputPattern, inpFileExt, config.FailSuffix) + return createJobDefinition(job_Exec_RndBeacon_LPP, priority_Exec_RndBeacon_LPP, + conf.RndBeacon.GL.RequestsRootDir, inputFilePattern, exec_RndBeacon_Tmpl, exec_RndBeacon_File) +} + +func LPPDefinition(conf *config.Config) (*JobDefinition, error) { + inpFileExt := "" + if conf.LPPExecution.CanRunFullLarge { + inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + inputFilePattern := fmt.Sprintf(exec_LPP_InputPattern, inpFileExt, config.FailSuffix) + return createJobDefinition(job_Exec_LPP, priority_Exec_LPP, + conf.LPPExecution.RequestsRootDir, inputFilePattern, exec_LPP_Tmpl, exec_LPP_File) +} diff --git a/prover/cmd/controller/controller/job_definition_limitless_test.go b/prover/cmd/controller/controller/job_definition_limitless_test.go index d081fb8c3..12b8d4a99 100644 --- a/prover/cmd/controller/controller/job_definition_limitless_test.go +++ b/prover/cmd/controller/controller/job_definition_limitless_test.go @@ -117,7 +117,7 @@ func TestBootstrapSubModInFileRegexp(t *testing.T) { for _, c := range testcase { conf := config.Config{} conf.Version = "0.1.2" - conf.Bootstrap_Submodule.CanRunFullLarge = c.Ext == "large" + conf.Bootstrap.CanRunFullLarge = c.Ext == "large" def, err := BootstrapGLSubModDefinition(&conf) assert.NoError(t, err) @@ -238,7 +238,7 @@ func TestBootstrapMetaDataInFileRegexp(t *testing.T) { for _, 
c := range testcase { conf := config.Config{} conf.Version = "0.1.2" - conf.Bootstrap_Metadata.CanRunFullLarge = c.Ext == "large" + conf.Bootstrap.CanRunFullLarge = c.Ext == "large" def, err := BootstrapDistMetadataDefinition(&conf) assert.NoError(t, err) @@ -248,3 +248,593 @@ func TestBootstrapMetaDataInFileRegexp(t *testing.T) { }) } } + +func TestGLRndBeaconInFileRegexp(t *testing.T) { + + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77" + correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77.failure.code_77" + correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json" + notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_SubmoduleAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + // The responses in case of success + var ( + respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" + respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" + // #nosec G101 -- Not a credential + respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" + // #nosec G101 -- Not a credential + respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" + // #nosec G101 -- Not a credential + respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" + // #nosec G101 -- Not a credential + respWith2FailsL = 
"responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" + respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_GL_RndBeacon.json" + respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_GL_RndBeacon.json" + ) + + // The rename in case it is deferred to the large prover + var ( + toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + ) + + // The rename in case it is a success + var ( + successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" + successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" + successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" + ) + + // The rename in case it is a panic (code = 2) + var ( + failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failMWoStv = 
"requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" + failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" + failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" + ) + + testCases := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Explainer: "happy path, case M", + ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, + ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, + ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, + ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, + }, + { + Ext: "large", Fail: "code", ShouldMatch: true, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "happy path, case L", + ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, + ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, + ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "M does not pick the files reserved for L", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{correctM, 
correctWithFailM, correctWith2FailsM}, + Explainer: "L does not pick the files reserved for M", + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{notAPoint, badName}, + Explainer: "M does not pick obviously invalid files", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{missingEtv, missingStv, notAPoint, badName}, + Explainer: "L does not pick obviously invalid files", + }, + } + + for _, c := range testCases { + conf := config.Config{} + conf.Version = "0.1.2" + conf.GLExecution.CanRunFullLarge = c.Ext == "large" + + def, err := GLRndBeaconDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCase(t, def, c) + }) + } +} + +func TestGLInFileRegexp(t *testing.T) { + + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77" + correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77.failure.code_77" + correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json" + notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_SubmoduleAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + // The responses in case of success + var ( + respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" + respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" + // #nosec G101 -- Not a credential + respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" + // #nosec G101 
-- Not a credential + respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" + // #nosec G101 -- Not a credential + respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" + // #nosec G101 -- Not a credential + respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" + respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_GL.json" + respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_GL.json" + ) + + // The rename in case it is deferred to the large prover + var ( + toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + ) + + // The rename in case it is a success + var ( + successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" + successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" + successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" 
+ ) + + // The rename in case it is a panic (code = 2) + var ( + failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" + failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" + failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" + ) + + testCases := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Explainer: "happy path, case M", + ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, + ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, + ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, + ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, + }, + { + Ext: "large", Fail: "code", ShouldMatch: true, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "happy path, case L", + ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, + ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, + ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{correctL, 
correctWithFailL, correctWith2FailsL}, + Explainer: "M does not pick the files reserved for L", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, + Explainer: "L does not pick the files reserved for M", + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{notAPoint, badName}, + Explainer: "M does not pick obviously invalid files", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{missingEtv, missingStv, notAPoint, badName}, + Explainer: "L does not pick obviously invalid files", + }, + } + + for _, c := range testCases { + conf := config.Config{} + conf.Version = "0.1.2" + conf.GLExecution.CanRunFullLarge = c.Ext == "large" + + def, err := GLDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCase(t, def, c) + }) + } +} + +func TestBootstrapRndBeaconInFileRegexp(t *testing.T) { + + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77" + correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77.failure.code_77" + correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json" + notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadataAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + // The responses in case of success + var ( + respM = 
"responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + // #nosec G101 -- Not a credential + respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + // #nosec G101 -- Not a credential + respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + // #nosec G101 -- Not a credential + respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + // #nosec G101 -- Not a credential + respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_RndBeacon.json" + respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_RndBeacon.json" + ) + + // The rename in case it is deferred to the large prover + var ( + toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + ) + + // The rename in case it is a success + var ( + successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" + successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successWithFailL = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" + successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" + ) + + // The rename in case it is a panic (code = 2) + var ( + failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" + failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" + failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" + ) + + testCases := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Explainer: "happy path, case M", + ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, + ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, + ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, + ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, + }, + { + Ext: "large", Fail: "code", ShouldMatch: true, + Fnames: []string{correctL, correctWithFailL, 
correctWith2FailsL}, + Explainer: "happy path, case L", + ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, + ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, + ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "M does not pick the files reserved for L", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, + Explainer: "L does not pick the files reserved for M", + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{notAPoint, badName}, + Explainer: "M does not pick obviously invalid files", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{missingEtv, missingStv, notAPoint, badName}, + Explainer: "L does not pick obviously invalid files", + }, + } + + for _, c := range testCases { + conf := config.Config{} + conf.Version = "0.1.2" + conf.RndBeacon.CanRunFullLarge = c.Ext == "large" + + def, err := BootstrapRndBeaconDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCase(t, def, c) + }) + } +} + +func TestRndBeaconLPPInFileRegexp(t *testing.T) { + + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" + correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_77" + correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_77.failure.code_77" + correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZkProof_GL_RndBeacon.json" + missingStv = 
"102-103-etv0.2.3-getZkProof_GL_RndBeacon.json" + notAPoint = "102-103-etv0.2.3-getZkProof_GL_RndBeaconAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + // The responses in case of success + var ( + respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + // #nosec G101 -- Not a credential + respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + // #nosec G101 -- Not a credential + respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + // #nosec G101 -- Not a credential + respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + // #nosec G101 -- Not a credential + respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_RndBeacon.json" + respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_RndBeacon.json" + ) + + // The rename in case it is deferred to the large prover + var ( + toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" + toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" + toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" + toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" + toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" + ) + + // The rename in case it is a success + var ( + successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" + successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.success" + successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.success" + successL = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" + successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" + successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" + successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" + successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" + ) + + // The rename in case it is a panic (code = 2) + var ( + failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" + failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" + failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" + failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" + failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" + failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" + failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" + failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" + ) + + testCases := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Explainer: "happy path, case M", + ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, + ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, + ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, + ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, + }, + { + Ext: "large", 
Fail: "code", ShouldMatch: true, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "happy path, case L", + ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, + ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, + ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "M does not pick the files reserved for L", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, + Explainer: "L does not pick the files reserved for M", + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{notAPoint, badName}, + Explainer: "M does not pick obviously invalid files", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{missingEtv, missingStv, notAPoint, badName}, + Explainer: "L does not pick obviously invalid files", + }, + } + + for _, c := range testCases { + conf := config.Config{} + conf.Version = "0.1.2" + conf.RndBeacon.CanRunFullLarge = c.Ext == "large" + + def, err := RndBeaconLPPDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCase(t, def, c) + }) + } +} + +func TestLPPInFileRegexp(t *testing.T) { + + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_77" + correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_77.failure.code_77" + correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZkProof_RndBeacon.json" 
+ missingStv = "102-103-etv0.2.3-getZkProof_RndBeacon.json" + notAPoint = "102-103-etv0.2.3-getZkProof_RndBeaconAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + // The responses in case of success + var ( + respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" + respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" + // #nosec G101 -- Not a credential + respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" + // #nosec G101 -- Not a credential + respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" + // #nosec G101 -- Not a credential + respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" + // #nosec G101 -- Not a credential + respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" + respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_LPP.json" + respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_LPP.json" + ) + + // The rename in case it is deferred to the large prover + var ( + toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" + toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" + toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" + toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" + toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_RndBeacon.json.large.failure.code_137" + ) + + // The rename in case it is a success + var ( + successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" + successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_RndBeacon.json.success" + successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_RndBeacon.json.success" + successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" + successWithFailM = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" + successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" + successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" + successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" + ) + + // The rename in case it is a panic (code = 2) + var ( + failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" + failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_RndBeacon.json.failure.code_2" + failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" + failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" + failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" + failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" + failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" + failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" + ) + + testCases := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Explainer: "happy path, case M", + ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, + ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, + ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, + ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, + }, + { + Ext: "large", Fail: "code", ShouldMatch: true, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "happy path, case L", + 
ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, + ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, + ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "M does not pick the files reserved for L", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, + Explainer: "L does not pick the files reserved for M", + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{notAPoint, badName}, + Explainer: "M does not pick obviously invalid files", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{missingEtv, missingStv, notAPoint, badName}, + Explainer: "L does not pick obviously invalid files", + }, + } + + for _, c := range testCases { + conf := config.Config{} + conf.Version = "0.1.2" + conf.LPPExecution.CanRunFullLarge = c.Ext == "large" + + def, err := LPPDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCase(t, def, c) + }) + } +} diff --git a/prover/config/config.go b/prover/config/config.go index c51c1ba4f..7681d8401 100644 --- a/prover/config/config.go +++ b/prover/config/config.go @@ -112,12 +112,11 @@ type Config struct { PublicInputInterconnection PublicInput `mapstructure:"public_input_interconnection"` // TODO add wizard compilation params // LIMITLESS PROVER Components - Bootstrap_Submodule Execution `mapstructure:"execution_bootstrap_submodule"` - Bootstrap_Metadata Execution `mapstructure:"execution_bootstrap_submodule"` + Bootstrap Execution `mapstructure:"execution_bootstrap"` GLExecution Execution `mapstructure:"execution_gl"` - RandomBeacon Execution `mapstructure:"execution_rndbeacon"` + RndBeacon RndBeacon `mapstructure:"execution_rndbeacon"` LPPExecution Execution `mapstructure:"execution_lpp"` @@ -151,6 
+150,21 @@ type Config struct { TracesLimitsLarge TracesLimits `mapstructure:"traces_limits_large" validate:"required"` } +type RndBeacon struct { + GL WithRequestDir `mapstructure:",squash"` + + MetaData WithRequestDir `mapstructure:",squash"` + + // ProverMode stores the kind of prover to use. + ProverMode ProverMode `mapstructure:"prover_mode" validate:"required,oneof=dev partial full proofless bench check-only encode-only"` + + // CanRunFullLarge indicates whether the prover is running on a large machine (and can run full large traces). + CanRunFullLarge bool `mapstructure:"can_run_full_large"` + + // ConflatedTracesDir stores the directory where the conflation traces are stored. + ConflatedTracesDir string `mapstructure:"conflated_traces_dir" validate:"required"` +} + func (cfg *Config) Logger() *logrus.Logger { // TODO @gbotrel revisit. return logrus.StandardLogger() From 8c30fcb5a35d819d0e55c64af00dad68b2a09f28 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Fri, 31 Jan 2025 15:28:55 +0000 Subject: [PATCH 11/48] add all limitless job def. 
with unit tests (success) --- .../controller/job_definition_limitless.go | 44 ++- .../job_definition_limitless_test.go | 348 ++++++++++++++++++ prover/config/config.go | 19 +- 3 files changed, 403 insertions(+), 8 deletions(-) diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index 8cadf02be..42b149960 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -26,9 +26,9 @@ const ( job_Exec_LPP = "exec-LPP" // Conglomerator - job_Exec_Congolomerate_LPP = "exec-congolo-LPP" - job_Exec_Congolomerate_GL = "exec-congolo-GL" - job_Exec_Congolomerate_Metadata = "exec-congolo-metadata" + job_Exec_Congolomerate_LPP = "exec-congolo-LPP" + job_Exec_Congolomerate_GL = "exec-congolo-GL" + job_Exec_Congolomerate_Bootstrap_Metadata = "exec-congolo-metadata" ) // Priorities @@ -65,9 +65,9 @@ const ( exec_LPP_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` // Conglomerator Input - exec_Conglomerate_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_GL\.json%v(\.failure\.%v_[0-9]+)*$` - exec_Conglomerate_LPP_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_LPP\.json%v(\.failure\.%v_[0-9]+)*$` - exec_Conglomerate_DistMetadata_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` + exec_Conglomerate_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_GL\.json%v(\.failure\.%v_[0-9]+)*$` + exec_Conglomerate_LPP_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_LPP\.json%v(\.failure\.%v_[0-9]+)*$` + exec_Conglomerate_Bootstrap_DistMetadata_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` ) // Ouput File 
patterns and templates @@ -94,7 +94,7 @@ const ( exec_LPP_Tmpl = "exec-LPP-output-file" // Conglomerator - exec_Congolomerate_File = "{{.Start}}-{{.End}}-.getZkProof.json" + exec_Congolomerate_File = "{{.Start}}-{{.End}}-getZkProof.json" exec_Congolomerate_Tmpl = "exec-output-file" ) @@ -208,3 +208,33 @@ func LPPDefinition(conf *config.Config) (*JobDefinition, error) { return createJobDefinition(job_Exec_LPP, priority_Exec_LPP, conf.LPPExecution.RequestsRootDir, inputFilePattern, exec_LPP_Tmpl, exec_LPP_File) } + +func ConglomerateDistMetadataDefinition(conf *config.Config) (*JobDefinition, error) { + inpFileExt := "" + if conf.Conglomeration.CanRunFullLarge { + inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + inputFilePattern := fmt.Sprintf(exec_Conglomerate_Bootstrap_DistMetadata_InputPattern, inpFileExt, config.FailSuffix) + return createJobDefinition(job_Exec_Congolomerate_Bootstrap_Metadata, priority_Exec_Congolomerate_Metadata, + conf.Conglomeration.BootstrapMetadata.RequestsRootDir, inputFilePattern, exec_Congolomerate_Tmpl, exec_Congolomerate_File) +} + +func ConglomerateGLDefinition(conf *config.Config) (*JobDefinition, error) { + inpFileExt := "" + if conf.Conglomeration.CanRunFullLarge { + inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + inputFilePattern := fmt.Sprintf(exec_Conglomerate_GL_InputPattern, inpFileExt, config.FailSuffix) + return createJobDefinition(job_Exec_Congolomerate_GL, priority_Exec_Congolomerate_GL, + conf.Conglomeration.GL.RequestsRootDir, inputFilePattern, exec_Congolomerate_Tmpl, exec_Congolomerate_File) +} + +func ConglomerateLPPDefinition(conf *config.Config) (*JobDefinition, error) { + inpFileExt := "" + if conf.Conglomeration.CanRunFullLarge { + inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + inputFilePattern := fmt.Sprintf(exec_Conglomerate_LPP_InputPattern, inpFileExt, config.FailSuffix) + return createJobDefinition(job_Exec_Congolomerate_LPP, priority_Exec_Congolomerate_LPP, + 
conf.Conglomeration.LPP.RequestsRootDir, inputFilePattern, exec_Congolomerate_Tmpl, exec_Congolomerate_File) +} diff --git a/prover/cmd/controller/controller/job_definition_limitless_test.go b/prover/cmd/controller/controller/job_definition_limitless_test.go index 12b8d4a99..f8a972e6a 100644 --- a/prover/cmd/controller/controller/job_definition_limitless_test.go +++ b/prover/cmd/controller/controller/job_definition_limitless_test.go @@ -838,3 +838,351 @@ func TestLPPInFileRegexp(t *testing.T) { }) } } + +func TestConglomerateGLInFileRegexp(t *testing.T) { + + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" + correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_77" + correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_77.failure.code_77" + correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZkProof_GL.json" + missingStv = "102-103-etv0.2.3-getZkProof_GL.json" + notAPoint = "102-103-etv0.2.3-getZkProof_GLAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + // The responses in case of success + var ( + respM = "responses/102-103-getZkProof.json" + respL = "responses/102-103-getZkProof.json" + // #nosec G101 -- Not a credential + respWithFailM = "responses/102-103-getZkProof.json" + // #nosec G101 -- Not a credential + respWithFailL = "responses/102-103-getZkProof.json" + // #nosec G101 -- Not a credential + respWith2FailsM = "responses/102-103-getZkProof.json" + // #nosec G101 -- Not a credential + respWith2FailsL = "responses/102-103-getZkProof.json" + ) + + // The rename in case it is deferred to the large prover + var ( + toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" + toLargeWithFailM 
= "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" + toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" + toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_GL.json.large.failure.code_137" + toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_GL.json.large.failure.code_137" + ) + + // The rename in case it is a success + var ( + successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" + successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL.json.success" + successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL.json.success" + successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" + successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" + successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" + successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" + successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" + ) + + // The rename in case it is a panic (code = 2) + var ( + failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" + failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL.json.failure.code_2" + failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL.json.failure.code_2" + failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" + failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" + failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" + failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" + failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" + ) + + testCases := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, 
+ Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Explainer: "happy path, case M", + ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, + ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, + ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, + ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, + }, + { + Ext: "large", Fail: "code", ShouldMatch: true, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "happy path, case L", + ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, + ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, + ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "M does not pick the files reserved for L", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, + Explainer: "L does not pick the files reserved for M", + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{notAPoint, badName}, + Explainer: "M does not pick obviously invalid files", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{missingEtv, missingStv, notAPoint, badName}, + Explainer: "L does not pick obviously invalid files", + }, + } + + for _, c := range testCases { + conf := config.Config{} + conf.Version = "0.1.2" + conf.Conglomeration.CanRunFullLarge = c.Ext == "large" + + def, err := ConglomerateGLDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCase(t, def, c) + }) + } +} + +func TestConglomerateDistMetadataInFileRegexp(t *testing.T) { + + var ( + correctM = 
"102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77" + correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77.failure.code_77" + correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json" + notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadataAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + // The responses in case of success + var ( + respM = "responses/102-103-getZkProof.json" + respL = "responses/102-103-getZkProof.json" + // #nosec G101 -- Not a credential + respWithFailM = "responses/102-103-getZkProof.json" + // #nosec G101 -- Not a credential + respWithFailL = "responses/102-103-getZkProof.json" + // #nosec G101 -- Not a credential + respWith2FailsM = "responses/102-103-getZkProof.json" + // #nosec G101 -- Not a credential + respWith2FailsL = "responses/102-103-getZkProof.json" + ) + + // The rename in case it is deferred to the large prover + var ( + toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeWoStv = 
"requests/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + ) + + // The rename in case it is a success + var ( + successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" + successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" + successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" + ) + + // The rename in case it is a panic (code = 2) + var ( + failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" + failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" + failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" + ) + + testCases := 
[]inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Explainer: "happy path, case M", + ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, + ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, + ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, + ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, + }, + { + Ext: "large", Fail: "code", ShouldMatch: true, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "happy path, case L", + ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, + ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, + ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "M does not pick the files reserved for L", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, + Explainer: "L does not pick the files reserved for M", + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{notAPoint, badName}, + Explainer: "M does not pick obviously invalid files", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{missingEtv, missingStv, notAPoint, badName}, + Explainer: "L does not pick obviously invalid files", + }, + } + + for _, c := range testCases { + conf := config.Config{} + conf.Version = "0.1.2" + conf.Conglomeration.CanRunFullLarge = c.Ext == "large" + + def, err := ConglomerateDistMetadataDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCase(t, def, c) + }) + } +} + +func TestConglomerateLPPInFileRegexp(t *testing.T) { + 
+ var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" + correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_77" + correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_77.failure.code_77" + correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZkProof_LPP.json" + missingStv = "102-103-etv0.2.3-getZkProof_LPP.json" + notAPoint = "102-103-etv0.2.3-getZkProof_LPPAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + // The responses in case of success + var ( + respM = "responses/102-103-getZkProof.json" + respL = "responses/102-103-getZkProof.json" + // #nosec G101 -- Not a credential + respWithFailM = "responses/102-103-getZkProof.json" + // #nosec G101 -- Not a credential + respWithFailL = "responses/102-103-getZkProof.json" + // #nosec G101 -- Not a credential + respWith2FailsM = "responses/102-103-getZkProof.json" + // #nosec G101 -- Not a credential + respWith2FailsL = "responses/102-103-getZkProof.json" + ) + + // The rename in case it is deferred to the large prover + var ( + toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" + toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" + toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" + toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" + toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_LPP.json.large.failure.code_137" + ) + + // The rename in case it is a success + var ( + successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" + successMWoStv = 
"requests-done/102-103-etv0.2.3-getZkProof_LPP.json.success" + successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_LPP.json.success" + successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" + successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" + successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" + successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" + successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" + ) + + // The rename in case it is a panic (code = 2) + var ( + failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" + failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_LPP.json.failure.code_2" + failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_LPP.json.failure.code_2" + failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" + failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" + failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" + failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" + failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" + ) + + testCases := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Explainer: "happy path, case M", + ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, + ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, + ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, + ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, 
+ }, + { + Ext: "large", Fail: "code", ShouldMatch: true, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "happy path, case L", + ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, + ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, + ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Explainer: "M does not pick the files reserved for L", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, + Explainer: "L does not pick the files reserved for M", + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: []string{notAPoint, badName}, + Explainer: "M does not pick obviously invalid files", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: []string{missingEtv, missingStv, notAPoint, badName}, + Explainer: "L does not pick obviously invalid files", + }, + } + + for _, c := range testCases { + conf := config.Config{} + conf.Version = "0.1.2" + conf.Conglomeration.CanRunFullLarge = c.Ext == "large" + + def, err := ConglomerateLPPDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCase(t, def, c) + }) + } +} diff --git a/prover/config/config.go b/prover/config/config.go index 7681d8401..67164625b 100644 --- a/prover/config/config.go +++ b/prover/config/config.go @@ -120,7 +120,7 @@ type Config struct { LPPExecution Execution `mapstructure:"execution_lpp"` - Conglomeration Execution `mapstructure:"execution_conglomeration"` + Conglomeration Conglomeration `mapstructure:"execution_conglomeration"` Debug struct { // Profiling indicates whether we want to generate profiles using the [runtime/pprof] pkg. 
@@ -165,6 +165,23 @@ type RndBeacon struct { ConflatedTracesDir string `mapstructure:"conflated_traces_dir" validate:"required"` } +type Conglomeration struct { + GL WithRequestDir `mapstructure:",squash"` + + LPP WithRequestDir `mapstructure:",squash"` + + BootstrapMetadata WithRequestDir `mapstructure:",squash"` + + // ProverMode stores the kind of prover to use. + ProverMode ProverMode `mapstructure:"prover_mode" validate:"required,oneof=dev partial full proofless bench check-only encode-only"` + + // CanRunFullLarge indicates whether the prover is running on a large machine (and can run full large traces). + CanRunFullLarge bool `mapstructure:"can_run_full_large"` + + // ConflatedTracesDir stores the directory where the conflation traces are stored. + ConflatedTracesDir string `mapstructure:"conflated_traces_dir" validate:"required"` +} + func (cfg *Config) Logger() *logrus.Logger { // TODO @gbotrel revisit. return logrus.StandardLogger() From 7d8b1ec0a946da33881c6c6c3467c60884dd75bb Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Tue, 4 Feb 2025 15:35:06 +0000 Subject: [PATCH 12/48] refactor after draft PR feedback --- .../cmd/controller/controller/fs_watcher.go | 128 +- .../controller/controller/job_definition.go | 339 +-- .../controller/job_definition_limitless.go | 476 ++-- .../job_definition_limitless_test.go | 2372 ++++++++--------- prover/cmd/controller/controller/jobs.go | 142 +- 5 files changed, 1754 insertions(+), 1703 deletions(-) diff --git a/prover/cmd/controller/controller/fs_watcher.go b/prover/cmd/controller/controller/fs_watcher.go index 962f72381..96677dd2a 100644 --- a/prover/cmd/controller/controller/fs_watcher.go +++ b/prover/cmd/controller/controller/fs_watcher.go @@ -15,7 +15,7 @@ import ( "golang.org/x/exp/slices" ) -// FsWatcher is a struct who will watch the filesystem and return files as if +// FsWatcher watches the filesystem and return files as if // it were a message queue. type FsWatcher struct { // Unique ID of the container. 
Used to identify the owner of a locked file @@ -72,11 +72,13 @@ func (fs *FsWatcher) GetBest() (job *Job) { // of every jobs found so far and they will all be attributed to the // last job definition. jdef := &fs.JobToWatch[i] - if err := fs.appendJobFromDef(jdef, &jobs); err != nil { - fs.Logger.Errorf( - "Got an error trying to fetch job `%v` from dir %v: %v", - jdef.Name, jdef.dirFrom(), err, - ) + for j := range jdef.RequestsRootDir { + if err := fs.appendJobFromDef(jdef, &jobs); err != nil { + fs.Logger.Errorf( + "error trying to fetch job `%v` from dir %v: %v", + jdef.Name, jdef.dirFrom(j), err, + ) + } } } @@ -85,8 +87,8 @@ func (fs *FsWatcher) GetBest() (job *Job) { return nil } - // Sort the jobs by scores in ascending order. Lower scores mean more - // priority. + // Sort the jobs by scores in ascending order. + // Lower scores mean more priority. slices.SortStableFunc(jobs, func(a, b *Job) int { return a.Score() - b.Score() }) @@ -119,32 +121,37 @@ func (f *FsWatcher) lockBest(jobs []*Job) (pos int, success bool) { // is returned if the function fails to read the directory. func (fs *FsWatcher) appendJobFromDef(jdef *JobDefinition, jobs *[]*Job) (err error) { - dirFrom := jdef.dirFrom() - fs.Logger.Tracef("Seeking jobs for %v in %v", jdef.Name, dirFrom) - - // This will fail if the provided directory is not a directory - dirents, err := lsname(dirFrom) - if err != nil { - return fmt.Errorf("cannot ls `%s` : %v", dirFrom, err) - } numMatched := 0 - // Search and append the valid files into the list. 
- for _, dirent := range dirents { + var dirEntStr []string + for idx := range jdef.RequestsRootDir { + dirFrom := jdef.dirFrom(idx) + fs.Logger.Tracef("Seeking jobs for %v in %v", jdef.Name, dirFrom) - fs.Logger.Tracef("Examining entry %s in %s", dirFrom, dirent.Name()) + // This will fail if the provided directory is not a directory + dirents, err := lsname(dirFrom) + if err != nil { + return fmt.Errorf("cannot ls `%s` : %v", dirFrom, err) + } - // Ignore directories - if !dirent.Type().IsRegular() { - fs.Logger.Debugf("Ignoring directory `%s`", dirent.Name()) - continue + for _, dirent := range dirents { + + fs.Logger.Tracef("Examining entry %s in %s", dirFrom, dirent.Name()) + + // Ignore directories + if !dirent.Type().IsRegular() { + fs.Logger.Debugf("Ignoring directory `%s`", dirent.Name()) + continue + } + + dirEntStr = append(dirEntStr, dirent.Name()) } - // Attempt to construct a job from the filename. If the filename is + // Attempt to construct a job from the filenames. If the filenames is // not parseable to the target JobType, it will return an error. - job, err := NewJob(jdef, dirent.Name()) + job, err := NewJob(jdef, dirEntStr) if err != nil { - fs.Logger.Debugf("Found invalid file `%v` : %v", dirent.Name(), err) + fs.Logger.Debugf("Found invalid file `%v` : %v", err) continue } @@ -153,52 +160,51 @@ func (fs *FsWatcher) appendJobFromDef(jdef *JobDefinition, jobs *[]*Job) (err er *jobs = append(*jobs, job) numMatched++ } - // Pass prometheus metrics - metrics.CollectFS(jdef.Name, len(dirents), numMatched) - + metrics.CollectFS(jdef.Name, len(dirEntStr), numMatched) return nil } -// Trylock attempts to rename a file by adding an IN_PROGRESS suffix. The lock -// operation is atomic only on Unix systems. +// Trylock attempts to rename a file by adding an IN_PROGRESS suffix. +// The lock operation is atomic only on Unix systems. 
func (fs *FsWatcher) tryLockFile(job *Job) (success bool) { - dirName := job.Def.dirFrom() - lockedFile := strings.Join( - []string{ - job.OriginalFile, - fs.InProgress, - fs.LocalID, - }, ".") - old := path.Join(dirName, job.OriginalFile) - new := path.Join(dirName, lockedFile) - err := os.Rename(old, new) + for idx := range job.OriginalFile { + dirName := job.Def.dirFrom(idx) + lockedFile := strings.Join( + []string{ + job.OriginalFile[idx], + fs.InProgress, + fs.LocalID, + }, ".") + old := path.Join(dirName, job.OriginalFile[idx]) + new := path.Join(dirName, lockedFile) + err := os.Rename(old, new) - if err != nil { - // Detect the case where the old file still exists but the new one - // already exists. - _, errOld := os.Lstat(old) - _, errNew := os.Lstat(new) - - if errNew == nil && errOld == nil { - fs.Logger.Errorf( - "old file `%v` and new files `%v` both exists", - old, new, + if err != nil { + // Detect the case where the old file still exists but the new one + // already exists. + _, errOld := os.Lstat(old) + _, errNew := os.Lstat(new) + + if errNew == nil && errOld == nil { + fs.Logger.Errorf( + "old file `%v` and new files `%v` both exists", + old, new, + ) + } + + fs.Logger.Tracef( + "could not lock file %v because : %v", + old, errOld, ) - } - fs.Logger.Tracef( - "could not lock file %v because : %v", - old, errOld, - ) + return false + } - return false + // Success, write the name of the locked file + job.LockedFile[idx] = lockedFile } - - // Success, write the name of the locked file - job.LockedFile = lockedFile - return true } diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index 97027992c..cef5b8740 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -16,21 +16,27 @@ const ( jobNameAggregation = "aggregation" ) -// JobDefinition represents a collection of static parameters allowing to define -// a job. 
+// ParamsRegexp represents the associated compiled regexps for a job definition. +type ParamsRegexp struct { + Start *regexp2.Regexp + End *regexp2.Regexp + Stv *regexp2.Regexp + Etv *regexp2.Regexp + Cv *regexp2.Regexp + ContentHash *regexp2.Regexp +} + +// JobDefinition represents a collection of static parameters allowing to define a job. type JobDefinition struct { // Name of the job Name string - // Priority at which this type of job should be processed. The lower the - // more of a priority. - // + // Priority at which this type of job should be processed. The lower the more of a priority. // Typically 0 for execution, 1 for compression and 2 for aggregation. - // Priority int // Parameters for the job definition provided by the user - RequestsRootDir string + RequestsRootDir []string // The regexp to use to match input files. For instance, // @@ -40,191 +46,167 @@ type JobDefinition struct { // but to only accept execution trace. The regexp should always start "^" // and end with "$" otherwise you are going to match in-progress files. // - InputFileRegexp *regexp2.Regexp + InputFileRegexp []*regexp2.Regexp // Template to use to generate the output file. The template should have the // form of a go template. For instance, // // `{{.From}}-{{.To}}-pv{{.Version}}-stv{{.Stv}}-etv{{.Etv}}-zkProof.json` // - OutputFileTmpl *template.Template + OutputFileTmpl []*template.Template // The associated compiled regexp, this saves on recompiling the regexps // everytime we want to use them. If a field is not needed, it can be left // at zero. - ParamsRegexp struct { - Start *regexp2.Regexp - End *regexp2.Regexp - Stv *regexp2.Regexp - Etv *regexp2.Regexp - Cv *regexp2.Regexp - ContentHash *regexp2.Regexp - } + ParamsRegexp []ParamsRegexp // Regexp of the failure code so that we can trim it if we want to retry. FailureSuffix *regexp2.Regexp } -// Definition of an execution prover job. The function panics on any error since -// it is called at start up. 
-func ExecutionDefinition(conf *config.Config) JobDefinition { +// commonJobDefinition creates a new JobDefinition with the provided parameters. +// It sets up the job definition's name, priority, request directories, input file patterns, output templates, +// and parameter regexps. The function returns a JobDefinition and an error if any occurs during the setup. +func commonJobDefinition(name string, priority int, + reqRootDirs []string, inputFilePatterns []string, + outputFileTmpls []string, outputFileNames []string, + paramsRegexp []ParamsRegexp, failSuffix string) (*JobDefinition, error) { + + m, n := len(reqRootDirs), len(inputFilePatterns) + if m != n { + return nil, fmt.Errorf(`length mis-match between the number of request files:%d + and input file patterns:%d specified in the job definition`, m, n) + } + + p, q := len(outputFileTmpls), len(outputFileNames) + if p != q { + return nil, fmt.Errorf(`length mis-match between the number of output file templates:%d + and output file names:%d specified in the job definition`, p, q) + } - // format the extension part of the regexp if provided + inputFileRegexps := make([]*regexp2.Regexp, m) + paramsRegexps := make([]ParamsRegexp, m) + outputFileTemplates := make([]*template.Template, p) + + for i := range inputFilePatterns { + inputFileRegexps[i] = regexp2.MustCompile(inputFilePatterns[i], regexp2.None) + paramsRegexps[i] = paramsRegexp[i] + } + + for j := range outputFileNames { + outputFileTemplates[j] = tmplMustCompile(outputFileTmpls[j], outputFileNames[j]) + } + + return &JobDefinition{ + Name: name, + Priority: priority, + RequestsRootDir: reqRootDirs, + InputFileRegexp: inputFileRegexps, + OutputFileTmpl: outputFileTemplates, + ParamsRegexp: paramsRegexps, + FailureSuffix: matchFailureSuffix(failSuffix), + }, nil +} + +// ExecutionDefinition creates a job definition for the execution prover job. 
+// It sets the input file pattern based on the configuration and creates the job definition with the appropriate parameters. +// The function panics on any error since it is called at startup. +func ExecutionDefinition(conf *config.Config) JobDefinition { inpFileExt := "" if conf.Execution.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } - return JobDefinition{ - RequestsRootDir: conf.Execution.RequestsRootDir, - - // Name of the job - Name: jobNameExecution, - - // This will panic at startup if the regexp is invalid - InputFileRegexp: regexp2.MustCompile( - fmt.Sprintf( - `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$`, - inpFileExt, - config.FailSuffix, - ), - regexp2.None, - ), - - // This will panic at startup if the template is invalid - OutputFileTmpl: tmplMustCompile( - "exec-output-file", - "{{.Start}}-{{.End}}-getZkProof.json", - ), - - // Execution job are at utmost priority - Priority: 0, - - // Parameters of the regexp, they can loose in the sense that these regexp - // are only called if the `InputFileRegexp` is matched. - ParamsRegexp: struct { - Start *regexp2.Regexp - End *regexp2.Regexp - Stv *regexp2.Regexp - Etv *regexp2.Regexp - Cv *regexp2.Regexp - ContentHash *regexp2.Regexp - }{ - // Match a string of digit at the beginning of the line - Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), - // Match a string of digit coming after the first string of digits that - // initiate the line and followed by a "-" - End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), - // Match a sequence of digits and "." comining after (resp.) 
"etv" and - // "cv" - Etv: matchVersionWithPrefix("etv"), - Stv: matchVersionWithPrefix("stv"), - }, - - FailureSuffix: matchFailureSuffix(config.FailSuffix), + inputFilePattern := fmt.Sprintf( + `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$`, + inpFileExt, + config.FailSuffix, + ) + + paramsRegexp := ParamsRegexp{ + Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), + End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), + Etv: matchVersionWithPrefix("etv"), + Stv: matchVersionWithPrefix("stv"), + } + + jobDef, err := commonJobDefinition( + jobNameExecution, + 0, + []string{conf.Execution.RequestsRootDir}, + []string{inputFilePattern}, + []string{"exec-output-file"}, + []string{"{{.Start}}-{{.End}}-getZkProof.json"}, + []ParamsRegexp{paramsRegexp}, + config.FailSuffix, + ) + if err != nil { + utils.Panic("could not create job definition: %v", err) } + return *jobDef } -// Definition of an execution prover job. +// CompressionDefinition creates a job definition for the blob decompression prover job. +// It sets the input file pattern based on the configuration and creates the job definition with the appropriate parameters. +// The function panics on any error since it is called at startup. 
func CompressionDefinition(conf *config.Config) JobDefinition { + inputFilePattern := fmt.Sprintf( + `^[0-9]+-[0-9]+(-bcv[0-9\.]+)?(-ccv[0-9\.]+)?-((0x)?[0-9a-zA-Z]*-)?getZkBlobCompressionProof\.json(\.failure\.%v_[0-9]+)*$`, + config.FailSuffix, + ) - return JobDefinition{ - RequestsRootDir: conf.BlobDecompression.RequestsRootDir, - - // Name of the job - Name: jobNameBlobDecompression, - - // This will panic at startup if the regexp is invalid - InputFileRegexp: regexp2.MustCompile( - fmt.Sprintf( - `^[0-9]+-[0-9]+(-bcv[0-9\.]+)?(-ccv[0-9\.]+)?-((0x)?[0-9a-zA-Z]*-)?getZkBlobCompressionProof\.json(\.failure\.%v_[0-9]+)*$`, - config.FailSuffix, - ), - regexp2.None, - ), - - // This will panic at startup if the template is invalid - OutputFileTmpl: tmplMustCompile( - "compress-output-file", - "{{.Start}}-{{.End}}-{{.ContentHash}}getZkBlobCompressionProof.json", - ), - - // Compression jobs have secondary priority - Priority: 1, - - // Parameters of the regexp, they can loose in the sense that these regexp - // are only called if the `InputFileRegexp` is matched. - ParamsRegexp: struct { - Start *regexp2.Regexp - End *regexp2.Regexp - Stv *regexp2.Regexp - Etv *regexp2.Regexp - Cv *regexp2.Regexp - ContentHash *regexp2.Regexp - }{ - // Match a string of digit at the beginning of the line - Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), - // Match a string of digit coming after the first string of digits that - // initiate the line and followed by a "-" - End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), - // Match any string containing digits and "." 
coming after the "cv" - Cv: matchVersionWithPrefix("cv"), - // Matches the string between ccv and and getZkBlobCompression - ContentHash: regexp2.MustCompile(`(?<=ccv[0-9\.]+-)(0x)?[0-9a-zA-Z]+-(?=getZk)`, regexp2.None), - }, - - FailureSuffix: matchFailureSuffix(config.FailSuffix), + paramsRegexp := ParamsRegexp{ + Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), + End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), + Cv: matchVersionWithPrefix("cv"), + ContentHash: regexp2.MustCompile(`(?<=ccv[0-9\.]+-)(0x)?[0-9a-zA-Z]+-(?=getZk)`, regexp2.None), } + + jobDef, err := commonJobDefinition( + jobNameBlobDecompression, + 1, + []string{conf.BlobDecompression.RequestsRootDir}, + []string{inputFilePattern}, + []string{"compress-output-file"}, + []string{"{{.Start}}-{{.End}}-{{.ContentHash}}getZkBlobCompressionProof.json"}, + []ParamsRegexp{paramsRegexp}, + config.FailSuffix, + ) + if err != nil { + utils.Panic("could not create job definition: %v", err) + } + return *jobDef } -// Definition of an aggregated prover job. +// AggregatedDefinition creates a job definition for the aggregated prover job. +// It sets the input file pattern based on the configuration and creates the job definition with the appropriate parameters. +// The function panics on any error since it is called at startup. 
func AggregatedDefinition(conf *config.Config) JobDefinition { + inputFilePattern := fmt.Sprintf( + `^[0-9]+-[0-9]+(-[a-fA-F0-9]+)?-getZkAggregatedProof\.json(\.failure\.%v_[0-9]+)*$`, + config.FailSuffix, + ) - return JobDefinition{ - RequestsRootDir: conf.Aggregation.RequestsRootDir, - - // Name of the job - Name: jobNameAggregation, - - // This will panic at startup if the regexp is invalid - InputFileRegexp: regexp2.MustCompile( - fmt.Sprintf( - `^[0-9]+-[0-9]+(-[a-fA-F0-9]+)?-getZkAggregatedProof\.json(\.failure\.%v_[0-9]+)*$`, - config.FailSuffix, - ), - regexp2.None, - ), - - // This will panic at startup if the template is invalid - OutputFileTmpl: tmplMustCompile( - "agreg-output-file", - "{{.Start}}-{{.End}}-{{.ContentHash}}-getZkAggregatedProof.json", - ), - - // Aggregation job are at lowest priority - Priority: 2, - - // Parameters of the regexp, they can loose in the sense that these - // regexp are only called if the `InputFileRegexp` is matched. - ParamsRegexp: struct { - Start *regexp2.Regexp - End *regexp2.Regexp - Stv *regexp2.Regexp - Etv *regexp2.Regexp - Cv *regexp2.Regexp - ContentHash *regexp2.Regexp - }{ - // Match a string of digit at the beginning of the line - Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), - // Match a string of digit coming after the first string of digits - // that initiate the line and followed by a "-" - End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), - // Match the hexadecimal string that precedes `getZkAggregatedProof` - ContentHash: regexp2.MustCompile(`(?<=^[0-9]+-[0-9]+-)[a-fA-F0-9]+(?=-getZk)`, regexp2.None), - }, - - FailureSuffix: matchFailureSuffix(config.FailSuffix), + paramsRegexp := ParamsRegexp{ + Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), + End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), + ContentHash: regexp2.MustCompile(`(?<=^[0-9]+-[0-9]+-)[a-fA-F0-9]+(?=-getZk)`, regexp2.None), } + + jobDef, err := commonJobDefinition( + jobNameAggregation, + 2, + 
[]string{conf.Aggregation.RequestsRootDir}, + []string{inputFilePattern}, + []string{"agreg-output-file"}, + []string{"{{.Start}}-{{.End}}-{{.ContentHash}}-getZkAggregatedProof.json"}, + []ParamsRegexp{paramsRegexp}, + config.FailSuffix, + ) + if err != nil { + utils.Panic("could not create job definition: %v", err) + } + return *jobDef } // Version prefix template @@ -236,7 +218,7 @@ func matchVersionWithPrefix(pre string) *regexp2.Regexp { } // Match the failure code suffix. This string will essentially match all the -// substrints of the form `.failure.code_` so that they can be replaced with +// substrings of the form `.failure.code_` so that they can be replaced with // the empty string. func matchFailureSuffix(pre string) *regexp2.Regexp { return regexp2.MustCompile( @@ -254,14 +236,37 @@ func tmplMustCompile(name, tmpl string) *template.Template { return res } -func (jd *JobDefinition) dirFrom() string { - return filepath.Join(jd.RequestsRootDir, config.RequestsFromSubDir) +func (jd *JobDefinition) isValidReqRootDirIdx(idx int) error { + if idx < 0 || idx >= len(jd.RequestsRootDir) { + return fmt.Errorf("out-of-bound request root dir. 
index specified for job definition: %s", jd.Name) + } + return nil +} + +func (jd *JobDefinition) isValidOutputFileIdx(idx int) error { + if idx < 0 || idx >= len(jd.OutputFileTmpl) { + return fmt.Errorf("out-of-bound output file template index specified for job definition: %s", jd.Name) + } + return nil +} + +func (jd *JobDefinition) dirFrom(idx int) string { + if err := jd.isValidReqRootDirIdx(idx); err != nil { + utils.Panic(err.Error()) + } + return filepath.Join(jd.RequestsRootDir[idx], config.RequestsFromSubDir) } -func (jd *JobDefinition) dirDone() string { - return filepath.Join(jd.RequestsRootDir, config.RequestsDoneSubDir) +func (jd *JobDefinition) dirDone(idx int) string { + if err := jd.isValidReqRootDirIdx(idx); err != nil { + utils.Panic(err.Error()) + } + return filepath.Join(jd.RequestsRootDir[idx], config.RequestsDoneSubDir) } -func (jd *JobDefinition) dirTo() string { - return filepath.Join(jd.RequestsRootDir, config.RequestsToSubDir) +func (jd *JobDefinition) dirTo(idx int) string { + if err := jd.isValidReqRootDirIdx(idx); err != nil { + utils.Panic(err.Error()) + } + return filepath.Join(jd.RequestsRootDir[idx], config.RequestsToSubDir) } diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index 42b149960..ea1d96750 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -1,240 +1,240 @@ package controller -import ( - "fmt" - - "github.com/consensys/linea-monorepo/prover/config" - "github.com/dlclark/regexp2" -) - -// Job definitions are defined such that each job has a single request and response file -// These jobs will execute asynchronously based on their set priorities -const ( - // Bootstrap - job_Exec_Bootstrap_GLSubmodule = "exec-bootstrap-GLsubmodule" - job_Exec_Bootstrap_DistMetadata = "exec-bootstrap-metadata" - - // Global-Local subprovers - job_Exec_GL_RndBeacon 
= "exec-GL-rndbeacon" - job_Exec_GL = "exec-GL" - - // Random Beacon - job_Exec_RndBeacon_LPP = "exec-rndbeacon" - job_Exec_Bootstrap_RndBeacon = "exec-bootstrap-rndbeacon" - - // LPP-subprovers - job_Exec_LPP = "exec-LPP" - - // Conglomerator - job_Exec_Congolomerate_LPP = "exec-congolo-LPP" - job_Exec_Congolomerate_GL = "exec-congolo-GL" - job_Exec_Congolomerate_Bootstrap_Metadata = "exec-congolo-metadata" -) - -// Priorities -const ( - priority_Exec_Bootstrap_GLSubmodule = 0 - priority_Exec_Bootstrap_DistMetadata = 0 - - priority_Exec_GL_RndBeacon = 1 - priority_Exec_GL = 1 - - priority_Exec_RndBeacon_LPP = 2 - priority_Exec_Bootstrap_RndBeacon = 2 - - priority_Exec_LPP = 3 - - priority_Exec_Congolomerate_LPP = 4 - priority_Exec_Congolomerate_GL = 4 - priority_Exec_Congolomerate_Metadata = 4 -) - -// Input file patterns -const ( - // Bootstrap I/p file is the usual execution req. file - exec_Bootstrap_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` - - // GL input - exec_Bootstrap_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_GLSubmodule\.json%v(\.failure\.%v_[0-9]+)*$` - - // Rnd Beacon I/p - exec_Bootstrap_RndBeacon_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` - exec_GL_RndBeacon_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_GL_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` - - // LPP Input - exec_LPP_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` - - // Conglomerator Input - exec_Conglomerate_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_GL\.json%v(\.failure\.%v_[0-9]+)*$` - exec_Conglomerate_LPP_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_LPP\.json%v(\.failure\.%v_[0-9]+)*$` - exec_Conglomerate_Bootstrap_DistMetadata_InputPattern = 
`^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` -) - -// Ouput File patterns and templates -const ( - exec_Bootstrap_GLSubmodule_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_Bootstrap_GLSubmodule.json" - exec_Bootstrap_Submodule_Tmpl = "exec-bootstrap-GLsubmodule-req-file" - - exec_Bootstrap_DistMetadata_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_Bootstrap_DistMetadata.json" - exec_Bootstrap_DistMetadata_Tmpl = "exec-bootstrap-submodule-distmetadata-file" - - // Global-Local subprovers - exec_GL_RndBeacon_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_GL_RndBeacon.json" - exec_GL_RndBeacon_Tmpl = "exec-GL-Beacon-file" - - exec_GL_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_GL.json" - exec_GL_Tmpl = "exec-GL-output-file" - - // Random Beacon - exec_RndBeacon_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_RndBeacon.json" - exec_RndBeacon_Tmpl = "exec-rndbeacon-output-file" - - // LPP-subprovers - exec_LPP_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_LPP.json" - exec_LPP_Tmpl = "exec-LPP-output-file" - - // Conglomerator - exec_Congolomerate_File = "{{.Start}}-{{.End}}-getZkProof.json" - exec_Congolomerate_Tmpl = "exec-output-file" -) - -// createJobDefinition creates a new JobDefinition with the provided parameters. -// It sets up the job's name, priority, request directory, input file pattern, and output template. -// The function returns a pointer to the JobDefinition and an error if any occurs during the setup. 
-func createJobDefinition(name string, priority int, - reqRootDir, inputFilePattern string, - outputTmpl, outputFileName string) (*JobDefinition, error) { - - return &JobDefinition{ - Name: name, - Priority: priority, - - // Primary and Secondary Request (Input) Files - RequestsRootDir: reqRootDir, - InputFileRegexp: regexp2.MustCompile(inputFilePattern, regexp2.None), - - // Output Templates - OutputFileTmpl: tmplMustCompile(outputTmpl, outputFileName), - - ParamsRegexp: struct { - Start *regexp2.Regexp - End *regexp2.Regexp - Stv *regexp2.Regexp - Etv *regexp2.Regexp - Cv *regexp2.Regexp - ContentHash *regexp2.Regexp - }{ - Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), - End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), - Etv: matchVersionWithPrefix("etv"), - Stv: matchVersionWithPrefix("stv"), - }, - FailureSuffix: matchFailureSuffix(config.FailSuffix), - }, nil -} - -// BootstrapGLSubModDefinition creates a job definition for the Bootstrap GL Submodule job. -// It sets the input file pattern based on the configuration and creates the job definition -// with the appropriate parameters. -func BootstrapGLSubModDefinition(conf *config.Config) (*JobDefinition, error) { - inpFileExt := "" - if conf.Bootstrap.CanRunFullLarge { - inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) - } - inputFilePattern := fmt.Sprintf(exec_Bootstrap_InputPattern, inpFileExt, config.FailSuffix) - return createJobDefinition(job_Exec_Bootstrap_GLSubmodule, priority_Exec_Bootstrap_GLSubmodule, - conf.Bootstrap.RequestsRootDir, inputFilePattern, exec_Bootstrap_Submodule_Tmpl, exec_Bootstrap_GLSubmodule_File) -} - -// BootstrapDistMetadataDefinition creates a job definition for the Bootstrap Metadata job. -// It sets the input file pattern based on the configuration and creates the job definition -// with the appropriate parameters. 
-func BootstrapDistMetadataDefinition(conf *config.Config) (*JobDefinition, error) { - inpFileExt := "" - if conf.Bootstrap.CanRunFullLarge { - inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) - } - inputFilePattern := fmt.Sprintf(exec_Bootstrap_InputPattern, inpFileExt, config.FailSuffix) - return createJobDefinition(job_Exec_Bootstrap_DistMetadata, priority_Exec_Bootstrap_DistMetadata, - conf.Bootstrap.RequestsRootDir, inputFilePattern, exec_Bootstrap_DistMetadata_Tmpl, exec_Bootstrap_DistMetadata_File) -} - -func GLRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { - inpFileExt := "" - if conf.GLExecution.CanRunFullLarge { - inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) - } - inputFilePattern := fmt.Sprintf(exec_Bootstrap_GL_InputPattern, inpFileExt, config.FailSuffix) - return createJobDefinition(job_Exec_GL_RndBeacon, priority_Exec_GL_RndBeacon, - conf.GLExecution.RequestsRootDir, inputFilePattern, exec_GL_RndBeacon_Tmpl, exec_GL_RndBeacon_File) -} - -func GLDefinition(conf *config.Config) (*JobDefinition, error) { - inpFileExt := "" - if conf.GLExecution.CanRunFullLarge { - inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) - } - inputFilePattern := fmt.Sprintf(exec_Bootstrap_GL_InputPattern, inpFileExt, config.FailSuffix) - return createJobDefinition(job_Exec_GL, priority_Exec_GL, - conf.GLExecution.RequestsRootDir, inputFilePattern, exec_GL_Tmpl, exec_GL_File) -} - -func BootstrapRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { - inpFileExt := "" - if conf.RndBeacon.CanRunFullLarge { - inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) - } - inputFilePattern := fmt.Sprintf(exec_Bootstrap_RndBeacon_InputPattern, inpFileExt, config.FailSuffix) - return createJobDefinition(job_Exec_Bootstrap_RndBeacon, priority_Exec_Bootstrap_RndBeacon, - conf.RndBeacon.MetaData.RequestsRootDir, inputFilePattern, exec_RndBeacon_Tmpl, exec_RndBeacon_File) -} - -func RndBeaconLPPDefinition(conf *config.Config) (*JobDefinition, 
error) { - inpFileExt := "" - if conf.RndBeacon.CanRunFullLarge { - inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) - } - inputFilePattern := fmt.Sprintf(exec_GL_RndBeacon_InputPattern, inpFileExt, config.FailSuffix) - return createJobDefinition(job_Exec_RndBeacon_LPP, priority_Exec_RndBeacon_LPP, - conf.RndBeacon.GL.RequestsRootDir, inputFilePattern, exec_RndBeacon_Tmpl, exec_RndBeacon_File) -} - -func LPPDefinition(conf *config.Config) (*JobDefinition, error) { - inpFileExt := "" - if conf.LPPExecution.CanRunFullLarge { - inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) - } - inputFilePattern := fmt.Sprintf(exec_LPP_InputPattern, inpFileExt, config.FailSuffix) - return createJobDefinition(job_Exec_LPP, priority_Exec_LPP, - conf.LPPExecution.RequestsRootDir, inputFilePattern, exec_LPP_Tmpl, exec_LPP_File) -} - -func ConglomerateDistMetadataDefinition(conf *config.Config) (*JobDefinition, error) { - inpFileExt := "" - if conf.Conglomeration.CanRunFullLarge { - inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) - } - inputFilePattern := fmt.Sprintf(exec_Conglomerate_Bootstrap_DistMetadata_InputPattern, inpFileExt, config.FailSuffix) - return createJobDefinition(job_Exec_Congolomerate_Bootstrap_Metadata, priority_Exec_Congolomerate_Metadata, - conf.Conglomeration.BootstrapMetadata.RequestsRootDir, inputFilePattern, exec_Congolomerate_Tmpl, exec_Congolomerate_File) -} - -func ConglomerateGLDefinition(conf *config.Config) (*JobDefinition, error) { - inpFileExt := "" - if conf.Conglomeration.CanRunFullLarge { - inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) - } - inputFilePattern := fmt.Sprintf(exec_Conglomerate_GL_InputPattern, inpFileExt, config.FailSuffix) - return createJobDefinition(job_Exec_Congolomerate_GL, priority_Exec_Congolomerate_GL, - conf.Conglomeration.GL.RequestsRootDir, inputFilePattern, exec_Congolomerate_Tmpl, exec_Congolomerate_File) -} - -func ConglomerateLPPDefinition(conf *config.Config) (*JobDefinition, error) { - inpFileExt := 
"" - if conf.Conglomeration.CanRunFullLarge { - inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) - } - inputFilePattern := fmt.Sprintf(exec_Conglomerate_LPP_InputPattern, inpFileExt, config.FailSuffix) - return createJobDefinition(job_Exec_Congolomerate_LPP, priority_Exec_Congolomerate_LPP, - conf.Conglomeration.LPP.RequestsRootDir, inputFilePattern, exec_Congolomerate_Tmpl, exec_Congolomerate_File) -} +// import ( +// "fmt" + +// "github.com/consensys/linea-monorepo/prover/config" +// "github.com/dlclark/regexp2" +// ) + +// // Job definitions are defined such that each job has a single request and response file +// // These jobs will execute asynchronously based on their set priorities +// const ( +// // Bootstrap +// job_Exec_Bootstrap_GLSubmodule = "exec-bootstrap-GLsubmodule" +// job_Exec_Bootstrap_DistMetadata = "exec-bootstrap-metadata" + +// // Global-Local subprovers +// job_Exec_GL_RndBeacon = "exec-GL-rndbeacon" +// job_Exec_GL = "exec-GL" + +// // Random Beacon +// job_Exec_RndBeacon_LPP = "exec-rndbeacon" +// job_Exec_Bootstrap_RndBeacon = "exec-bootstrap-rndbeacon" + +// // LPP-subprovers +// job_Exec_LPP = "exec-LPP" + +// // Conglomerator +// job_Exec_Congolomerate_LPP = "exec-conglo-LPP" +// job_Exec_Congolomerate_GL = "exec-conglo-GL" +// job_Exec_Congolomerate_Bootstrap_Metadata = "exec-conglo-metadata" +// ) + +// // Priorities +// const ( +// priority_Exec_Bootstrap_GLSubmodule = 0 +// priority_Exec_Bootstrap_DistMetadata = 0 + +// priority_Exec_GL_RndBeacon = 1 +// priority_Exec_GL = 1 + +// priority_Exec_RndBeacon_LPP = 2 +// priority_Exec_Bootstrap_RndBeacon = 2 + +// priority_Exec_LPP = 3 + +// priority_Exec_Congolomerate_LPP = 4 +// priority_Exec_Congolomerate_GL = 4 +// priority_Exec_Congolomerate_Metadata = 4 +// ) + +// // Input file patterns +// const ( +// // Bootstrap I/p file is the usual execution req. 
file +// exec_Bootstrap_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` + +// // GL input +// exec_Bootstrap_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_GLSubmodule\.json%v(\.failure\.%v_[0-9]+)*$` + +// // Rnd Beacon I/p +// exec_Bootstrap_RndBeacon_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` +// exec_GL_RndBeacon_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_GL_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` + +// // LPP Input +// exec_LPP_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` + +// // Conglomerator Input +// exec_Conglomerate_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_GL\.json%v(\.failure\.%v_[0-9]+)*$` +// exec_Conglomerate_LPP_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_LPP\.json%v(\.failure\.%v_[0-9]+)*$` +// exec_Conglomerate_Bootstrap_DistMetadata_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` +// ) + +// // Ouput File patterns and templates +// const ( +// exec_Bootstrap_GLSubmodule_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_Bootstrap_GLSubmodule.json" +// exec_Bootstrap_Submodule_Tmpl = "exec-bootstrap-GLsubmodule-req-file" + +// exec_Bootstrap_DistMetadata_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_Bootstrap_DistMetadata.json" +// exec_Bootstrap_DistMetadata_Tmpl = "exec-bootstrap-submodule-distmetadata-file" + +// // Global-Local subprovers +// exec_GL_RndBeacon_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_GL_RndBeacon.json" +// exec_GL_RndBeacon_Tmpl = "exec-GL-Beacon-file" + +// exec_GL_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_GL.json" +// exec_GL_Tmpl = 
"exec-GL-output-file" + +// // Random Beacon +// exec_RndBeacon_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_RndBeacon.json" +// exec_RndBeacon_Tmpl = "exec-rndbeacon-output-file" + +// // LPP-subprovers +// exec_LPP_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_LPP.json" +// exec_LPP_Tmpl = "exec-LPP-output-file" + +// // Conglomerator +// exec_Congolomerate_File = "{{.Start}}-{{.End}}-getZkProof.json" +// exec_Congolomerate_Tmpl = "exec-output-file" +// ) + +// // createJobDefinition creates a new JobDefinition with the provided parameters. +// // It sets up the job's name, priority, request directory, input file pattern, and output template. +// // The function returns a pointer to the JobDefinition and an error if any occurs during the setup. +// func createJobDefinition(name string, priority int, +// reqRootDir, inputFilePattern string, +// outputTmpl, outputFileName string) (*JobDefinition, error) { + +// return &JobDefinition{ +// Name: name, +// Priority: priority, + +// // Primary and Secondary Request (Input) Files +// RequestsRootDir: reqRootDir, +// InputFileRegexp: regexp2.MustCompile(inputFilePattern, regexp2.None), + +// // Output Templates +// OutputFileTmpl: tmplMustCompile(outputTmpl, outputFileName), + +// ParamsRegexp: struct { +// Start *regexp2.Regexp +// End *regexp2.Regexp +// Stv *regexp2.Regexp +// Etv *regexp2.Regexp +// Cv *regexp2.Regexp +// ContentHash *regexp2.Regexp +// }{ +// Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), +// End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), +// Etv: matchVersionWithPrefix("etv"), +// Stv: matchVersionWithPrefix("stv"), +// }, +// FailureSuffix: matchFailureSuffix(config.FailSuffix), +// }, nil +// } + +// // BootstrapGLSubModDefinition creates a job definition for the Bootstrap GL Submodule job. +// // It sets the input file pattern based on the configuration and creates the job definition +// // with the appropriate parameters. 
+// func BootstrapGLSubModDefinition(conf *config.Config) (*JobDefinition, error) { +// inpFileExt := "" +// if conf.Bootstrap.CanRunFullLarge { +// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) +// } +// inputFilePattern := fmt.Sprintf(exec_Bootstrap_InputPattern, inpFileExt, config.FailSuffix) +// return createJobDefinition(job_Exec_Bootstrap_GLSubmodule, priority_Exec_Bootstrap_GLSubmodule, +// conf.Bootstrap.RequestsRootDir, inputFilePattern, exec_Bootstrap_Submodule_Tmpl, exec_Bootstrap_GLSubmodule_File) +// } + +// // BootstrapDistMetadataDefinition creates a job definition for the Bootstrap Metadata job. +// // It sets the input file pattern based on the configuration and creates the job definition +// // with the appropriate parameters. +// func BootstrapDistMetadataDefinition(conf *config.Config) (*JobDefinition, error) { +// inpFileExt := "" +// if conf.Bootstrap.CanRunFullLarge { +// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) +// } +// inputFilePattern := fmt.Sprintf(exec_Bootstrap_InputPattern, inpFileExt, config.FailSuffix) +// return createJobDefinition(job_Exec_Bootstrap_DistMetadata, priority_Exec_Bootstrap_DistMetadata, +// conf.Bootstrap.RequestsRootDir, inputFilePattern, exec_Bootstrap_DistMetadata_Tmpl, exec_Bootstrap_DistMetadata_File) +// } + +// func GLRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { +// inpFileExt := "" +// if conf.GLExecution.CanRunFullLarge { +// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) +// } +// inputFilePattern := fmt.Sprintf(exec_Bootstrap_GL_InputPattern, inpFileExt, config.FailSuffix) +// return createJobDefinition(job_Exec_GL_RndBeacon, priority_Exec_GL_RndBeacon, +// conf.GLExecution.RequestsRootDir, inputFilePattern, exec_GL_RndBeacon_Tmpl, exec_GL_RndBeacon_File) +// } + +// func GLDefinition(conf *config.Config) (*JobDefinition, error) { +// inpFileExt := "" +// if conf.GLExecution.CanRunFullLarge { +// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) +// } +// 
inputFilePattern := fmt.Sprintf(exec_Bootstrap_GL_InputPattern, inpFileExt, config.FailSuffix) +// return createJobDefinition(job_Exec_GL, priority_Exec_GL, +// conf.GLExecution.RequestsRootDir, inputFilePattern, exec_GL_Tmpl, exec_GL_File) +// } + +// func BootstrapRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { +// inpFileExt := "" +// if conf.RndBeacon.CanRunFullLarge { +// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) +// } +// inputFilePattern := fmt.Sprintf(exec_Bootstrap_RndBeacon_InputPattern, inpFileExt, config.FailSuffix) +// return createJobDefinition(job_Exec_Bootstrap_RndBeacon, priority_Exec_Bootstrap_RndBeacon, +// conf.RndBeacon.MetaData.RequestsRootDir, inputFilePattern, exec_RndBeacon_Tmpl, exec_RndBeacon_File) +// } + +// func RndBeaconLPPDefinition(conf *config.Config) (*JobDefinition, error) { +// inpFileExt := "" +// if conf.RndBeacon.CanRunFullLarge { +// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) +// } +// inputFilePattern := fmt.Sprintf(exec_GL_RndBeacon_InputPattern, inpFileExt, config.FailSuffix) +// return createJobDefinition(job_Exec_RndBeacon_LPP, priority_Exec_RndBeacon_LPP, +// conf.RndBeacon.GL.RequestsRootDir, inputFilePattern, exec_RndBeacon_Tmpl, exec_RndBeacon_File) +// } + +// func LPPDefinition(conf *config.Config) (*JobDefinition, error) { +// inpFileExt := "" +// if conf.LPPExecution.CanRunFullLarge { +// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) +// } +// inputFilePattern := fmt.Sprintf(exec_LPP_InputPattern, inpFileExt, config.FailSuffix) +// return createJobDefinition(job_Exec_LPP, priority_Exec_LPP, +// conf.LPPExecution.RequestsRootDir, inputFilePattern, exec_LPP_Tmpl, exec_LPP_File) +// } + +// func ConglomerateDistMetadataDefinition(conf *config.Config) (*JobDefinition, error) { +// inpFileExt := "" +// if conf.Conglomeration.CanRunFullLarge { +// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) +// } +// inputFilePattern := 
fmt.Sprintf(exec_Conglomerate_Bootstrap_DistMetadata_InputPattern, inpFileExt, config.FailSuffix) +// return createJobDefinition(job_Exec_Congolomerate_Bootstrap_Metadata, priority_Exec_Congolomerate_Metadata, +// conf.Conglomeration.BootstrapMetadata.RequestsRootDir, inputFilePattern, exec_Congolomerate_Tmpl, exec_Congolomerate_File) +// } + +// func ConglomerateGLDefinition(conf *config.Config) (*JobDefinition, error) { +// inpFileExt := "" +// if conf.Conglomeration.CanRunFullLarge { +// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) +// } +// inputFilePattern := fmt.Sprintf(exec_Conglomerate_GL_InputPattern, inpFileExt, config.FailSuffix) +// return createJobDefinition(job_Exec_Congolomerate_GL, priority_Exec_Congolomerate_GL, +// conf.Conglomeration.GL.RequestsRootDir, inputFilePattern, exec_Congolomerate_Tmpl, exec_Congolomerate_File) +// } + +// func ConglomerateLPPDefinition(conf *config.Config) (*JobDefinition, error) { +// inpFileExt := "" +// if conf.Conglomeration.CanRunFullLarge { +// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) +// } +// inputFilePattern := fmt.Sprintf(exec_Conglomerate_LPP_InputPattern, inpFileExt, config.FailSuffix) +// return createJobDefinition(job_Exec_Congolomerate_LPP, priority_Exec_Congolomerate_LPP, +// conf.Conglomeration.LPP.RequestsRootDir, inputFilePattern, exec_Congolomerate_Tmpl, exec_Congolomerate_File) +// } diff --git a/prover/cmd/controller/controller/job_definition_limitless_test.go b/prover/cmd/controller/controller/job_definition_limitless_test.go index f8a972e6a..88be7c5d6 100644 --- a/prover/cmd/controller/controller/job_definition_limitless_test.go +++ b/prover/cmd/controller/controller/job_definition_limitless_test.go @@ -1,1188 +1,1188 @@ package controller -import ( - "testing" - - "github.com/consensys/linea-monorepo/prover/config" - "github.com/stretchr/testify/assert" -) - -// This tests ensures that the naming convention is respected by the file-watcher -// i.e., files with the right 
naming only are recognized. And the corresponding -// output files are also recognized. -func TestBootstrapSubModInFileRegexp(t *testing.T) { - - var ( - correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json" - correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large" - correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77" - correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77" - correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77.failure.code_77" - correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77.failure.code_77" - missingEtv = "102-103-stv1.2.3-getZkProof.json" - missingStv = "102-103-etv0.2.3-getZkProof.json" - notAPoint = "102-103-etv0.2.3-getZkProofAjson" - badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" - ) - - // The responses in case of success - var ( - respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" - respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" - // #nosec G101 -- Not a credential - respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" - // #nosec G101 -- Not a credential - respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" - // #nosec G101 -- Not a credential - respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" - // #nosec G101 -- Not a credential - respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" - respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" - respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_Bootstrap_GLSubmodule.json" - ) - - // The rename in case it is deferred to the large prover - var ( - toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" - toLargeWithFailM = 
"requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" - toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" - toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof.json.large.failure.code_137" - toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof.json.large.failure.code_137" - ) - - // The rename in case it is a success - var ( - successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" - successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.success" - successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.success" - successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" - successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" - successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" - successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" - successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" - ) - - // The rename in case it is a panic (code = 2) - var ( - failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" - failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.failure.code_2" - failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.failure.code_2" - failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" - failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" - failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" - failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" - failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" - ) - - testcase := []inpFileNamesCases{ - { - Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, 
correctWith2FailsM, missingEtv, missingStv}, - Explainer: "happy path, case M", - ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, - ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, - ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, - ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, - }, - { - Ext: "large", Fail: "code", ShouldMatch: true, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "happy path, case L", - ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, - ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, - ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "M does not pick the files reserved for L", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, - Explainer: "L does not pick the files reserved for M", - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, - Explainer: "M does not pick obviously invalid files", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{missingEtv, missingStv, notAPoint, badName}, - Explainer: "L does not pick obviously invalid files", - }, - } - - for _, c := range testcase { - conf := config.Config{} - conf.Version = "0.1.2" - conf.Bootstrap.CanRunFullLarge = c.Ext == "large" - - def, err := BootstrapGLSubModDefinition(&conf) - assert.NoError(t, err) - - t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCase(t, def, c) - }) - } -} - -// This tests ensures that the naming convention is respected by the file-watcher -// i.e., files with the right naming only are recognized. 
And the corresponding -// output files are also recognized. -func TestBootstrapMetaDataInFileRegexp(t *testing.T) { - - var ( - correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json" - correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large" - correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77" - correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77" - correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77.failure.code_77" - correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77.failure.code_77" - missingEtv = "102-103-stv1.2.3-getZkProof.json" - missingStv = "102-103-etv0.2.3-getZkProof.json" - notAPoint = "102-103-etv0.2.3-getZkProofAjson" - badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" - ) - - // The responses in case of success - var ( - respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" - respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" - // #nosec G101 -- Not a credential - respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" - // #nosec G101 -- Not a credential - respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" - // #nosec G101 -- Not a credential - respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" - // #nosec G101 -- Not a credential - respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" - respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" - respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_Bootstrap_DistMetadata.json" - ) - - // The rename in case it is deferred to the large prover - var ( - toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" - toLargeWithFailM = 
"requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" - toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" - toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof.json.large.failure.code_137" - toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof.json.large.failure.code_137" - ) - - // The rename in case it is a success - var ( - successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" - successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.success" - successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.success" - successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" - successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" - successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" - successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" - successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" - ) - - // The rename in case it is a panic (code = 2) - var ( - failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" - failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.failure.code_2" - failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.failure.code_2" - failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" - failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" - failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" - failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" - failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" - ) - - testcase := []inpFileNamesCases{ - { - Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, 
correctWith2FailsM, missingEtv, missingStv}, - Explainer: "happy path, case M", - ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, - ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, - ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, - ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, - }, - { - Ext: "large", Fail: "code", ShouldMatch: true, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "happy path, case L", - ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, - ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, - ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "M does not pick the files reserved for L", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, - Explainer: "L does not pick the files reserved for M", - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, - Explainer: "M does not pick obviously invalid files", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{missingEtv, missingStv, notAPoint, badName}, - Explainer: "L does not pick obviously invalid files", - }, - } - - for _, c := range testcase { - conf := config.Config{} - conf.Version = "0.1.2" - conf.Bootstrap.CanRunFullLarge = c.Ext == "large" - - def, err := BootstrapDistMetadataDefinition(&conf) - assert.NoError(t, err) - - t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCase(t, def, c) - }) - } -} - -func TestGLRndBeaconInFileRegexp(t *testing.T) { - - var ( - correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" - correctL = 
"102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large" - correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77" - correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77" - correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77.failure.code_77" - correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77.failure.code_77" - missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" - missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json" - notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_SubmoduleAjson" - badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" - ) - - // The responses in case of success - var ( - respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" - respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" - // #nosec G101 -- Not a credential - respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" - // #nosec G101 -- Not a credential - respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" - // #nosec G101 -- Not a credential - respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" - // #nosec G101 -- Not a credential - respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" - respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_GL_RndBeacon.json" - respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_GL_RndBeacon.json" - ) - - // The rename in case it is deferred to the large prover - var ( - toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" - toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" - toLargeWith2FailsM = 
"requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" - toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" - toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" - ) - - // The rename in case it is a success - var ( - successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" - successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" - successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" - successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" - successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" - successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" - successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" - successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" - ) - - // The rename in case it is a panic (code = 2) - var ( - failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" - failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" - failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" - failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" - failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" - failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" - failWith2FailsM = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" - failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" - ) - - testCases := []inpFileNamesCases{ - { - Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, - Explainer: "happy path, case M", - ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, - ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, - ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, - ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, - }, - { - Ext: "large", Fail: "code", ShouldMatch: true, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "happy path, case L", - ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, - ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, - ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "M does not pick the files reserved for L", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, - Explainer: "L does not pick the files reserved for M", - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, - Explainer: "M does not pick obviously invalid files", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{missingEtv, missingStv, notAPoint, badName}, - Explainer: "L does not pick obviously invalid files", - }, - } - - for _, c := range testCases { - conf := config.Config{} - conf.Version = "0.1.2" - conf.GLExecution.CanRunFullLarge = 
c.Ext == "large" - - def, err := GLRndBeaconDefinition(&conf) - assert.NoError(t, err) - - t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCase(t, def, c) - }) - } -} - -func TestGLInFileRegexp(t *testing.T) { - - var ( - correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" - correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large" - correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77" - correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77" - correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77.failure.code_77" - correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77.failure.code_77" - missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" - missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json" - notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_SubmoduleAjson" - badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" - ) - - // The responses in case of success - var ( - respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" - respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" - // #nosec G101 -- Not a credential - respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" - // #nosec G101 -- Not a credential - respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" - // #nosec G101 -- Not a credential - respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" - // #nosec G101 -- Not a credential - respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" - respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_GL.json" - respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_GL.json" - ) - - // The rename in case it is deferred to the large prover - var ( - toLargeM = 
"requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" - toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" - toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" - toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" - toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" - ) - - // The rename in case it is a success - var ( - successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" - successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" - successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" - successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" - successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" - successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" - successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" - successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" - ) - - // The rename in case it is a panic (code = 2) - var ( - failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" - failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" - failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" - failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" - failWithFailM = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" - failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" - failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" - failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" - ) - - testCases := []inpFileNamesCases{ - { - Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, - Explainer: "happy path, case M", - ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, - ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, - ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, - ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, - }, - { - Ext: "large", Fail: "code", ShouldMatch: true, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "happy path, case L", - ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, - ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, - ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "M does not pick the files reserved for L", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, - Explainer: "L does not pick the files reserved for M", - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, - Explainer: "M does not pick obviously invalid files", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: 
[]string{missingEtv, missingStv, notAPoint, badName}, - Explainer: "L does not pick obviously invalid files", - }, - } - - for _, c := range testCases { - conf := config.Config{} - conf.Version = "0.1.2" - conf.GLExecution.CanRunFullLarge = c.Ext == "large" - - def, err := GLDefinition(&conf) - assert.NoError(t, err) - - t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCase(t, def, c) - }) - } -} - -func TestBootstrapRndBeaconInFileRegexp(t *testing.T) { - - var ( - correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" - correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large" - correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77" - correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77" - correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77.failure.code_77" - correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77.failure.code_77" - missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" - missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json" - notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadataAjson" - badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" - ) - - // The responses in case of success - var ( - respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - // #nosec G101 -- Not a credential - respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - // #nosec G101 -- Not a credential - respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - // #nosec G101 -- Not a credential - respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - // #nosec G101 -- Not a credential - 
respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_RndBeacon.json" - respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_RndBeacon.json" - ) - - // The rename in case it is deferred to the large prover - var ( - toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" - toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" - toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" - toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" - toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" - ) - - // The rename in case it is a success - var ( - successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" - successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.success" - successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" - successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" - successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" - successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" - successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" - successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" - ) - - // The rename in case it is a panic (code = 2) - var ( - failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" - failMWoStv = 
"requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" - failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" - failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" - failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" - failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" - failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" - failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" - ) - - testCases := []inpFileNamesCases{ - { - Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, - Explainer: "happy path, case M", - ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, - ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, - ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, - ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, - }, - { - Ext: "large", Fail: "code", ShouldMatch: true, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "happy path, case L", - ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, - ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, - ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "M does not pick the files reserved for L", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{correctM, 
correctWithFailM, correctWith2FailsM}, - Explainer: "L does not pick the files reserved for M", - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, - Explainer: "M does not pick obviously invalid files", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{missingEtv, missingStv, notAPoint, badName}, - Explainer: "L does not pick obviously invalid files", - }, - } - - for _, c := range testCases { - conf := config.Config{} - conf.Version = "0.1.2" - conf.RndBeacon.CanRunFullLarge = c.Ext == "large" - - def, err := BootstrapRndBeaconDefinition(&conf) - assert.NoError(t, err) - - t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCase(t, def, c) - }) - } -} - -func TestRndBeaconLPPInFileRegexp(t *testing.T) { - - var ( - correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" - correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large" - correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_77" - correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_77" - correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_77.failure.code_77" - correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_77.failure.code_77" - missingEtv = "102-103-stv1.2.3-getZkProof_GL_RndBeacon.json" - missingStv = "102-103-etv0.2.3-getZkProof_GL_RndBeacon.json" - notAPoint = "102-103-etv0.2.3-getZkProof_GL_RndBeaconAjson" - badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" - ) - - // The responses in case of success - var ( - respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - // #nosec G101 -- Not a credential - respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - // #nosec G101 -- Not a credential - respWithFailL = 
"responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - // #nosec G101 -- Not a credential - respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - // #nosec G101 -- Not a credential - respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_RndBeacon.json" - respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_RndBeacon.json" - ) - - // The rename in case it is deferred to the large prover - var ( - toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" - toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" - toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" - toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" - toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" - ) - - // The rename in case it is a success - var ( - successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" - successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.success" - successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.success" - successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" - successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" - successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" - successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" - successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" - ) - - // The rename in case it is a panic (code = 2) - var ( - failM = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" - failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" - failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" - failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" - failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" - failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" - failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" - failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" - ) - - testCases := []inpFileNamesCases{ - { - Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, - Explainer: "happy path, case M", - ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, - ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, - ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, - ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, - }, - { - Ext: "large", Fail: "code", ShouldMatch: true, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "happy path, case L", - ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, - ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, - ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "M does not pick the files reserved for L", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - 
Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, - Explainer: "L does not pick the files reserved for M", - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, - Explainer: "M does not pick obviously invalid files", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{missingEtv, missingStv, notAPoint, badName}, - Explainer: "L does not pick obviously invalid files", - }, - } - - for _, c := range testCases { - conf := config.Config{} - conf.Version = "0.1.2" - conf.RndBeacon.CanRunFullLarge = c.Ext == "large" - - def, err := RndBeaconLPPDefinition(&conf) - assert.NoError(t, err) - - t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCase(t, def, c) - }) - } -} - -func TestLPPInFileRegexp(t *testing.T) { - - var ( - correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large" - correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_77" - correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_77" - correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_77.failure.code_77" - correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_77.failure.code_77" - missingEtv = "102-103-stv1.2.3-getZkProof_RndBeacon.json" - missingStv = "102-103-etv0.2.3-getZkProof_RndBeacon.json" - notAPoint = "102-103-etv0.2.3-getZkProof_RndBeaconAjson" - badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" - ) - - // The responses in case of success - var ( - respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" - respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" - // #nosec G101 -- Not a credential - respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" - // #nosec G101 -- Not a credential - respWithFailL = 
"responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" - // #nosec G101 -- Not a credential - respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" - // #nosec G101 -- Not a credential - respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" - respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_LPP.json" - respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_LPP.json" - ) - - // The rename in case it is deferred to the large prover - var ( - toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" - toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" - toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" - toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" - toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_RndBeacon.json.large.failure.code_137" - ) - - // The rename in case it is a success - var ( - successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" - successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_RndBeacon.json.success" - successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_RndBeacon.json.success" - successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" - successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" - successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" - successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" - successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" - ) - - // The rename in case it is a panic (code = 2) - var ( - failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" - failMWoStv = 
"requests-done/102-103-etv0.2.3-getZkProof_RndBeacon.json.failure.code_2" - failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" - failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" - failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" - failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" - failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" - failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" - ) - - testCases := []inpFileNamesCases{ - { - Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, - Explainer: "happy path, case M", - ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, - ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, - ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, - ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, - }, - { - Ext: "large", Fail: "code", ShouldMatch: true, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "happy path, case L", - ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, - ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, - ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "M does not pick the files reserved for L", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, - Explainer: "L does not pick the files reserved for 
M", - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, - Explainer: "M does not pick obviously invalid files", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{missingEtv, missingStv, notAPoint, badName}, - Explainer: "L does not pick obviously invalid files", - }, - } - - for _, c := range testCases { - conf := config.Config{} - conf.Version = "0.1.2" - conf.LPPExecution.CanRunFullLarge = c.Ext == "large" - - def, err := LPPDefinition(&conf) - assert.NoError(t, err) - - t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCase(t, def, c) - }) - } -} - -func TestConglomerateGLInFileRegexp(t *testing.T) { - - var ( - correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" - correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large" - correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_77" - correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_77" - correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_77.failure.code_77" - correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_77.failure.code_77" - missingEtv = "102-103-stv1.2.3-getZkProof_GL.json" - missingStv = "102-103-etv0.2.3-getZkProof_GL.json" - notAPoint = "102-103-etv0.2.3-getZkProof_GLAjson" - badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" - ) - - // The responses in case of success - var ( - respM = "responses/102-103-getZkProof.json" - respL = "responses/102-103-getZkProof.json" - // #nosec G101 -- Not a credential - respWithFailM = "responses/102-103-getZkProof.json" - // #nosec G101 -- Not a credential - respWithFailL = "responses/102-103-getZkProof.json" - // #nosec G101 -- Not a credential - respWith2FailsM = "responses/102-103-getZkProof.json" - // #nosec G101 -- Not a credential - respWith2FailsL = "responses/102-103-getZkProof.json" - ) - - // The rename in case it is deferred to the large 
prover - var ( - toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" - toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" - toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" - toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_GL.json.large.failure.code_137" - toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_GL.json.large.failure.code_137" - ) - - // The rename in case it is a success - var ( - successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" - successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL.json.success" - successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL.json.success" - successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" - successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" - successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" - successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" - successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" - ) - - // The rename in case it is a panic (code = 2) - var ( - failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" - failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL.json.failure.code_2" - failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL.json.failure.code_2" - failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" - failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" - failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" - failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" - failWith2FailsL = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" - ) - - testCases := []inpFileNamesCases{ - { - Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, - Explainer: "happy path, case M", - ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, - ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, - ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, - ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, - }, - { - Ext: "large", Fail: "code", ShouldMatch: true, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "happy path, case L", - ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, - ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, - ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "M does not pick the files reserved for L", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, - Explainer: "L does not pick the files reserved for M", - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, - Explainer: "M does not pick obviously invalid files", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{missingEtv, missingStv, notAPoint, badName}, - Explainer: "L does not pick obviously invalid files", - }, - } - - for _, c := range testCases { - conf := config.Config{} - conf.Version = "0.1.2" - conf.Conglomeration.CanRunFullLarge = c.Ext == "large" - - def, err := ConglomerateGLDefinition(&conf) - assert.NoError(t, err) - - t.Run(c.Explainer, func(t *testing.T) { - 
runInpFileTestCase(t, def, c) - }) - } -} - -func TestConglomerateDistMetadataInFileRegexp(t *testing.T) { - - var ( - correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" - correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large" - correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77" - correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77" - correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77.failure.code_77" - correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77.failure.code_77" - missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" - missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json" - notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadataAjson" - badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" - ) - - // The responses in case of success - var ( - respM = "responses/102-103-getZkProof.json" - respL = "responses/102-103-getZkProof.json" - // #nosec G101 -- Not a credential - respWithFailM = "responses/102-103-getZkProof.json" - // #nosec G101 -- Not a credential - respWithFailL = "responses/102-103-getZkProof.json" - // #nosec G101 -- Not a credential - respWith2FailsM = "responses/102-103-getZkProof.json" - // #nosec G101 -- Not a credential - respWith2FailsL = "responses/102-103-getZkProof.json" - ) - - // The rename in case it is deferred to the large prover - var ( - toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" - toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" - toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" - toLargeWoEtv = 
"requests/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" - toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" - ) - - // The rename in case it is a success - var ( - successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" - successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.success" - successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" - successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" - successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" - successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" - successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" - successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" - ) - - // The rename in case it is a panic (code = 2) - var ( - failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" - failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" - failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" - failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" - failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" - failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" - failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" - failWith2FailsL = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" - ) - - testCases := []inpFileNamesCases{ - { - Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, - Explainer: "happy path, case M", - ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, - ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, - ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, - ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, - }, - { - Ext: "large", Fail: "code", ShouldMatch: true, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "happy path, case L", - ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, - ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, - ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "M does not pick the files reserved for L", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, - Explainer: "L does not pick the files reserved for M", - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, - Explainer: "M does not pick obviously invalid files", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{missingEtv, missingStv, notAPoint, badName}, - Explainer: "L does not pick obviously invalid files", - }, - } - - for _, c := range testCases { - conf := config.Config{} - conf.Version = "0.1.2" - conf.Conglomeration.CanRunFullLarge = c.Ext == "large" - - def, err := ConglomerateDistMetadataDefinition(&conf) - assert.NoError(t, err) - - t.Run(c.Explainer, 
func(t *testing.T) { - runInpFileTestCase(t, def, c) - }) - } -} - -func TestConglomerateLPPInFileRegexp(t *testing.T) { - - var ( - correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" - correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large" - correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_77" - correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_77" - correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_77.failure.code_77" - correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_77.failure.code_77" - missingEtv = "102-103-stv1.2.3-getZkProof_LPP.json" - missingStv = "102-103-etv0.2.3-getZkProof_LPP.json" - notAPoint = "102-103-etv0.2.3-getZkProof_LPPAjson" - badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" - ) - - // The responses in case of success - var ( - respM = "responses/102-103-getZkProof.json" - respL = "responses/102-103-getZkProof.json" - // #nosec G101 -- Not a credential - respWithFailM = "responses/102-103-getZkProof.json" - // #nosec G101 -- Not a credential - respWithFailL = "responses/102-103-getZkProof.json" - // #nosec G101 -- Not a credential - respWith2FailsM = "responses/102-103-getZkProof.json" - // #nosec G101 -- Not a credential - respWith2FailsL = "responses/102-103-getZkProof.json" - ) - - // The rename in case it is deferred to the large prover - var ( - toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" - toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" - toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" - toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" - toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_LPP.json.large.failure.code_137" - ) - - // The rename in case it is a success - var ( - successM = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" - successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_LPP.json.success" - successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_LPP.json.success" - successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" - successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" - successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" - successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" - successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" - ) - - // The rename in case it is a panic (code = 2) - var ( - failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" - failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_LPP.json.failure.code_2" - failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_LPP.json.failure.code_2" - failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" - failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" - failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" - failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" - failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" - ) - - testCases := []inpFileNamesCases{ - { - Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, - Explainer: "happy path, case M", - ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, - ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, - ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, 
successMWoStv}, - ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, - }, - { - Ext: "large", Fail: "code", ShouldMatch: true, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "happy path, case L", - ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, - ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, - ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, - Explainer: "M does not pick the files reserved for L", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, - Explainer: "L does not pick the files reserved for M", - }, - { - Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, - Explainer: "M does not pick obviously invalid files", - }, - { - Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{missingEtv, missingStv, notAPoint, badName}, - Explainer: "L does not pick obviously invalid files", - }, - } - - for _, c := range testCases { - conf := config.Config{} - conf.Version = "0.1.2" - conf.Conglomeration.CanRunFullLarge = c.Ext == "large" - - def, err := ConglomerateLPPDefinition(&conf) - assert.NoError(t, err) - - t.Run(c.Explainer, func(t *testing.T) { - runInpFileTestCase(t, def, c) - }) - } -} +// import ( +// "testing" + +// "github.com/consensys/linea-monorepo/prover/config" +// "github.com/stretchr/testify/assert" +// ) + +// // This tests ensures that the naming convention is respected by the file-watcher +// // i.e., files with the right naming only are recognized. And the corresponding +// // output files are also recognized. 
+// func TestBootstrapSubModInFileRegexp(t *testing.T) { + +// var ( +// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json" +// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large" +// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77" +// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77" +// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77.failure.code_77" +// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77.failure.code_77" +// missingEtv = "102-103-stv1.2.3-getZkProof.json" +// missingStv = "102-103-etv0.2.3-getZkProof.json" +// notAPoint = "102-103-etv0.2.3-getZkProofAjson" +// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" +// ) + +// // The responses in case of success +// var ( +// respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" +// respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" +// // #nosec G101 -- Not a credential +// respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" +// // #nosec G101 -- Not a credential +// respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" +// // #nosec G101 -- Not a credential +// respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" +// // #nosec G101 -- Not a credential +// respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" +// respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" +// respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_Bootstrap_GLSubmodule.json" +// ) + +// // The rename in case it is deferred to the large prover +// var ( +// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" +// toLargeWithFailM = 
"requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" +// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" +// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof.json.large.failure.code_137" +// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof.json.large.failure.code_137" +// ) + +// // The rename in case it is a success +// var ( +// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" +// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.success" +// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.success" +// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" +// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" +// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" +// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" +// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" +// ) + +// // The rename in case it is a panic (code = 2) +// var ( +// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" +// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.failure.code_2" +// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.failure.code_2" +// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" +// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" +// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" +// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" +// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" +// ) + +// testcase := []inpFileNamesCases{ +// { +// Ext: "", Fail: "code", ShouldMatch: true, +// 
Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, +// Explainer: "happy path, case M", +// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, +// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, +// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, +// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "happy path, case L", +// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, +// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, +// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "M does not pick the files reserved for L", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, +// Explainer: "L does not pick the files reserved for M", +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{notAPoint, badName}, +// Explainer: "M does not pick obviously invalid files", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, +// Explainer: "L does not pick obviously invalid files", +// }, +// } + +// for _, c := range testcase { +// conf := config.Config{} +// conf.Version = "0.1.2" +// conf.Bootstrap.CanRunFullLarge = c.Ext == "large" + +// def, err := BootstrapGLSubModDefinition(&conf) +// assert.NoError(t, err) + +// t.Run(c.Explainer, func(t *testing.T) { +// runInpFileTestCase(t, def, c) +// }) +// } +// } + +// // This tests ensures that the 
naming convention is respected by the file-watcher +// // i.e., files with the right naming only are recognized. And the corresponding +// // output files are also recognized. +// func TestBootstrapMetaDataInFileRegexp(t *testing.T) { + +// var ( +// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json" +// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large" +// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77" +// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77" +// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77.failure.code_77" +// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77.failure.code_77" +// missingEtv = "102-103-stv1.2.3-getZkProof.json" +// missingStv = "102-103-etv0.2.3-getZkProof.json" +// notAPoint = "102-103-etv0.2.3-getZkProofAjson" +// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" +// ) + +// // The responses in case of success +// var ( +// respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" +// respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" +// // #nosec G101 -- Not a credential +// respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" +// // #nosec G101 -- Not a credential +// respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" +// // #nosec G101 -- Not a credential +// respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" +// // #nosec G101 -- Not a credential +// respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" +// respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" +// respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_Bootstrap_DistMetadata.json" +// ) + +// // The rename in case it is deferred to the large 
prover +// var ( +// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" +// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" +// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" +// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof.json.large.failure.code_137" +// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof.json.large.failure.code_137" +// ) + +// // The rename in case it is a success +// var ( +// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" +// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.success" +// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.success" +// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" +// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" +// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" +// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" +// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" +// ) + +// // The rename in case it is a panic (code = 2) +// var ( +// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" +// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.failure.code_2" +// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.failure.code_2" +// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" +// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" +// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" +// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" +// failWith2FailsL = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" +// ) + +// testcase := []inpFileNamesCases{ +// { +// Ext: "", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, +// Explainer: "happy path, case M", +// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, +// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, +// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, +// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "happy path, case L", +// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, +// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, +// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "M does not pick the files reserved for L", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, +// Explainer: "L does not pick the files reserved for M", +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{notAPoint, badName}, +// Explainer: "M does not pick obviously invalid files", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, +// Explainer: "L does not pick obviously invalid files", +// }, +// } + +// for _, c := range testcase { +// conf := config.Config{} +// conf.Version = "0.1.2" +// conf.Bootstrap.CanRunFullLarge = c.Ext == "large" + +// def, err := 
BootstrapDistMetadataDefinition(&conf) +// assert.NoError(t, err) + +// t.Run(c.Explainer, func(t *testing.T) { +// runInpFileTestCase(t, def, c) +// }) +// } +// } + +// func TestGLRndBeaconInFileRegexp(t *testing.T) { + +// var ( +// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" +// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large" +// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77" +// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77" +// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77.failure.code_77" +// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77.failure.code_77" +// missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" +// missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json" +// notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_SubmoduleAjson" +// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" +// ) + +// // The responses in case of success +// var ( +// respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" +// respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" +// // #nosec G101 -- Not a credential +// respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" +// // #nosec G101 -- Not a credential +// respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" +// // #nosec G101 -- Not a credential +// respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" +// // #nosec G101 -- Not a credential +// respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" +// respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_GL_RndBeacon.json" +// respWoStv = 
"responses/102-103-etv0.2.3-stv-getZkProof_GL_RndBeacon.json" +// ) + +// // The rename in case it is deferred to the large prover +// var ( +// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" +// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" +// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" +// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" +// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" +// ) + +// // The rename in case it is a success +// var ( +// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" +// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" +// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" +// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" +// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" +// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" +// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" +// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" +// ) + +// // The rename in case it is a panic (code = 2) +// var ( +// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" +// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" +// failtWoEtv = 
"requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" +// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" +// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" +// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" +// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" +// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" +// ) + +// testCases := []inpFileNamesCases{ +// { +// Ext: "", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, +// Explainer: "happy path, case M", +// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, +// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, +// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, +// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "happy path, case L", +// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, +// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, +// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "M does not pick the files reserved for L", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, +// 
Explainer: "L does not pick the files reserved for M", +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{notAPoint, badName}, +// Explainer: "M does not pick obviously invalid files", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, +// Explainer: "L does not pick obviously invalid files", +// }, +// } + +// for _, c := range testCases { +// conf := config.Config{} +// conf.Version = "0.1.2" +// conf.GLExecution.CanRunFullLarge = c.Ext == "large" + +// def, err := GLRndBeaconDefinition(&conf) +// assert.NoError(t, err) + +// t.Run(c.Explainer, func(t *testing.T) { +// runInpFileTestCase(t, def, c) +// }) +// } +// } + +// func TestGLInFileRegexp(t *testing.T) { + +// var ( +// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" +// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large" +// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77" +// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77" +// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77.failure.code_77" +// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77.failure.code_77" +// missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" +// missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json" +// notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_SubmoduleAjson" +// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" +// ) + +// // The responses in case of success +// var ( +// respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" +// respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" +// // #nosec G101 -- Not a credential +// respWithFailM = 
"responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" +// // #nosec G101 -- Not a credential +// respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" +// // #nosec G101 -- Not a credential +// respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" +// // #nosec G101 -- Not a credential +// respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" +// respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_GL.json" +// respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_GL.json" +// ) + +// // The rename in case it is deferred to the large prover +// var ( +// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" +// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" +// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" +// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" +// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" +// ) + +// // The rename in case it is a success +// var ( +// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" +// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" +// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" +// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" +// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" +// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" +// successWith2FailsM = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" +// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" +// ) + +// // The rename in case it is a panic (code = 2) +// var ( +// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" +// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" +// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" +// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" +// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" +// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" +// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" +// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" +// ) + +// testCases := []inpFileNamesCases{ +// { +// Ext: "", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, +// Explainer: "happy path, case M", +// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, +// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, +// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, +// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "happy path, case L", +// ExpectedOutput: []string{respL, 
respWithFailL, respWith2FailsL}, +// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, +// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "M does not pick the files reserved for L", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, +// Explainer: "L does not pick the files reserved for M", +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{notAPoint, badName}, +// Explainer: "M does not pick obviously invalid files", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, +// Explainer: "L does not pick obviously invalid files", +// }, +// } + +// for _, c := range testCases { +// conf := config.Config{} +// conf.Version = "0.1.2" +// conf.GLExecution.CanRunFullLarge = c.Ext == "large" + +// def, err := GLDefinition(&conf) +// assert.NoError(t, err) + +// t.Run(c.Explainer, func(t *testing.T) { +// runInpFileTestCase(t, def, c) +// }) +// } +// } + +// func TestBootstrapRndBeaconInFileRegexp(t *testing.T) { + +// var ( +// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" +// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large" +// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77" +// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77" +// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77.failure.code_77" +// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77.failure.code_77" +// missingEtv = 
"102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" +// missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json" +// notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadataAjson" +// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" +// ) + +// // The responses in case of success +// var ( +// respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// // #nosec G101 -- Not a credential +// respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// // #nosec G101 -- Not a credential +// respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// // #nosec G101 -- Not a credential +// respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// // #nosec G101 -- Not a credential +// respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_RndBeacon.json" +// respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_RndBeacon.json" +// ) + +// // The rename in case it is deferred to the large prover +// var ( +// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" +// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" +// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" +// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" +// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" +// ) + +// // The rename in case it is a success +// var ( +// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// successMWoStv = 
"requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" +// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" +// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" +// ) + +// // The rename in case it is a panic (code = 2) +// var ( +// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" +// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" +// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" +// ) + +// testCases := []inpFileNamesCases{ +// { +// Ext: "", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, +// Explainer: "happy path, case M", +// ExpectedOutput: []string{respM, 
respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, +// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, +// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, +// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "happy path, case L", +// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, +// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, +// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "M does not pick the files reserved for L", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, +// Explainer: "L does not pick the files reserved for M", +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{notAPoint, badName}, +// Explainer: "M does not pick obviously invalid files", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, +// Explainer: "L does not pick obviously invalid files", +// }, +// } + +// for _, c := range testCases { +// conf := config.Config{} +// conf.Version = "0.1.2" +// conf.RndBeacon.CanRunFullLarge = c.Ext == "large" + +// def, err := BootstrapRndBeaconDefinition(&conf) +// assert.NoError(t, err) + +// t.Run(c.Explainer, func(t *testing.T) { +// runInpFileTestCase(t, def, c) +// }) +// } +// } + +// func TestRndBeaconLPPInFileRegexp(t *testing.T) { + +// var ( +// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" +// correctL = 
"102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large" +// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_77" +// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_77" +// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_77.failure.code_77" +// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_77.failure.code_77" +// missingEtv = "102-103-stv1.2.3-getZkProof_GL_RndBeacon.json" +// missingStv = "102-103-etv0.2.3-getZkProof_GL_RndBeacon.json" +// notAPoint = "102-103-etv0.2.3-getZkProof_GL_RndBeaconAjson" +// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" +// ) + +// // The responses in case of success +// var ( +// respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// // #nosec G101 -- Not a credential +// respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// // #nosec G101 -- Not a credential +// respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// // #nosec G101 -- Not a credential +// respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// // #nosec G101 -- Not a credential +// respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_RndBeacon.json" +// respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_RndBeacon.json" +// ) + +// // The rename in case it is deferred to the large prover +// var ( +// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" +// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" +// toLargeWith2FailsM = 
"requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" +// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" +// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" +// ) + +// // The rename in case it is a success +// var ( +// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" +// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.success" +// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.success" +// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" +// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" +// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" +// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" +// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" +// ) + +// // The rename in case it is a panic (code = 2) +// var ( +// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" +// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" +// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" +// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" +// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" +// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" +// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" +// failWith2FailsL = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" +// ) + +// testCases := []inpFileNamesCases{ +// { +// Ext: "", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, +// Explainer: "happy path, case M", +// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, +// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, +// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, +// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "happy path, case L", +// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, +// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, +// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "M does not pick the files reserved for L", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, +// Explainer: "L does not pick the files reserved for M", +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{notAPoint, badName}, +// Explainer: "M does not pick obviously invalid files", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, +// Explainer: "L does not pick obviously invalid files", +// }, +// } + +// for _, c := range testCases { +// conf := config.Config{} +// conf.Version = "0.1.2" +// conf.RndBeacon.CanRunFullLarge = c.Ext == "large" + +// def, err := 
RndBeaconLPPDefinition(&conf) +// assert.NoError(t, err) + +// t.Run(c.Explainer, func(t *testing.T) { +// runInpFileTestCase(t, def, c) +// }) +// } +// } + +// func TestLPPInFileRegexp(t *testing.T) { + +// var ( +// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large" +// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_77" +// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_77" +// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_77.failure.code_77" +// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_77.failure.code_77" +// missingEtv = "102-103-stv1.2.3-getZkProof_RndBeacon.json" +// missingStv = "102-103-etv0.2.3-getZkProof_RndBeacon.json" +// notAPoint = "102-103-etv0.2.3-getZkProof_RndBeaconAjson" +// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" +// ) + +// // The responses in case of success +// var ( +// respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" +// respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" +// // #nosec G101 -- Not a credential +// respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" +// // #nosec G101 -- Not a credential +// respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" +// // #nosec G101 -- Not a credential +// respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" +// // #nosec G101 -- Not a credential +// respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" +// respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_LPP.json" +// respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_LPP.json" +// ) + +// // The rename in case it is deferred to the large prover +// var ( +// toLargeM = 
"requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" +// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" +// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" +// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" +// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_RndBeacon.json.large.failure.code_137" +// ) + +// // The rename in case it is a success +// var ( +// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" +// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_RndBeacon.json.success" +// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_RndBeacon.json.success" +// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" +// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" +// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" +// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" +// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" +// ) + +// // The rename in case it is a panic (code = 2) +// var ( +// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" +// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_RndBeacon.json.failure.code_2" +// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" +// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" +// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" +// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" +// 
failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" +// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" +// ) + +// testCases := []inpFileNamesCases{ +// { +// Ext: "", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, +// Explainer: "happy path, case M", +// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, +// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, +// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, +// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "happy path, case L", +// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, +// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, +// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "M does not pick the files reserved for L", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, +// Explainer: "L does not pick the files reserved for M", +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{notAPoint, badName}, +// Explainer: "M does not pick obviously invalid files", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, +// Explainer: "L does not pick obviously invalid files", +// }, +// } + +// for _, c := range testCases { +// conf := 
config.Config{} +// conf.Version = "0.1.2" +// conf.LPPExecution.CanRunFullLarge = c.Ext == "large" + +// def, err := LPPDefinition(&conf) +// assert.NoError(t, err) + +// t.Run(c.Explainer, func(t *testing.T) { +// runInpFileTestCase(t, def, c) +// }) +// } +// } + +// func TestConglomerateGLInFileRegexp(t *testing.T) { + +// var ( +// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" +// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large" +// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_77" +// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_77" +// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_77.failure.code_77" +// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_77.failure.code_77" +// missingEtv = "102-103-stv1.2.3-getZkProof_GL.json" +// missingStv = "102-103-etv0.2.3-getZkProof_GL.json" +// notAPoint = "102-103-etv0.2.3-getZkProof_GLAjson" +// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" +// ) + +// // The responses in case of success +// var ( +// respM = "responses/102-103-getZkProof.json" +// respL = "responses/102-103-getZkProof.json" +// // #nosec G101 -- Not a credential +// respWithFailM = "responses/102-103-getZkProof.json" +// // #nosec G101 -- Not a credential +// respWithFailL = "responses/102-103-getZkProof.json" +// // #nosec G101 -- Not a credential +// respWith2FailsM = "responses/102-103-getZkProof.json" +// // #nosec G101 -- Not a credential +// respWith2FailsL = "responses/102-103-getZkProof.json" +// ) + +// // The rename in case it is deferred to the large prover +// var ( +// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" +// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" +// toLargeWith2FailsM = 
"requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" +// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_GL.json.large.failure.code_137" +// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_GL.json.large.failure.code_137" +// ) + +// // The rename in case it is a success +// var ( +// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" +// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL.json.success" +// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL.json.success" +// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" +// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" +// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" +// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" +// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" +// ) + +// // The rename in case it is a panic (code = 2) +// var ( +// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" +// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL.json.failure.code_2" +// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL.json.failure.code_2" +// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" +// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" +// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" +// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" +// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" +// ) + +// testCases := []inpFileNamesCases{ +// { +// Ext: "", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctM, 
correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, +// Explainer: "happy path, case M", +// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, +// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, +// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, +// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "happy path, case L", +// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, +// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, +// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "M does not pick the files reserved for L", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, +// Explainer: "L does not pick the files reserved for M", +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{notAPoint, badName}, +// Explainer: "M does not pick obviously invalid files", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, +// Explainer: "L does not pick obviously invalid files", +// }, +// } + +// for _, c := range testCases { +// conf := config.Config{} +// conf.Version = "0.1.2" +// conf.Conglomeration.CanRunFullLarge = c.Ext == "large" + +// def, err := ConglomerateGLDefinition(&conf) +// assert.NoError(t, err) + +// t.Run(c.Explainer, func(t *testing.T) { +// runInpFileTestCase(t, def, c) +// }) +// } +// } + +// func TestConglomerateDistMetadataInFileRegexp(t *testing.T) { + +// 
var ( +// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" +// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large" +// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77" +// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77" +// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77.failure.code_77" +// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77.failure.code_77" +// missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" +// missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json" +// notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadataAjson" +// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" +// ) + +// // The responses in case of success +// var ( +// respM = "responses/102-103-getZkProof.json" +// respL = "responses/102-103-getZkProof.json" +// // #nosec G101 -- Not a credential +// respWithFailM = "responses/102-103-getZkProof.json" +// // #nosec G101 -- Not a credential +// respWithFailL = "responses/102-103-getZkProof.json" +// // #nosec G101 -- Not a credential +// respWith2FailsM = "responses/102-103-getZkProof.json" +// // #nosec G101 -- Not a credential +// respWith2FailsL = "responses/102-103-getZkProof.json" +// ) + +// // The rename in case it is deferred to the large prover +// var ( +// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" +// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" +// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" +// toLargeWoEtv = 
"requests/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" +// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" +// ) + +// // The rename in case it is a success +// var ( +// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" +// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" +// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" +// ) + +// // The rename in case it is a panic (code = 2) +// var ( +// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" +// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" +// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// 
failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" +// ) + +// testCases := []inpFileNamesCases{ +// { +// Ext: "", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, +// Explainer: "happy path, case M", +// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, +// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, +// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, +// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "happy path, case L", +// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, +// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, +// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "M does not pick the files reserved for L", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, +// Explainer: "L does not pick the files reserved for M", +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{notAPoint, badName}, +// Explainer: "M does not pick obviously invalid files", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, +// Explainer: "L does not pick obviously invalid files", +// }, +// } + +// for _, c := range testCases { +// conf := config.Config{} +// conf.Version = "0.1.2" +// conf.Conglomeration.CanRunFullLarge = c.Ext == "large" 
+ +// def, err := ConglomerateDistMetadataDefinition(&conf) +// assert.NoError(t, err) + +// t.Run(c.Explainer, func(t *testing.T) { +// runInpFileTestCase(t, def, c) +// }) +// } +// } + +// func TestConglomerateLPPInFileRegexp(t *testing.T) { + +// var ( +// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" +// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large" +// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_77" +// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_77" +// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_77.failure.code_77" +// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_77.failure.code_77" +// missingEtv = "102-103-stv1.2.3-getZkProof_LPP.json" +// missingStv = "102-103-etv0.2.3-getZkProof_LPP.json" +// notAPoint = "102-103-etv0.2.3-getZkProof_LPPAjson" +// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" +// ) + +// // The responses in case of success +// var ( +// respM = "responses/102-103-getZkProof.json" +// respL = "responses/102-103-getZkProof.json" +// // #nosec G101 -- Not a credential +// respWithFailM = "responses/102-103-getZkProof.json" +// // #nosec G101 -- Not a credential +// respWithFailL = "responses/102-103-getZkProof.json" +// // #nosec G101 -- Not a credential +// respWith2FailsM = "responses/102-103-getZkProof.json" +// // #nosec G101 -- Not a credential +// respWith2FailsL = "responses/102-103-getZkProof.json" +// ) + +// // The rename in case it is deferred to the large prover +// var ( +// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" +// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" +// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" +// toLargeWoEtv = 
"requests/102-103-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" +// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_LPP.json.large.failure.code_137" +// ) + +// // The rename in case it is a success +// var ( +// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" +// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_LPP.json.success" +// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_LPP.json.success" +// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" +// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" +// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" +// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" +// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" +// ) + +// // The rename in case it is a panic (code = 2) +// var ( +// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" +// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_LPP.json.failure.code_2" +// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_LPP.json.failure.code_2" +// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" +// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" +// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" +// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" +// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" +// ) + +// testCases := []inpFileNamesCases{ +// { +// Ext: "", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, +// Explainer: "happy path, case 
M", +// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, +// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, +// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, +// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: true, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "happy path, case L", +// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, +// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, +// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, +// Explainer: "M does not pick the files reserved for L", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, +// Explainer: "L does not pick the files reserved for M", +// }, +// { +// Ext: "", Fail: "code", ShouldMatch: false, +// Fnames: []string{notAPoint, badName}, +// Explainer: "M does not pick obviously invalid files", +// }, +// { +// Ext: "large", Fail: "code", ShouldMatch: false, +// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, +// Explainer: "L does not pick obviously invalid files", +// }, +// } + +// for _, c := range testCases { +// conf := config.Config{} +// conf.Version = "0.1.2" +// conf.Conglomeration.CanRunFullLarge = c.Ext == "large" + +// def, err := ConglomerateLPPDefinition(&conf) +// assert.NoError(t, err) + +// t.Run(c.Explainer, func(t *testing.T) { +// runInpFileTestCase(t, def, c) +// }) +// } +// } diff --git a/prover/cmd/controller/controller/jobs.go b/prover/cmd/controller/controller/jobs.go index 2d7e110a2..7cde7c6e5 100644 --- 
a/prover/cmd/controller/controller/jobs.go +++ b/prover/cmd/controller/controller/jobs.go @@ -8,6 +8,7 @@ import ( "strings" "github.com/consensys/linea-monorepo/prover/config" + "github.com/consensys/linea-monorepo/prover/utils" "github.com/dlclark/regexp2" "github.com/sirupsen/logrus" ) @@ -16,75 +17,101 @@ import ( type Job struct { // Configuration parameters relative to the job Def *JobDefinition + // Original name of the file when it was found - OriginalFile string + OriginalFile []string // Name of the locked file. If this value is set, it means that the job // was successfully locked. - LockedFile string + LockedFile []string + // Height of the file in the priority queue - Start int - End int + Start []int + End []int // Execution Trace version - Etv string + Etv []string // State Manager Trace version - Stv string + Stv []string - // Compressor version ccv - VersionCompressor string + // Compressor version + Cv []string // The hex string of the content hash - ContentHash string + ContentHash []string } // OutputFileRessouce collects all the data needed to fill the output template // file. -type OutputFileRessouce struct { +type OutputFileResource struct { Job } // Parse a filename into a Job. Returns an error if the file does not // corresponds to the specified template of the job type. -func NewJob(jdef *JobDefinition, filename string) (j *Job, err error) { - // Validate the filename against the inbound regexp - if ok, err := jdef.InputFileRegexp.MatchString(filename); !ok || err != nil { - return nil, fmt.Errorf( - "filename %v does not match the inbound regexp for `%v` job: `%v. 
"+ - "Err if any = %v", - filename, jdef.Name, jdef.InputFileRegexp.String(), err, - ) +func NewJob(jdef *JobDefinition, filenames []string) (j *Job, err error) { + m, n := len(jdef.InputFileRegexp), len(filenames) + if m != n { + return nil, fmt.Errorf(`length mis-match between the number of input files specified in the + job definition: %d and the function params: %d`, m, n) } - j = &Job{Def: jdef, OriginalFile: filename} - regs := jdef.ParamsRegexp + // Define job + j = &Job{ + Def: jdef, OriginalFile: make([]string, m), LockedFile: make([]string, m), + Start: make([]int, m), End: make([]int, m), Etv: make([]string, m), Stv: make([]string, m), + Cv: make([]string, m), ContentHash: make([]string, m), + } - // If the regexps in the job definition are provided, use them to extract - // the parameters of the job. If one regexp is provided but is invalid, this - // will panic. That is because since, we already matched the - // `InputFileRegexp`, we assume that the parameters regexp can only be - // matched. - j.Start = intIfRegexpNotNil(regs.Start, filename) - j.End = intIfRegexpNotNil(regs.End, filename) - j.VersionCompressor = stringIfRegexpNotNil(regs.Cv, filename) - j.Etv = stringIfRegexpNotNil(regs.Etv, filename) - j.Stv = stringIfRegexpNotNil(regs.Stv, filename) - j.ContentHash = stringIfRegexpNotNil(regs.ContentHash, filename) + for i := 0; i < m; i++ { + // Validate the filename against the inbound regexp + if ok, err := jdef.InputFileRegexp[i].MatchString(filenames[i]); !ok || err != nil { + return nil, fmt.Errorf( + "filename %v does not match the inbound regexp for `%v` job: `%v. "+ + "Err if any = %v", + filenames[i], jdef.Name, jdef.InputFileRegexp[i].String(), err, + ) + } + j.OriginalFile[i] = filenames[i] + + regs := jdef.ParamsRegexp + + // If the regexps in the job definition are provided, use them to extract + // the parameters of the job. If one regexp is provided but is invalid, this + // will panic. 
That is because since, we already matched the + // `InputFileRegexp`, we assume that the parameters regexp can only be + // matched. + j.Start[i] = intIfRegexpNotNil(regs[i].Start, filenames[i]) + j.End[i] = intIfRegexpNotNil(regs[i].End, filenames[i]) + j.Cv[i] = stringIfRegexpNotNil(regs[i].Cv, filenames[i]) + j.Etv[i] = stringIfRegexpNotNil(regs[i].Etv, filenames[i]) + j.Stv[i] = stringIfRegexpNotNil(regs[i].Stv, filenames[i]) + j.ContentHash[i] = stringIfRegexpNotNil(regs[i].ContentHash, filenames[i]) + } return j, nil } // Returns the full path to the inprogress file -func (j *Job) InProgressPath() string { - return filepath.Join(j.Def.dirFrom(), j.LockedFile) +func (j *Job) InProgressPath(idx int) string { + if err := j.Def.isValidReqRootDirIdx(idx); err != nil { + utils.Panic(err.Error()) + } + return filepath.Join(j.Def.dirFrom(idx), j.LockedFile[idx]) } -// Returns the name of the output file for the job -func (j *Job) ResponseFile() (s string, err error) { +// Returns the name of the output file for the job at the specified index +func (j *Job) ResponseFile(idx int) (s string, err error) { + + // Sanity check + if err := j.Def.isValidOutputFileIdx(idx); err != nil { + return "", err + } // Run the template + // REMARK: Check how it behaves on runtime w := &strings.Builder{} - err = j.Def.OutputFileTmpl.Execute(w, OutputFileRessouce{ + err = j.Def.OutputFileTmpl[idx].Execute(w, OutputFileResource{ Job: *j, }) if err != nil { @@ -98,14 +125,17 @@ func (j *Job) ResponseFile() (s string, err error) { s = strings.ReplaceAll(s, "--", "-") // Append the dir_to filepath - s = path.Join(j.Def.dirTo(), s) + s = path.Join(j.Def.dirTo(idx), s) return s, nil } // Returns the name of the output file for the job -func (j *Job) TmpResponseFile(c *config.Config) (s string) { - return path.Join(j.Def.dirTo(), "tmp-response-file."+c.Controller.LocalID+".json") +func (j *Job) TmpResponseFile(c *config.Config, idx int) (s string) { + if err := j.Def.isValidOutputFileIdx(idx); 
err != nil { + utils.Panic(err.Error()) + } + return path.Join(j.Def.dirTo(idx), "tmp-response-file."+c.Controller.LocalID+".json") } // This function returns the name of the input file, modified to indicate that it should be retried in "large mode". @@ -113,36 +143,41 @@ func (j *Job) TmpResponseFile(c *config.Config) (s string) { // However, this situation is unexpected because the configuration validation ensures that if an exit code requires // deferring the job to a larger machine, the suffix must be set. // Additionally, if the prover's status code is zero (indicating success), the function will return an error. -func (j *Job) DeferToLargeFile(status Status) (s string, err error) { +func (j *Job) DeferToLargeFile(status Status, idx int) (s string, err error) { + + // Sanity check + if err := j.Def.isValidReqRootDirIdx(idx); err != nil { + return "", err + } // It's an invariant of the executor to not forget to set the status if status.ExitCode == 0 { return "", fmt.Errorf( "cant defer to large %v, status code was zero", - j.OriginalFile, + j.OriginalFile[idx], ) } const suffixLarge = config.LargeSuffix - // Issue a warning if the file if the files name already contains the + // Issue a warning if the files name already contains the // suffix. We may be in a situation where the large prover is trying to // defer over the same file. It is very likely an error. We will still // rename it to "<...>.large.large". That way, the file will not be picked // up a second time by the same large prover, creating an infinite retry // loop. - if strings.HasSuffix(j.OriginalFile, suffixLarge) { + if strings.HasSuffix(j.OriginalFile[idx], suffixLarge) { logrus.Warnf( "Deferring the large machine but the input file `%v` already has"+ " the suffix %v. 
Still renaming it to %v, but it will likely"+ // Returns the name of the input file modified so that it is retried in " not be picked up again", - j.OriginalFile, suffixLarge, s, + j.OriginalFile[idx], suffixLarge, s, ) } // Remove the suffix .failure.code_[0-9]+ from all the strings of the input // file. That way we do not propagate the previous errors. - origFile, err := j.Def.FailureSuffix.Replace(j.OriginalFile, "", -1, -1) + origFile, err := j.Def.FailureSuffix.Replace(j.OriginalFile[idx], "", -1, -1) if err != nil { // he assumption here is that the above function may return an error // but this error can only depend on the regexp, the replacement, @@ -154,17 +189,22 @@ func (j *Job) DeferToLargeFile(status Status) (s string, err error) { return fmt.Sprintf( "%v/%v.%v.failure.%v_%v", - j.Def.dirFrom(), origFile, + j.Def.dirFrom(idx), origFile, suffixLarge, config.FailSuffix, status.ExitCode, ), nil } // Returns the done file following the jobs status -func (j *Job) DoneFile(status Status) string { +func (j *Job) DoneFile(status Status, idx int) string { + + // Sanity check + if err := j.Def.isValidReqRootDirIdx(idx); err != nil { + utils.Panic(err.Error()) + } // Remove the suffix .failure.code_[0-9]+ from all the strings - origFile, err := j.Def.FailureSuffix.Replace(j.OriginalFile, "", -1, -1) + origFile, err := j.Def.FailureSuffix.Replace(j.OriginalFile[idx], "", -1, -1) if err != nil { // he assumption here is that the above function may return an error // but this error can only depend on the regexp, the replacement, @@ -175,9 +215,9 @@ func (j *Job) DoneFile(status Status) string { } if status.ExitCode == CodeSuccess { - return fmt.Sprintf("%v/%v.%v", j.Def.dirDone(), origFile, config.SuccessSuffix) + return fmt.Sprintf("%v/%v.%v", j.Def.dirDone(idx), origFile, config.SuccessSuffix) } else { - return fmt.Sprintf("%v/%v.failure.%v_%v", j.Def.dirDone(), origFile, config.FailSuffix, status.ExitCode) + return fmt.Sprintf("%v/%v.failure.%v_%v", 
j.Def.dirDone(idx), origFile, config.FailSuffix, status.ExitCode) } } @@ -187,7 +227,7 @@ func (j *Job) DoneFile(status Status) string { // the priority of the job. The 100 value is chosen to make the score easy to // mentally compute. func (j *Job) Score() int { - return 100*j.End + j.Def.Priority + return 100*j.End[0] + j.Def.Priority } // If the regexp is provided and non-nil, return the first match and returns the From aa6be761e522588261c7333badca09a898200d10 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Tue, 4 Feb 2025 16:44:10 +0000 Subject: [PATCH 13/48] compile all controller/files except *_test --- .../cmd/controller/controller/controller.go | 170 +++++++++--------- prover/cmd/controller/controller/executor.go | 21 ++- .../controller/controller/job_definition.go | 18 +- prover/cmd/controller/controller/jobs.go | 44 ++--- 4 files changed, 132 insertions(+), 121 deletions(-) diff --git a/prover/cmd/controller/controller/controller.go b/prover/cmd/controller/controller/controller.go index 38744f8e0..528e94013 100644 --- a/prover/cmd/controller/controller/controller.go +++ b/prover/cmd/controller/controller/controller.go @@ -88,100 +88,106 @@ func runController(ctx context.Context, cfg *config.Config) { // Success case status.ExitCode == CodeSuccess: - // NB: we already check that the response filename can be - // generated prior to running the command. So this actually - // will not panic. - respFile, err := job.ResponseFile() - tmpRespFile := job.TmpResponseFile(cfg) - if err != nil { - formatStr := "Could not generate the response file: %v (original request file: %v)" - utils.Panic(formatStr, err, job.OriginalFile) - } - - logrus.Infof( - "Moving the response file from the tmp response file `%v`, to the final response file: `%v`", - tmpRespFile, respFile, - ) - - if err := os.Rename(tmpRespFile, respFile); err != nil { - // @Alex: it is unclear how the rename operation could fail - // here. If this happens, we prefer removing the tmp file. 
- // Note that the operation is an `mv -f` - os.Remove(tmpRespFile) - - cLog.Errorf( - "Error renaming %v to %v: %v, removed the tmp file", - tmpRespFile, respFile, err, + for idx := range job.OriginalFile { + // NB: we already check that the response filename can be + // generated prior to running the command. So this actually + // will not panic. + respFile, err := job.ResponseFile(idx) + tmpRespFile := job.TmpResponseFile(cfg, idx) + if err != nil { + formatStr := "Could not generate the response file: %v (original request file: %v)" + utils.Panic(formatStr, err, job.OriginalFile[idx]) + } + + logrus.Infof( + "Moving the response file from the tmp response file `%v`, to the final response file: `%v`", + tmpRespFile, respFile, ) - } - // Move the inprogress to the done directory - cLog.Infof( - "Moving %v to %v with the success prefix", - job.OriginalFile, job.Def.dirDone(), - ) - - jobDone := job.DoneFile(status) - if err := os.Rename(job.InProgressPath(), jobDone); err != nil { - // When that happens, the only thing left to do is to log - // the error and let the inprogress file where it is. It - // will likely require a human intervention. - cLog.Errorf( - "Error renaming %v to %v: %v", - job.InProgressPath(), jobDone, err, + if err := os.Rename(tmpRespFile, respFile); err != nil { + // @Alex: it is unclear how the rename operation could fail + // here. If this happens, we prefer removing the tmp file. + // Note that the operation is an `mv -f` + os.Remove(tmpRespFile) + + cLog.Errorf( + "Error renaming %v to %v: %v, removed the tmp file", + tmpRespFile, respFile, err, + ) + } + + // Move the inprogress to the done directory + cLog.Infof( + "Moving %v to %v with the success prefix", + job.OriginalFile[idx], job.Def.dirDone(idx), ) + + jobDone := job.DoneFile(status, idx) + if err := os.Rename(job.InProgressPath(idx), jobDone); err != nil { + // When that happens, the only thing left to do is to log + // the error and let the inprogress file where it is. 
It + // will likely require a human intervention. + cLog.Errorf( + "Error renaming %v to %v: %v", + job.InProgressPath(idx), jobDone, err, + ) + } } // Defer to the large prover case job.Def.Name == jobNameExecution && isIn(status.ExitCode, cfg.Controller.DeferToOtherLargeCodes): - cLog.Infof("Renaming %v for the large prover", job.OriginalFile) - // Move the inprogress file back in the from directory with - // the new suffix - toLargePath, err := job.DeferToLargeFile(status) - if err != nil { - // There are two possibilities of errors. (1), the status - // we success but the above cases prevents that. The other - // case is that the suffix was not provided. But, during - // the config validation, we check already that the suffix - // must be provided if the size of the list of - // deferToOtherLargeCodes is non-zero. If the size of the - // list was zero, then there would be no way to reach this - // portion of the code given that the current exit code - // cannot be part of the empty list. Thus, this section is - // unreachable. - cLog.Errorf( - "error deriving the to-large-name of %v: %v", - job.InProgressPath(), err, - ) - } - - if err := os.Rename(job.InProgressPath(), toLargePath); err != nil { - // When that happens, the only thing left to do is to log - // the error and let the inprogress file where it is. It - // will likely require a human intervention. - cLog.Errorf( - "error renaming %v to %v: %v", - job.InProgressPath(), toLargePath, err, - ) + for idx := range job.OriginalFile { + cLog.Infof("Renaming %v for the large prover", job.OriginalFile[idx]) + // Move the inprogress file back in the from directory with + // the new suffix + toLargePath, err := job.DeferToLargeFile(status, idx) + if err != nil { + // There are two possibilities of errors. (1), the status + // we success but the above cases prevents that. The other + // case is that the suffix was not provided. 
But, during + // the config validation, we check already that the suffix + // must be provided if the size of the list of + // deferToOtherLargeCodes is non-zero. If the size of the + // list was zero, then there would be no way to reach this + // portion of the code given that the current exit code + // cannot be part of the empty list. Thus, this section is + // unreachable. + cLog.Errorf( + "error deriving the to-large-name of %v: %v", + job.InProgressPath(idx), err, + ) + } + + if err := os.Rename(job.InProgressPath(idx), toLargePath); err != nil { + // When that happens, the only thing left to do is to log + // the error and let the inprogress file where it is. It + // will likely require a human intervention. + cLog.Errorf( + "error renaming %v to %v: %v", + job.InProgressPath(idx), toLargePath, err, + ) + } } // Failure case default: - // Move the inprogress to the done directory - cLog.Infof( - "Moving %v with in %v with a failure suffix for code %v", - job.OriginalFile, job.Def.dirDone(), status.ExitCode, - ) - - jobFailed := job.DoneFile(status) - if err := os.Rename(job.InProgressPath(), jobFailed); err != nil { - // When that happens, the only thing left to do is to log - // the error and let the inprogress file where it is. It - // will likely require a human intervention. - cLog.Errorf( - "Error renaming %v to %v: %v", - job.InProgressPath(), jobFailed, err, + for idx := range job.OriginalFile { + // Move the inprogress to the done directory + cLog.Infof( + "Moving %v with in %v with a failure suffix for code %v", + job.OriginalFile[idx], job.Def.dirDone(idx), status.ExitCode, ) + + jobFailed := job.DoneFile(status, idx) + if err := os.Rename(job.InProgressPath(idx), jobFailed); err != nil { + // When that happens, the only thing left to do is to log + // the error and let the inprogress file where it is. It + // will likely require a human intervention. 
+ cLog.Errorf( + "Error renaming %v to %v: %v", + job.InProgressPath(idx), jobFailed, err, + ) + } } } } diff --git a/prover/cmd/controller/controller/executor.go b/prover/cmd/controller/controller/executor.go index 7fa244ee3..f62436b53 100644 --- a/prover/cmd/controller/controller/executor.go +++ b/prover/cmd/controller/controller/executor.go @@ -74,7 +74,9 @@ func (e *Executor) Run(job *Job) (status Status) { // if we are on a large instance and the job is execution with large suffix, // we directly run with large. // note: checking that locked job contains "large" is not super typesafe... - largeRun := job.Def.Name == jobNameExecution && e.Config.Execution.CanRunFullLarge && strings.Contains(job.LockedFile, config.LargeSuffix) + + // ASSUMED 0 index here + largeRun := job.Def.Name == jobNameExecution && e.Config.Execution.CanRunFullLarge && strings.Contains(job.LockedFile[0], config.LargeSuffix) // First, run the job normally cmd, err := e.buildCmd(job, largeRun) @@ -126,17 +128,19 @@ func (e *Executor) Run(job *Job) (status Status) { // Builds a command from a template to run, returns a status if it failed func (e *Executor) buildCmd(job *Job, large bool) (cmd string, err error) { - // The generates a name for the output file. Also attempts to generate the - // name of the final response file so that we can be sure it will be - // not fail being generated after having run the command. - if _, err := job.ResponseFile(); err != nil { + // Generate names for the output files. Also attempts to generate the + // names of the final response files so that we can be sure they will not + // fail being generated after having run the command. 
+ + // ASSUMED 0 index here + if _, err := job.ResponseFile(0); err != nil { logrus.Errorf( "could not generate the tmp response filename for %s: %v", job.OriginalFile, err, ) return "", err } - outFile := job.TmpResponseFile(e.Config) + outFile := job.TmpResponseFile(e.Config, 0) tmpl := e.Config.Controller.WorkerCmdTmpl if large { @@ -146,7 +150,7 @@ func (e *Executor) buildCmd(job *Job, large bool) (cmd string, err error) { // use the template to generate the command resource := Resource{ ConfFile: fConfig, - InFile: job.InProgressPath(), + InFile: job.InProgressPath(0), // Assume 0 index OutFile: outFile, } @@ -192,7 +196,8 @@ func runCmd(cmd string, job *Job, retry bool) Status { pname := processName(job, cmd) - metrics.CollectPreProcess(job.Def.Name, job.Start, job.End, false) + // ASSUMED 0 index + metrics.CollectPreProcess(job.Def.Name, job.Start[0], job.End[0], false) // Starts a new process from our command startTime := time.Now() diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index cef5b8740..5a7e66596 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -250,23 +250,23 @@ func (jd *JobDefinition) isValidOutputFileIdx(idx int) error { return nil } -func (jd *JobDefinition) dirFrom(idx int) string { - if err := jd.isValidReqRootDirIdx(idx); err != nil { +func (jd *JobDefinition) dirFrom(ipIdx int) string { + if err := jd.isValidReqRootDirIdx(ipIdx); err != nil { utils.Panic(err.Error()) } - return filepath.Join(jd.RequestsRootDir[idx], config.RequestsFromSubDir) + return filepath.Join(jd.RequestsRootDir[ipIdx], config.RequestsFromSubDir) } -func (jd *JobDefinition) dirDone(idx int) string { - if err := jd.isValidReqRootDirIdx(idx); err != nil { +func (jd *JobDefinition) dirDone(ipIdx int) string { + if err := jd.isValidReqRootDirIdx(ipIdx); err != nil { utils.Panic(err.Error()) } - return 
filepath.Join(jd.RequestsRootDir[idx], config.RequestsDoneSubDir) + return filepath.Join(jd.RequestsRootDir[ipIdx], config.RequestsDoneSubDir) } -func (jd *JobDefinition) dirTo(idx int) string { - if err := jd.isValidReqRootDirIdx(idx); err != nil { +func (jd *JobDefinition) dirTo(ipIdx int) string { + if err := jd.isValidReqRootDirIdx(ipIdx); err != nil { utils.Panic(err.Error()) } - return filepath.Join(jd.RequestsRootDir[idx], config.RequestsToSubDir) + return filepath.Join(jd.RequestsRootDir[ipIdx], config.RequestsToSubDir) } diff --git a/prover/cmd/controller/controller/jobs.go b/prover/cmd/controller/controller/jobs.go index 7cde7c6e5..2ddcbdb1e 100644 --- a/prover/cmd/controller/controller/jobs.go +++ b/prover/cmd/controller/controller/jobs.go @@ -93,25 +93,25 @@ func NewJob(jdef *JobDefinition, filenames []string) (j *Job, err error) { } // Returns the full path to the inprogress file -func (j *Job) InProgressPath(idx int) string { - if err := j.Def.isValidReqRootDirIdx(idx); err != nil { +func (j *Job) InProgressPath(ipIdx int) string { + if err := j.Def.isValidReqRootDirIdx(ipIdx); err != nil { utils.Panic(err.Error()) } - return filepath.Join(j.Def.dirFrom(idx), j.LockedFile[idx]) + return filepath.Join(j.Def.dirFrom(ipIdx), j.LockedFile[ipIdx]) } // Returns the name of the output file for the job at the specified index -func (j *Job) ResponseFile(idx int) (s string, err error) { +func (j *Job) ResponseFile(opIdx int) (s string, err error) { // Sanity check - if err := j.Def.isValidOutputFileIdx(idx); err != nil { + if err := j.Def.isValidOutputFileIdx(opIdx); err != nil { return "", err } // Run the template // REMARK: Check how it behaves on runtime w := &strings.Builder{} - err = j.Def.OutputFileTmpl[idx].Execute(w, OutputFileResource{ + err = j.Def.OutputFileTmpl[opIdx].Execute(w, OutputFileResource{ Job: *j, }) if err != nil { @@ -125,17 +125,17 @@ func (j *Job) ResponseFile(idx int) (s string, err error) { s = strings.ReplaceAll(s, "--", "-") // 
Append the dir_to filepath - s = path.Join(j.Def.dirTo(idx), s) + s = path.Join(j.Def.dirTo(opIdx), s) return s, nil } // Returns the name of the output file for the job -func (j *Job) TmpResponseFile(c *config.Config, idx int) (s string) { - if err := j.Def.isValidOutputFileIdx(idx); err != nil { +func (j *Job) TmpResponseFile(c *config.Config, opIdx int) (s string) { + if err := j.Def.isValidOutputFileIdx(opIdx); err != nil { utils.Panic(err.Error()) } - return path.Join(j.Def.dirTo(idx), "tmp-response-file."+c.Controller.LocalID+".json") + return path.Join(j.Def.dirTo(opIdx), "tmp-response-file."+c.Controller.LocalID+".json") } // This function returns the name of the input file, modified to indicate that it should be retried in "large mode". @@ -143,10 +143,10 @@ func (j *Job) TmpResponseFile(c *config.Config, idx int) (s string) { // However, this situation is unexpected because the configuration validation ensures that if an exit code requires // deferring the job to a larger machine, the suffix must be set. // Additionally, if the prover's status code is zero (indicating success), the function will return an error. -func (j *Job) DeferToLargeFile(status Status, idx int) (s string, err error) { +func (j *Job) DeferToLargeFile(status Status, ipIdx int) (s string, err error) { // Sanity check - if err := j.Def.isValidReqRootDirIdx(idx); err != nil { + if err := j.Def.isValidReqRootDirIdx(ipIdx); err != nil { return "", err } @@ -154,7 +154,7 @@ func (j *Job) DeferToLargeFile(status Status, idx int) (s string, err error) { if status.ExitCode == 0 { return "", fmt.Errorf( "cant defer to large %v, status code was zero", - j.OriginalFile[idx], + j.OriginalFile[ipIdx], ) } @@ -166,18 +166,18 @@ func (j *Job) DeferToLargeFile(status Status, idx int) (s string, err error) { // rename it to "<...>.large.large". That way, the file will not be picked // up a second time by the same large prover, creating an infinite retry // loop. 
- if strings.HasSuffix(j.OriginalFile[idx], suffixLarge) { + if strings.HasSuffix(j.OriginalFile[ipIdx], suffixLarge) { logrus.Warnf( "Deferring the large machine but the input file `%v` already has"+ " the suffix %v. Still renaming it to %v, but it will likely"+ // Returns the name of the input file modified so that it is retried in " not be picked up again", - j.OriginalFile[idx], suffixLarge, s, + j.OriginalFile[ipIdx], suffixLarge, s, ) } // Remove the suffix .failure.code_[0-9]+ from all the strings of the input // file. That way we do not propagate the previous errors. - origFile, err := j.Def.FailureSuffix.Replace(j.OriginalFile[idx], "", -1, -1) + origFile, err := j.Def.FailureSuffix.Replace(j.OriginalFile[ipIdx], "", -1, -1) if err != nil { // he assumption here is that the above function may return an error // but this error can only depend on the regexp, the replacement, @@ -189,22 +189,22 @@ func (j *Job) DeferToLargeFile(status Status, idx int) (s string, err error) { return fmt.Sprintf( "%v/%v.%v.failure.%v_%v", - j.Def.dirFrom(idx), origFile, + j.Def.dirFrom(ipIdx), origFile, suffixLarge, config.FailSuffix, status.ExitCode, ), nil } // Returns the done file following the jobs status -func (j *Job) DoneFile(status Status, idx int) string { +func (j *Job) DoneFile(status Status, ipIdx int) string { // Sanity check - if err := j.Def.isValidReqRootDirIdx(idx); err != nil { + if err := j.Def.isValidReqRootDirIdx(ipIdx); err != nil { utils.Panic(err.Error()) } // Remove the suffix .failure.code_[0-9]+ from all the strings - origFile, err := j.Def.FailureSuffix.Replace(j.OriginalFile[idx], "", -1, -1) + origFile, err := j.Def.FailureSuffix.Replace(j.OriginalFile[ipIdx], "", -1, -1) if err != nil { // he assumption here is that the above function may return an error // but this error can only depend on the regexp, the replacement, @@ -215,9 +215,9 @@ func (j *Job) DoneFile(status Status, idx int) string { } if status.ExitCode == CodeSuccess { - return 
fmt.Sprintf("%v/%v.%v", j.Def.dirDone(idx), origFile, config.SuccessSuffix) + return fmt.Sprintf("%v/%v.%v", j.Def.dirDone(ipIdx), origFile, config.SuccessSuffix) } else { - return fmt.Sprintf("%v/%v.failure.%v_%v", j.Def.dirDone(idx), origFile, config.FailSuffix, status.ExitCode) + return fmt.Sprintf("%v/%v.failure.%v_%v", j.Def.dirDone(ipIdx), origFile, config.FailSuffix, status.ExitCode) } } From e73d6f7c1efe659d023d2691026c2e3a98f3a4e0 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Tue, 4 Feb 2025 19:42:58 +0000 Subject: [PATCH 14/48] job definition unit tests successful --- .../cmd/controller/controller/fs_watcher.go | 2 +- .../controller/controller/job_definition.go | 6 +- .../controller/job_definition_test.go | 162 +++++++++--------- prover/cmd/controller/controller/jobs.go | 5 +- 4 files changed, 91 insertions(+), 84 deletions(-) diff --git a/prover/cmd/controller/controller/fs_watcher.go b/prover/cmd/controller/controller/fs_watcher.go index 96677dd2a..69f3f0a2b 100644 --- a/prover/cmd/controller/controller/fs_watcher.go +++ b/prover/cmd/controller/controller/fs_watcher.go @@ -151,7 +151,7 @@ func (fs *FsWatcher) appendJobFromDef(jdef *JobDefinition, jobs *[]*Job) (err er // not parseable to the target JobType, it will return an error. 
job, err := NewJob(jdef, dirEntStr) if err != nil { - fs.Logger.Debugf("Found invalid file `%v` : %v", err) + fs.Logger.Debugf("Found invalid file `%v` : %v", dirEntStr, err) continue } diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index 5a7e66596..c947b208b 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -136,7 +136,7 @@ func ExecutionDefinition(conf *config.Config) JobDefinition { []string{conf.Execution.RequestsRootDir}, []string{inputFilePattern}, []string{"exec-output-file"}, - []string{"{{.Start}}-{{.End}}-getZkProof.json"}, + []string{"{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-getZkProof.json"}, []ParamsRegexp{paramsRegexp}, config.FailSuffix, ) @@ -168,7 +168,7 @@ func CompressionDefinition(conf *config.Config) JobDefinition { []string{conf.BlobDecompression.RequestsRootDir}, []string{inputFilePattern}, []string{"compress-output-file"}, - []string{"{{.Start}}-{{.End}}-{{.ContentHash}}getZkBlobCompressionProof.json"}, + []string{"{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-{{ index .Job.ContentHash .Idx }}-getZkBlobCompressionProof.json"}, []ParamsRegexp{paramsRegexp}, config.FailSuffix, ) @@ -199,7 +199,7 @@ func AggregatedDefinition(conf *config.Config) JobDefinition { []string{conf.Aggregation.RequestsRootDir}, []string{inputFilePattern}, []string{"agreg-output-file"}, - []string{"{{.Start}}-{{.End}}-{{.ContentHash}}-getZkAggregatedProof.json"}, + []string{"{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-{{ index .Job.ContentHash .Idx }}-getZkAggregatedProof.json"}, []ParamsRegexp{paramsRegexp}, config.FailSuffix, ) diff --git a/prover/cmd/controller/controller/job_definition_test.go b/prover/cmd/controller/controller/job_definition_test.go index 7fcb51d2c..5e3281be1 100644 --- a/prover/cmd/controller/controller/job_definition_test.go +++ 
b/prover/cmd/controller/controller/job_definition_test.go @@ -9,13 +9,72 @@ import ( type inpFileNamesCases struct { Ext, Fail string - Fnames []string + Fnames [][]string ShouldMatch bool Explainer string - ExpectedOutput []string - ExpToLarge []string - ExpSuccess []string - ExpFailW2 []string + ExpectedOutput [][]string + ExpToLarge [][]string + ExpSuccess [][]string + ExpFailW2 [][]string +} + +func runInpFileTestCase(t *testing.T, def *JobDefinition, c inpFileNamesCases) { + + for i, fnames := range c.Fnames { + + // NB: if the regexp matches but the fields cannot be parsed + // this will panic and fail the test. This is intentional. All + // errors must be caught by the input file regexp. + job, err := NewJob(def, fnames) + + if c.ShouldMatch { + if !assert.NoError(t, err, fnames) { + // stop there for this iteration + continue + } + + // Then try to format the response of the job + for idx := range fnames { + resp, err := job.ResponseFile(idx) + if assert.NoErrorf(t, err, "cannot produce a response for job %s", fnames[idx]) { + assert.Equal(t, c.ExpectedOutput[i][idx], resp, "wrong output file") + } + + // Try the name of the large one. 
If the case is specifying some + // expected values + if len(c.ExpToLarge) > 0 { + toLarge, err := job.DeferToLargeFile( + Status{ExitCode: 137}, idx, + ) + + if assert.NoError(t, err, "cannot produce name for the too large job") { + assert.Equal(t, c.ExpToLarge[i][idx], toLarge) + } + } + + // Try the success file + if len(c.ExpSuccess) > 0 { + toSuccess := job.DoneFile(Status{ExitCode: 0}, idx) + assert.Equal(t, c.ExpSuccess[i][idx], toSuccess) + } + + // Try the code 2 file + if len(c.ExpFailW2) > 0 { + toFail2 := job.DoneFile(Status{ExitCode: 2}, idx) + assert.Equal(t, c.ExpFailW2[i][idx], toFail2) + } + } + + } else { + for i := range fnames { + assert.Errorf( + t, err, fnames[i], + "%v should not match %s", + fnames, def.InputFileRegexp[i].String(), + ) + } + } + } } // This tests ensures that the naming convention is respected by the file-watcher @@ -86,39 +145,39 @@ func TestExecutionInFileRegexp(t *testing.T) { testcase := []inpFileNamesCases{ { Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, + Fnames: [][]string{{correctM}, {correctWithFailM}, {correctWith2FailsM}, {missingEtv}, {missingStv}}, Explainer: "happy path, case M", - ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, - ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, - ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, - ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, + ExpectedOutput: [][]string{{respM}, {respWithFailM}, {respWith2FailsM}, {respM}, {respM}}, + ExpToLarge: [][]string{{toLargeM}, {toLargeWithFailM}, {toLargeWith2FailsM}, {toLargeWoEtv}, {toLargeWoStv}}, + ExpSuccess: [][]string{{successM}, {successWithFailM}, {successWith2FailsM}, {successtWoEtv}, {successMWoStv}}, + ExpFailW2: [][]string{{failM}, {failWithFailM}, {failWith2FailsM}, 
{failtWoEtv}, {failMWoStv}}, }, { Ext: "large", Fail: "code", ShouldMatch: true, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Fnames: [][]string{{correctL}, {correctWithFailL}, {correctWith2FailsL}}, Explainer: "happy path, case L", - ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, - ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, - ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, + ExpectedOutput: [][]string{{respL}, {respWithFailL}, {respWith2FailsL}}, + ExpSuccess: [][]string{{successL}, {successWithFailL}, {successWith2FailsL}}, + ExpFailW2: [][]string{{failL}, {failWithFailL}, {failWith2FailsL}}, }, { Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, + Fnames: [][]string{{correctL}, {correctWithFailL}, {correctWith2FailsL}}, Explainer: "M does not pick the files reserved for L", }, { Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, + Fnames: [][]string{{correctM}, {correctWithFailM}, {correctWith2FailsM}}, Explainer: "L does not pick the files reserved for M", }, { Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{notAPoint, badName}, + Fnames: [][]string{{notAPoint}, {badName}}, Explainer: "M does not pick obviously invalid files", }, { Ext: "large", Fail: "code", ShouldMatch: false, - Fnames: []string{missingEtv, missingStv, notAPoint, badName}, + Fnames: [][]string{{missingEtv}, {missingStv}, {notAPoint}, {badName}}, Explainer: "L does not pick obviously invalid files", }, } @@ -175,13 +234,13 @@ func TestCompressionInFileRegexp(t *testing.T) { testcase := []inpFileNamesCases{ { Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, withBlobHash, withBlobHash0x, withDoubleDash, with0x, missingCv, missingBCv, missingCCv}, + Fnames: [][]string{{correctM}, {correctWithFailM}, 
{correctWith2FailsM}, {withBlobHash}, {withBlobHash0x}, {withDoubleDash}, {with0x}, {missingCv}, {missingBCv}, {missingCCv}}, Explainer: "happy path, case M", - ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWithBlobHash, respWithBlobHash0x, respWithNoDoubleDash, respWith0x, respM, respM, respM}, + ExpectedOutput: [][]string{{respM}, {respWithFailM}, {respWith2FailsM}, {respWithBlobHash}, {respWithBlobHash0x}, {respWithNoDoubleDash}, {respWith0x}, {respM}, {respM}, {respM}}, }, { Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{etvNoCv, notAPoint, badName}, + Fnames: [][]string{{etvNoCv}, {notAPoint}, {badName}}, Explainer: "M does not pick obviously invalid files", }, } @@ -223,13 +282,13 @@ func TestAggregatedInFileRegexp(t *testing.T) { testcase := []inpFileNamesCases{ { Ext: "", Fail: "code", ShouldMatch: true, - Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingContentHash}, + Fnames: [][]string{{correctM}, {correctWithFailM}, {correctWith2FailsM}, {missingContentHash}}, Explainer: "happy path, case M", - ExpectedOutput: []string{respWithContentHash, respWithContentHash, respWithContentHash, respM}, + ExpectedOutput: [][]string{{respWithContentHash}, {respWithContentHash}, {respWithContentHash}, {respM}}, }, { Ext: "", Fail: "code", ShouldMatch: false, - Fnames: []string{withEtv, notAPoint, badName}, + Fnames: [][]string{{withEtv}, {notAPoint}, {badName}}, Explainer: "M does not pick obviously invalid files", }, } @@ -249,61 +308,6 @@ func TestAggregatedInFileRegexp(t *testing.T) { } } -func runInpFileTestCase(t *testing.T, def *JobDefinition, c inpFileNamesCases) { - - for i, fname := range c.Fnames { - - // NB: if the regexp matches but the fields cannot be parsed - // this will panic and fail the test. This is intentional. All - // errors must be caught by the input file regexp. 
- job, err := NewJob(def, fname) - - if c.ShouldMatch { - if !assert.NoError(t, err, fname) { - // stop there for this iteration - continue - } - - // Then try to format the response of the job - resp, err := job.ResponseFile() - if assert.NoErrorf(t, err, "cannot produce a response for job %s", fname) { - assert.Equal(t, c.ExpectedOutput[i], resp, "wrong output file") - } - - // Try the name of the large one. If the case is specifying some - // expected values - if len(c.ExpToLarge) > 0 { - toLarge, err := job.DeferToLargeFile( - Status{ExitCode: 137}, - ) - - if assert.NoError(t, err, "cannot produce name for the too large job") { - assert.Equal(t, c.ExpToLarge[i], toLarge) - } - } - - // Try the success file - if len(c.ExpSuccess) > 0 { - toSuccess := job.DoneFile(Status{ExitCode: 0}) - assert.Equal(t, c.ExpSuccess[i], toSuccess) - } - - // Try the code 2 file - if len(c.ExpFailW2) > 0 { - toFail2 := job.DoneFile(Status{ExitCode: 2}) - assert.Equal(t, c.ExpFailW2[i], toFail2) - } - - } else { - assert.Errorf( - t, err, fname, - "%v should not match %s", - fname, def.InputFileRegexp.String(), - ) - } - } -} - func TestFailSuffixMatching(t *testing.T) { testcases := []struct { diff --git a/prover/cmd/controller/controller/jobs.go b/prover/cmd/controller/controller/jobs.go index 2ddcbdb1e..25ea01e30 100644 --- a/prover/cmd/controller/controller/jobs.go +++ b/prover/cmd/controller/controller/jobs.go @@ -44,7 +44,8 @@ type Job struct { // OutputFileRessouce collects all the data needed to fill the output template // file. type OutputFileResource struct { - Job + Job Job + Idx int } // Parse a filename into a Job. 
Returns an error if the file does not @@ -113,6 +114,7 @@ func (j *Job) ResponseFile(opIdx int) (s string, err error) { w := &strings.Builder{} err = j.Def.OutputFileTmpl[opIdx].Execute(w, OutputFileResource{ Job: *j, + Idx: opIdx, }) if err != nil { return "", err @@ -226,6 +228,7 @@ func (j *Job) DoneFile(status Status, ipIdx int) string { // 3 if the job is an aggregation job. The lower the score the higher will be // the priority of the job. The 100 value is chosen to make the score easy to // mentally compute. +// ASSUMED 0 index here func (j *Job) Score() int { return 100*j.End[0] + j.Def.Priority } From 278b157db020bd27221155a09a79febcfab80728 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Wed, 5 Feb 2025 10:08:23 +0000 Subject: [PATCH 15/48] executor test pass --- .../controller/controller/controller_test.go | 317 ----------------- prover/cmd/controller/controller/executor.go | 11 +- .../controller/controller/executor_test.go | 40 +-- .../controller/controller/fs_watcher_test.go | 318 ++++++++++++++++++ 4 files changed, 346 insertions(+), 340 deletions(-) diff --git a/prover/cmd/controller/controller/controller_test.go b/prover/cmd/controller/controller/controller_test.go index 567933071..5b586fbf6 100644 --- a/prover/cmd/controller/controller/controller_test.go +++ b/prover/cmd/controller/controller/controller_test.go @@ -2,16 +2,10 @@ package controller import ( "context" - "errors" - "fmt" - "io/fs" "os" - "path" "testing" - "text/template" "time" - "github.com/consensys/linea-monorepo/prover/config" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "golang.org/x/exp/slices" @@ -154,314 +148,3 @@ func TestRunCommand(t *testing.T) { } } - -func TestFileWatcherM(t *testing.T) { - - confM, _ := setupFsTest(t) - - // Create a list of files - eFrom := confM.Execution.DirFrom - cFrom := confM.BlobDecompression.DirFrom - aFrom := confM.Aggregation.DirFrom - - exitCode := 0 // we are not interesting in the exit code here - - // The 
jobs, declared in the order in which they are expected to be found - - // Name of the expected inprogress files - expectedFNames := []struct { - FName string - Skip bool - }{ - { - FName: createTestInputFile(eFrom(), 0, 1, execJob, exitCode), - }, - { - Skip: true, // wrong directory - FName: createTestInputFile(eFrom(), 0, 1, aggregationJob, exitCode), - }, - { - FName: createTestInputFile(cFrom(), 0, 1, compressionJob, exitCode), - }, - { - FName: createTestInputFile(eFrom(), 1, 2, execJob, exitCode), - }, - { - FName: createTestInputFile(cFrom(), 1, 2, compressionJob, exitCode), - }, - { - FName: createTestInputFile(aFrom(), 0, 2, aggregationJob, exitCode), - }, - { - Skip: true, // for large only - FName: createTestInputFile(eFrom(), 2, 4, execJob, exitCode, forLarge), - }, - { - FName: createTestInputFile(eFrom(), 4, 5, execJob, exitCode), - }, - { - FName: createTestInputFile(cFrom(), 2, 5, compressionJob, exitCode), - }, - { - FName: createTestInputFile(aFrom(), 2, 5, aggregationJob, exitCode), - }, - } - - fw := NewFsWatcher(confM) - - for _, f := range expectedFNames { - if f.Skip { - continue - } - found := fw.GetBest() - if assert.NotNil(t, found, "did not find the job") { - assert.Equal(t, f.FName, found.OriginalFile) - } - } - assert.Nil(t, fw.GetBest(), "the queue should be empty now") -} - -func TestFileWatcherL(t *testing.T) { - - _, confL := setupFsTest(t) - - // Create a list of files - eFrom := confL.Execution.DirFrom() - - exitCode := 0 // we are not interesting in the exit code here - - // The jobs, declared in the order in which they are expected to be found - - // Name of the expected inprogress files - expectedFNames := []struct { - FName string - Skip bool - }{ - { - Skip: true, // not large - FName: createTestInputFile(eFrom, 0, 1, execJob, exitCode), - }, - { - Skip: true, // wrong directory - FName: createTestInputFile(eFrom, 0, 1, aggregationJob, exitCode), - }, - { - FName: createTestInputFile(eFrom, 1, 2, execJob, exitCode, forLarge), 
- }, - { - FName: createTestInputFile(eFrom, 2, 4, execJob, exitCode, forLarge), - }, - { - Skip: true, // not large - FName: createTestInputFile(eFrom, 4, 5, execJob, exitCode), - }, - { - Skip: true, // wrong dir - FName: createTestInputFile(eFrom, 2, 5, compressionJob, exitCode), - }, - } - - fw := NewFsWatcher(confL) - - for _, f := range expectedFNames { - if f.Skip { - continue - } - found := fw.GetBest() - if assert.NotNil(t, found, "did not find the job") { - assert.Equal(t, f.FName, found.OriginalFile) - } - } - assert.Nil(t, fw.GetBest(), "the queue should be empty now") -} - -func setupFsTest(t *testing.T) (confM, confL *config.Config) { - - // Testdir is going to contain the whole test directory - testDir := t.TempDir() - - const ( - dirfrom = "prover-requests" - dirto = "prover-responses" - dirdone = "requests-done" - dirlogs = "logs" - proverM = "prover-full-M" - proverL = "prover-full-L" - execution = "execution" - compression = "compression" - aggregation = "aggregation" - ) - - // Create a configuration using temporary directories - cmd := ` -/bin/sh {{.InFile}} -CODE=$? -if [ $CODE -eq 0 ]; then - touch {{.OutFile}} -fi -exit $CODE -` - cmdLarge := ` - /bin/sh {{.InFile}} - CODE=$? - CODE=$(($CODE - 12)) - if [ $CODE -eq 0 ]; then - touch {{.OutFile}} - fi - exit $CODE - ` - - cmdLargeInternal := ` -/bin/sh {{.InFile}} -CODE=$? 
-CODE=$(($CODE - 10)) -if [ $CODE -eq 0 ]; then - touch {{.OutFile}} -fi -exit $CODE -` - - // For a prover M - confM = &config.Config{ - Version: "0.2.4", - - Controller: config.Controller{ - EnableExecution: true, - EnableBlobDecompression: true, - EnableAggregation: true, - LocalID: proverM, - Prometheus: config.Prometheus{Enabled: false}, - RetryDelays: []int{0, 1}, - WorkerCmd: cmd, - WorkerCmdLarge: cmdLargeInternal, - DeferToOtherLargeCodes: []int{12, 137}, - RetryLocallyWithLargeCodes: []int{10, 77}, - }, - - Execution: config.Execution{ - WithRequestDir: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execution), - }, - }, - BlobDecompression: config.BlobDecompression{ - WithRequestDir: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, compression), - }, - }, - Aggregation: config.Aggregation{ - WithRequestDir: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, aggregation), - }, - }, - } - - _confL := *confM - confL = &_confL - confL.Controller.LocalID = proverL - confL.Controller.WorkerCmdLarge = cmdLarge - confL.Execution.CanRunFullLarge = true - - // confL = &config.GlobalConfig{ - // Version: "0.2.4", - - // Controller: config.Controller{ - // EnableExecution: true, - // EnableBlobDecompression: false, - // EnableAggregation: false, - // LocalID: proverL, - // Prometheus: config.Prometheus{Enabled: false}, - // RetryDelays: []int{0, 1}, - // WorkerCmd: cmdLarge, - // WorkerCmdLarge: cmdLarge, - // DeferToOtherLargeCodes: []int{12, 137}, - // RetryLocallyWithLargeCodes: []int{10, 77}, - // }, - // Execution: config.Execution{ - // WithRequestDir: config.WithRequestDir{ - // RequestsRootDir: path.Join(testDir, proverM, execution), - // }, - // CanRunFullLarge: true, - // }, - // BlobDecompression: config.BlobDecompression{ - // WithRequestDir: config.WithRequestDir{ - // RequestsRootDir: path.Join(testDir, proverM, compression), - // }, - // }, - // Aggregation: config.Aggregation{ - // 
WithRequestDir: config.WithRequestDir{ - // RequestsRootDir: path.Join(testDir, proverM, aggregation), - // }, - // }, - // } - - // ensure the template are parsed - confM.Controller.WorkerCmdTmpl = template.Must(template.New("worker").Parse(confM.Controller.WorkerCmd)) - confM.Controller.WorkerCmdLargeTmpl = template.Must(template.New("worker-large").Parse(confM.Controller.WorkerCmdLarge)) - confL.Controller.WorkerCmdTmpl = template.Must(template.New("worker").Parse(confL.Controller.WorkerCmd)) - confL.Controller.WorkerCmdLargeTmpl = template.Must(template.New("worker-large").Parse(confL.Controller.WorkerCmdLarge)) - - // Initialize the dirs (and give them all permissions). They will be - // wiped out after the test anyway. - permCode := fs.FileMode(0777) - err := errors.Join( - os.MkdirAll(confM.Execution.DirFrom(), permCode), - os.MkdirAll(confM.Execution.DirTo(), permCode), - os.MkdirAll(confM.Execution.DirDone(), permCode), - os.MkdirAll(confM.BlobDecompression.DirFrom(), permCode), - os.MkdirAll(confM.BlobDecompression.DirTo(), permCode), - os.MkdirAll(confM.BlobDecompression.DirDone(), permCode), - os.MkdirAll(confM.Aggregation.DirFrom(), permCode), - os.MkdirAll(confM.Aggregation.DirTo(), permCode), - os.MkdirAll(confM.Aggregation.DirDone(), permCode), - ) - - if err != nil { - t.Fatalf("could not create the temporary directories") - } - - return confM, confL -} - -const ( - execJob int = iota - compressionJob - aggregationJob - forLarge bool = true -) - -func createTestInputFile( - dirfrom string, - start, end, jobType, exitWith int, - large ...bool, -) (fname string) { - - // The filenames are expected to match the regexp pattern that we have in - // the job definition. 
- fmtString := "" - switch jobType { - case execJob: - fmtString = "%v-%v-etv0.1.2-stv1.2.3-getZkProof.json" - case compressionJob: - fmtString = "%v-%v-bcv0.1.2-ccv0.1.2-getZkBlobCompressionProof.json" - case aggregationJob: - fmtString = "%v-%v-deadbeef57-getZkAggregatedProof.json" - default: - panic("incorrect job type") - } - - filename := fmt.Sprintf(fmtString, start, end) - if len(large) > 0 && large[0] { - filename += ".large" - } - f, err := os.Create(path.Join(dirfrom, filename)) - if err != nil { - panic(err) - } - - // If called (with the test configuration (i.e. with sh), the file will - // immediately exit with the provided error code) - f.WriteString(fmt.Sprintf("#!/bin/sh\nexit %v", exitWith)) - f.Close() - - return filename -} diff --git a/prover/cmd/controller/controller/executor.go b/prover/cmd/controller/controller/executor.go index f62436b53..4516bb38a 100644 --- a/prover/cmd/controller/controller/executor.go +++ b/prover/cmd/controller/controller/executor.go @@ -42,7 +42,7 @@ type Status struct { type Resource struct { ConfFile string // The input and output file paths - InFile, OutFile string + InFile, OutFile []string } // The executor is responsible for running the commands specified by the jobs @@ -64,7 +64,8 @@ func NewExecutor(cfg *config.Config) *Executor { func (e *Executor) Run(job *Job) (status Status) { // The job should be locked - if len(job.LockedFile) == 0 { + // ASSUMED 0 index here + if len(job.LockedFile[0]) == 0 { return Status{ ExitCode: CodeFatal, What: "the job is not locked", @@ -140,6 +141,7 @@ func (e *Executor) buildCmd(job *Job, large bool) (cmd string, err error) { ) return "", err } + // ASSUMED 0 index here outFile := job.TmpResponseFile(e.Config, 0) tmpl := e.Config.Controller.WorkerCmdTmpl @@ -148,10 +150,11 @@ func (e *Executor) buildCmd(job *Job, large bool) (cmd string, err error) { } // use the template to generate the command + // ASSUMED 0 index resource := Resource{ ConfFile: fConfig, - InFile: 
job.InProgressPath(0), // Assume 0 index - OutFile: outFile, + InFile: []string{job.InProgressPath(0)}, + OutFile: []string{outFile}, } // Build the command and args from the job diff --git a/prover/cmd/controller/controller/executor_test.go b/prover/cmd/controller/controller/executor_test.go index d70112feb..e9fb8cf2e 100644 --- a/prover/cmd/controller/controller/executor_test.go +++ b/prover/cmd/controller/controller/executor_test.go @@ -18,11 +18,13 @@ func TestRetryWithLarge(t *testing.T) { // The template of the output file (returns a constant template with no // parameters) - OutputFileTmpl: template.Must( - template.New("output-file"). - Parse("output-fill-constant"), - ), - RequestsRootDir: "./testdata", + OutputFileTmpl: []*template.Template{ + template.Must( + template.New("output-file"). + Parse("output-fill-constant"), + ), + }, + RequestsRootDir: []string{"./testdata"}, } jobs := []struct { @@ -32,40 +34,40 @@ func TestRetryWithLarge(t *testing.T) { { Job: Job{ Def: &testDefinition, - LockedFile: "exit-0.sh", + LockedFile: []string{"exit-0.sh"}, // Not directly needed but helpful to track the process name - Start: 0, - End: 0, + Start: []int{0}, + End: []int{0}, }, ExpCode: 0, }, { Job: Job{ Def: &testDefinition, - LockedFile: "exit-1.sh", + LockedFile: []string{"exit-1.sh"}, // Not directly needed but helpful to track the process name - Start: 1, - End: 1, + Start: []int{1}, + End: []int{1}, }, ExpCode: 1, }, { Job: Job{ Def: &testDefinition, - LockedFile: "exit-77.sh", + LockedFile: []string{"exit-77.sh"}, // Not directly needed but helpful to track the process name - Start: 2, - End: 2, + Start: []int{2}, + End: []int{2}, }, ExpCode: 77 + 10, }, { Job: Job{ Def: &testDefinition, - LockedFile: "sigkill.sh", + LockedFile: []string{"sigkill.sh"}, // Not directly needed but helpful to track the process name - Start: 3, - End: 3, + Start: []int{3}, + End: []int{3}, }, ExpCode: 137, }, @@ -75,14 +77,14 @@ func TestRetryWithLarge(t *testing.T) { 
Controller: config.Controller{ WorkerCmdTmpl: template.Must( template.New("test-cmd"). - Parse("/bin/sh {{.InFile}}"), + Parse("/bin/sh {{index .InFile 0}}"), ), // And the large fields. The commands adds a +10 to the return code // to leave an evidence that the return code was obtained through // running the large command. WorkerCmdLargeTmpl: template.Must( template.New("test-cmd-large"). - Parse(`/bin/sh -c "/bin/sh {{.InFile}}"; exit $(($? + 10))`), + Parse(`/bin/sh -c "/bin/sh {{index .InFile 0}}"; exit $(($? + 10))`), ), RetryLocallyWithLargeCodes: config.DefaultRetryLocallyWithLargeCodes, }, diff --git a/prover/cmd/controller/controller/fs_watcher_test.go b/prover/cmd/controller/controller/fs_watcher_test.go index c3707ea9c..4388cb243 100644 --- a/prover/cmd/controller/controller/fs_watcher_test.go +++ b/prover/cmd/controller/controller/fs_watcher_test.go @@ -1,11 +1,25 @@ package controller import ( + "errors" + "fmt" + "io/fs" + "os" + "path" "testing" + "text/template" + "github.com/consensys/linea-monorepo/prover/config" "github.com/stretchr/testify/assert" ) +const ( + execJob int = iota + compressionJob + aggregationJob + forLarge bool = true +) + func TestLsName(t *testing.T) { dir := t.TempDir() @@ -24,3 +38,307 @@ func TestLsName(t *testing.T) { assert.NoErrorf(t, err, "error on tmp directory") assert.Emptyf(t, ls, "non empty dir") } + +func TestFileWatcherM(t *testing.T) { + + confM, _ := setupFsTest(t) + + // Create a list of files + eFrom := confM.Execution.DirFrom + cFrom := confM.BlobDecompression.DirFrom + aFrom := confM.Aggregation.DirFrom + + exitCode := 0 // we are not interesting in the exit code here + + // The jobs, declared in the order in which they are expected to be found + + // Name of the expected inprogress files + expectedFNames := []struct { + FName string + Skip bool + }{ + { + FName: createTestInputFile(eFrom(), 0, 1, execJob, exitCode), + }, + { + Skip: true, // wrong directory + FName: createTestInputFile(eFrom(), 0, 1, 
aggregationJob, exitCode), + }, + { + FName: createTestInputFile(cFrom(), 0, 1, compressionJob, exitCode), + }, + { + FName: createTestInputFile(eFrom(), 1, 2, execJob, exitCode), + }, + { + FName: createTestInputFile(cFrom(), 1, 2, compressionJob, exitCode), + }, + { + FName: createTestInputFile(aFrom(), 0, 2, aggregationJob, exitCode), + }, + { + Skip: true, // for large only + FName: createTestInputFile(eFrom(), 2, 4, execJob, exitCode, forLarge), + }, + { + FName: createTestInputFile(eFrom(), 4, 5, execJob, exitCode), + }, + { + FName: createTestInputFile(cFrom(), 2, 5, compressionJob, exitCode), + }, + { + FName: createTestInputFile(aFrom(), 2, 5, aggregationJob, exitCode), + }, + } + + fw := NewFsWatcher(confM) + + for _, f := range expectedFNames { + if f.Skip { + continue + } + found := fw.GetBest() + if assert.NotNil(t, found, "did not find the job") { + assert.Equal(t, f.FName, found.OriginalFile) + } + } + assert.Nil(t, fw.GetBest(), "the queue should be empty now") +} + +func TestFileWatcherL(t *testing.T) { + + _, confL := setupFsTest(t) + + // Create a list of files + eFrom := confL.Execution.DirFrom() + + exitCode := 0 // we are not interesting in the exit code here + + // The jobs, declared in the order in which they are expected to be found + + // Name of the expected inprogress files + expectedFNames := []struct { + FName string + Skip bool + }{ + { + Skip: true, // not large + FName: createTestInputFile(eFrom, 0, 1, execJob, exitCode), + }, + { + Skip: true, // wrong directory + FName: createTestInputFile(eFrom, 0, 1, aggregationJob, exitCode), + }, + { + FName: createTestInputFile(eFrom, 1, 2, execJob, exitCode, forLarge), + }, + { + FName: createTestInputFile(eFrom, 2, 4, execJob, exitCode, forLarge), + }, + { + Skip: true, // not large + FName: createTestInputFile(eFrom, 4, 5, execJob, exitCode), + }, + { + Skip: true, // wrong dir + FName: createTestInputFile(eFrom, 2, 5, compressionJob, exitCode), + }, + } + + fw := NewFsWatcher(confL) + + 
for _, f := range expectedFNames { + if f.Skip { + continue + } + found := fw.GetBest() + if assert.NotNil(t, found, "did not find the job") { + assert.Equal(t, f.FName, found.OriginalFile) + } + } + assert.Nil(t, fw.GetBest(), "the queue should be empty now") +} + +func setupFsTest(t *testing.T) (confM, confL *config.Config) { + + // Testdir is going to contain the whole test directory + testDir := t.TempDir() + + const ( + dirfrom = "prover-requests" + dirto = "prover-responses" + dirdone = "requests-done" + dirlogs = "logs" + proverM = "prover-full-M" + proverL = "prover-full-L" + execution = "execution" + compression = "compression" + aggregation = "aggregation" + ) + + // Create a configuration using temporary directories + cmd := ` +/bin/sh {{.InFile}} +CODE=$? +if [ $CODE -eq 0 ]; then + touch {{.OutFile}} +fi +exit $CODE +` + cmdLarge := ` + /bin/sh {{.InFile}} + CODE=$? + CODE=$(($CODE - 12)) + if [ $CODE -eq 0 ]; then + touch {{.OutFile}} + fi + exit $CODE + ` + + cmdLargeInternal := ` +/bin/sh {{.InFile}} +CODE=$? 
+CODE=$(($CODE - 10)) +if [ $CODE -eq 0 ]; then + touch {{.OutFile}} +fi +exit $CODE +` + + // For a prover M + confM = &config.Config{ + Version: "0.2.4", + + Controller: config.Controller{ + EnableExecution: true, + EnableBlobDecompression: true, + EnableAggregation: true, + LocalID: proverM, + Prometheus: config.Prometheus{Enabled: false}, + RetryDelays: []int{0, 1}, + WorkerCmd: cmd, + WorkerCmdLarge: cmdLargeInternal, + DeferToOtherLargeCodes: []int{12, 137}, + RetryLocallyWithLargeCodes: []int{10, 77}, + }, + + Execution: config.Execution{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execution), + }, + }, + BlobDecompression: config.BlobDecompression{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, compression), + }, + }, + Aggregation: config.Aggregation{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, aggregation), + }, + }, + } + + _confL := *confM + confL = &_confL + confL.Controller.LocalID = proverL + confL.Controller.WorkerCmdLarge = cmdLarge + confL.Execution.CanRunFullLarge = true + + // confL = &config.GlobalConfig{ + // Version: "0.2.4", + + // Controller: config.Controller{ + // EnableExecution: true, + // EnableBlobDecompression: false, + // EnableAggregation: false, + // LocalID: proverL, + // Prometheus: config.Prometheus{Enabled: false}, + // RetryDelays: []int{0, 1}, + // WorkerCmd: cmdLarge, + // WorkerCmdLarge: cmdLarge, + // DeferToOtherLargeCodes: []int{12, 137}, + // RetryLocallyWithLargeCodes: []int{10, 77}, + // }, + // Execution: config.Execution{ + // WithRequestDir: config.WithRequestDir{ + // RequestsRootDir: path.Join(testDir, proverM, execution), + // }, + // CanRunFullLarge: true, + // }, + // BlobDecompression: config.BlobDecompression{ + // WithRequestDir: config.WithRequestDir{ + // RequestsRootDir: path.Join(testDir, proverM, compression), + // }, + // }, + // Aggregation: config.Aggregation{ + // 
WithRequestDir: config.WithRequestDir{ + // RequestsRootDir: path.Join(testDir, proverM, aggregation), + // }, + // }, + // } + + // ensure the template are parsed + confM.Controller.WorkerCmdTmpl = template.Must(template.New("worker").Parse(confM.Controller.WorkerCmd)) + confM.Controller.WorkerCmdLargeTmpl = template.Must(template.New("worker-large").Parse(confM.Controller.WorkerCmdLarge)) + confL.Controller.WorkerCmdTmpl = template.Must(template.New("worker").Parse(confL.Controller.WorkerCmd)) + confL.Controller.WorkerCmdLargeTmpl = template.Must(template.New("worker-large").Parse(confL.Controller.WorkerCmdLarge)) + + // Initialize the dirs (and give them all permissions). They will be + // wiped out after the test anyway. + permCode := fs.FileMode(0777) + err := errors.Join( + os.MkdirAll(confM.Execution.DirFrom(), permCode), + os.MkdirAll(confM.Execution.DirTo(), permCode), + os.MkdirAll(confM.Execution.DirDone(), permCode), + os.MkdirAll(confM.BlobDecompression.DirFrom(), permCode), + os.MkdirAll(confM.BlobDecompression.DirTo(), permCode), + os.MkdirAll(confM.BlobDecompression.DirDone(), permCode), + os.MkdirAll(confM.Aggregation.DirFrom(), permCode), + os.MkdirAll(confM.Aggregation.DirTo(), permCode), + os.MkdirAll(confM.Aggregation.DirDone(), permCode), + ) + + if err != nil { + t.Fatalf("could not create the temporary directories") + } + + return confM, confL +} + +func createTestInputFile( + dirfrom string, + start, end, jobType, exitWith int, + large ...bool, +) (fname string) { + + // The filenames are expected to match the regexp pattern that we have in + // the job definition. 
+ fmtString := "" + switch jobType { + case execJob: + fmtString = "%v-%v-etv0.1.2-stv1.2.3-getZkProof.json" + case compressionJob: + fmtString = "%v-%v-bcv0.1.2-ccv0.1.2-getZkBlobCompressionProof.json" + case aggregationJob: + fmtString = "%v-%v-deadbeef57-getZkAggregatedProof.json" + default: + panic("incorrect job type") + } + + filename := fmt.Sprintf(fmtString, start, end) + if len(large) > 0 && large[0] { + filename += ".large" + } + f, err := os.Create(path.Join(dirfrom, filename)) + if err != nil { + panic(err) + } + + // If called (with the test configuration (i.e. with sh), the file will + // immediately exit with the provided error code) + f.WriteString(fmt.Sprintf("#!/bin/sh\nexit %v", exitWith)) + f.Close() + + return filename +} From e9d550d4342f8ce8412b9d3519b69b778e180116 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Wed, 5 Feb 2025 13:52:43 +0000 Subject: [PATCH 16/48] fs_watcher_test successful --- .../cmd/controller/controller/fs_watcher.go | 51 +++++----- .../controller/controller/fs_watcher_test.go | 92 ++++++++----------- 2 files changed, 61 insertions(+), 82 deletions(-) diff --git a/prover/cmd/controller/controller/fs_watcher.go b/prover/cmd/controller/controller/fs_watcher.go index 69f3f0a2b..f20047f9f 100644 --- a/prover/cmd/controller/controller/fs_watcher.go +++ b/prover/cmd/controller/controller/fs_watcher.go @@ -53,6 +53,8 @@ func NewFsWatcher(conf *config.Config) *FsWatcher { // Returns the list of jobs to perform by priorities. If no func (fs *FsWatcher) GetBest() (job *Job) { + fs.Logger.Debug("Starting GetBest") + // Fetches the full job list from all three directories. The fetching // operation will not ignore files if they are not in the expected // directory. For instance, if an aggregation file is in the directory @@ -73,7 +75,7 @@ func (fs *FsWatcher) GetBest() (job *Job) { // last job definition. 
jdef := &fs.JobToWatch[i] for j := range jdef.RequestsRootDir { - if err := fs.appendJobFromDef(jdef, &jobs); err != nil { + if err := fs.appendJobFromDef(jdef, &jobs, j); err != nil { fs.Logger.Errorf( "error trying to fetch job `%v` from dir %v: %v", jdef.Name, jdef.dirFrom(j), err, @@ -119,39 +121,34 @@ func (f *FsWatcher) lockBest(jobs []*Job) (pos int, success bool) { // Try appending a list of jobs that are parsed from a given directory. An error // is returned if the function fails to read the directory. -func (fs *FsWatcher) appendJobFromDef(jdef *JobDefinition, jobs *[]*Job) (err error) { +func (fs *FsWatcher) appendJobFromDef(jdef *JobDefinition, jobs *[]*Job, ipIdx int) (err error) { - numMatched := 0 - // Search and append the valid files into the list. - var dirEntStr []string - for idx := range jdef.RequestsRootDir { - dirFrom := jdef.dirFrom(idx) - fs.Logger.Tracef("Seeking jobs for %v in %v", jdef.Name, dirFrom) + dirFrom := jdef.dirFrom(ipIdx) + fs.Logger.Tracef("Seeking jobs for %v in %v", jdef.Name, dirFrom) - // This will fail if the provided directory is not a directory - dirents, err := lsname(dirFrom) - if err != nil { - return fmt.Errorf("cannot ls `%s` : %v", dirFrom, err) - } - - for _, dirent := range dirents { + // This will fail if the provided directory is not a directory + dirents, err := lsname(dirFrom) + if err != nil { + return fmt.Errorf("cannot ls `%s` : %v", dirFrom, err) + } + numMatched := 0 - fs.Logger.Tracef("Examining entry %s in %s", dirFrom, dirent.Name()) + // Search and append the valid files into the list. 
+ for _, dirent := range dirents { - // Ignore directories - if !dirent.Type().IsRegular() { - fs.Logger.Debugf("Ignoring directory `%s`", dirent.Name()) - continue - } + fs.Logger.Tracef("Examining entry %s in %s", dirFrom, dirent.Name()) - dirEntStr = append(dirEntStr, dirent.Name()) + // Ignore directories + if !dirent.Type().IsRegular() { + fs.Logger.Debugf("Ignoring directory `%s`", dirent.Name()) + continue } - // Attempt to construct a job from the filenames. If the filenames is + // Attempt to construct a job from the filename. If the filename is // not parseable to the target JobType, it will return an error. - job, err := NewJob(jdef, dirEntStr) + job, err := NewJob(jdef, []string{dirent.Name()}) if err != nil { - fs.Logger.Debugf("Found invalid file `%v` : %v", dirEntStr, err) + fs.Logger.Debugf("Found invalid file `%v` : %v", dirent.Name(), err) continue } @@ -160,8 +157,10 @@ func (fs *FsWatcher) appendJobFromDef(jdef *JobDefinition, jobs *[]*Job) (err er *jobs = append(*jobs, job) numMatched++ } + // Pass prometheus metrics - metrics.CollectFS(jdef.Name, len(dirEntStr), numMatched) + metrics.CollectFS(jdef.Name, len(dirents), numMatched) + return nil } diff --git a/prover/cmd/controller/controller/fs_watcher_test.go b/prover/cmd/controller/controller/fs_watcher_test.go index 4388cb243..8ffa44a6f 100644 --- a/prover/cmd/controller/controller/fs_watcher_test.go +++ b/prover/cmd/controller/controller/fs_watcher_test.go @@ -21,7 +21,6 @@ const ( ) func TestLsName(t *testing.T) { - dir := t.TempDir() // When the dir doesn't exist we should return an error @@ -40,7 +39,6 @@ func TestLsName(t *testing.T) { } func TestFileWatcherM(t *testing.T) { - confM, _ := setupFsTest(t) // Create a list of files @@ -48,11 +46,11 @@ func TestFileWatcherM(t *testing.T) { cFrom := confM.BlobDecompression.DirFrom aFrom := confM.Aggregation.DirFrom - exitCode := 0 // we are not interesting in the exit code here + exitCode := 0 // we are not interested in the exit code here // 
The jobs, declared in the order in which they are expected to be found - // Name of the expected inprogress files + // Name of the expected in-progress files expectedFNames := []struct { FName string Skip bool @@ -92,31 +90,40 @@ func TestFileWatcherM(t *testing.T) { } fw := NewFsWatcher(confM) + // t.Logf("File System Watch Jobs to watch: %v", len(fw.JobToWatch)) + // t.Logf("File System Watch Jobs file names: %v", fw.JobToWatch[0].RequestsRootDir) + // t.Logf("File System Watch Jobs file names: %v", fw.JobToWatch[0].RequestsRootDir[0]) + + // t.Logf("File System Watch Jobs file names: %v", fw.JobToWatch[1].RequestsRootDir) + // t.Logf("File System Watch Jobs file names: %v", fw.JobToWatch[2].RequestsRootDir) for _, f := range expectedFNames { if f.Skip { continue } + t.Logf("Looking for job with file: %s", f.FName) found := fw.GetBest() + if found == nil { + t.Logf("Did not find the job for file: %s", f.FName) + } if assert.NotNil(t, found, "did not find the job") { - assert.Equal(t, f.FName, found.OriginalFile) + assert.Equal(t, f.FName, found.OriginalFile[0]) // ASSUMED 0 index here } } assert.Nil(t, fw.GetBest(), "the queue should be empty now") } func TestFileWatcherL(t *testing.T) { - _, confL := setupFsTest(t) // Create a list of files eFrom := confL.Execution.DirFrom() - exitCode := 0 // we are not interesting in the exit code here + exitCode := 0 // we are not interested in the exit code here // The jobs, declared in the order in which they are expected to be found - // Name of the expected inprogress files + // Name of the expected in-progress files expectedFNames := []struct { FName string Skip bool @@ -151,16 +158,20 @@ func TestFileWatcherL(t *testing.T) { if f.Skip { continue } + t.Logf("Looking for job with file: %s", f.FName) found := fw.GetBest() + if found == nil { + t.Logf("Did not find the job for file: %s", f.FName) + } if assert.NotNil(t, found, "did not find the job") { - assert.Equal(t, f.FName, found.OriginalFile) + assert.Equal(t, 
f.FName, found.OriginalFile[0]) // ASSUMED 0 index here } } assert.Nil(t, fw.GetBest(), "the queue should be empty now") } +// Sets up the test environment by creating temporary directories and configurations for the prover. func setupFsTest(t *testing.T) (confM, confL *config.Config) { - // Testdir is going to contain the whole test directory testDir := t.TempDir() @@ -177,30 +188,32 @@ func setupFsTest(t *testing.T) (confM, confL *config.Config) { ) // Create a configuration using temporary directories + // Defines three command templates for different types of jobs. + // These templates will be used to create shell commands for the worker processes. cmd := ` -/bin/sh {{.InFile}} +/bin/sh {{index .InFile 0}} CODE=$? if [ $CODE -eq 0 ]; then - touch {{.OutFile}} + touch {{index .OutFile 0}} fi exit $CODE ` cmdLarge := ` - /bin/sh {{.InFile}} - CODE=$? - CODE=$(($CODE - 12)) - if [ $CODE -eq 0 ]; then - touch {{.OutFile}} - fi - exit $CODE - ` +/bin/sh {{index .InFile 0}} +CODE=$? +CODE=$(($CODE - 12)) +if [ $CODE -eq 0 ]; then + touch {{index .OutFile 0}} +fi +exit $CODE +` cmdLargeInternal := ` -/bin/sh {{.InFile}} +/bin/sh {{index .InFile 0}} CODE=$? 
CODE=$(($CODE - 10)) if [ $CODE -eq 0 ]; then - touch {{.OutFile}} + touch {{index .OutFile 0}} fi exit $CODE ` @@ -245,39 +258,6 @@ exit $CODE confL.Controller.WorkerCmdLarge = cmdLarge confL.Execution.CanRunFullLarge = true - // confL = &config.GlobalConfig{ - // Version: "0.2.4", - - // Controller: config.Controller{ - // EnableExecution: true, - // EnableBlobDecompression: false, - // EnableAggregation: false, - // LocalID: proverL, - // Prometheus: config.Prometheus{Enabled: false}, - // RetryDelays: []int{0, 1}, - // WorkerCmd: cmdLarge, - // WorkerCmdLarge: cmdLarge, - // DeferToOtherLargeCodes: []int{12, 137}, - // RetryLocallyWithLargeCodes: []int{10, 77}, - // }, - // Execution: config.Execution{ - // WithRequestDir: config.WithRequestDir{ - // RequestsRootDir: path.Join(testDir, proverM, execution), - // }, - // CanRunFullLarge: true, - // }, - // BlobDecompression: config.BlobDecompression{ - // WithRequestDir: config.WithRequestDir{ - // RequestsRootDir: path.Join(testDir, proverM, compression), - // }, - // }, - // Aggregation: config.Aggregation{ - // WithRequestDir: config.WithRequestDir{ - // RequestsRootDir: path.Join(testDir, proverM, aggregation), - // }, - // }, - // } - // ensure the template are parsed confM.Controller.WorkerCmdTmpl = template.Must(template.New("worker").Parse(confM.Controller.WorkerCmd)) confM.Controller.WorkerCmdLargeTmpl = template.Must(template.New("worker-large").Parse(confM.Controller.WorkerCmdLarge)) @@ -306,12 +286,12 @@ exit $CODE return confM, confL } +// Creates test input files with specific filenames and exit codes to simulate job files for the file system watcher. func createTestInputFile( dirfrom string, start, end, jobType, exitWith int, large ...bool, ) (fname string) { - // The filenames are expected to match the regexp pattern that we have in // the job definition. 
fmtString := "" From 82a48a5f496c921d5c45043403492a866a562c3d Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Wed, 5 Feb 2025 14:13:40 +0000 Subject: [PATCH 17/48] controller unit tests pass --- prover/cmd/controller/controller/controller.go | 4 ++-- prover/cmd/controller/controller/job_definition.go | 6 +++--- prover/cmd/controller/controller/jobs.go | 6 +++--- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/prover/cmd/controller/controller/controller.go b/prover/cmd/controller/controller/controller.go index 528e94013..999bfcaaa 100644 --- a/prover/cmd/controller/controller/controller.go +++ b/prover/cmd/controller/controller/controller.go @@ -70,9 +70,9 @@ func runController(ctx context.Context, cfg *config.Config) { numRetrySoFar++ noJobFoundMsg := "found no jobs in the queue" if numRetrySoFar > 5 { - cLog.Debugf(noJobFoundMsg) + cLog.Debugf("%v", noJobFoundMsg) } else { - cLog.Infof(noJobFoundMsg) + cLog.Infof("%v", noJobFoundMsg) } continue } diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index c947b208b..654f5f2b7 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -252,21 +252,21 @@ func (jd *JobDefinition) isValidOutputFileIdx(idx int) error { func (jd *JobDefinition) dirFrom(ipIdx int) string { if err := jd.isValidReqRootDirIdx(ipIdx); err != nil { - utils.Panic(err.Error()) + utils.Panic("dirFrom:%v", err.Error()) } return filepath.Join(jd.RequestsRootDir[ipIdx], config.RequestsFromSubDir) } func (jd *JobDefinition) dirDone(ipIdx int) string { if err := jd.isValidReqRootDirIdx(ipIdx); err != nil { - utils.Panic(err.Error()) + utils.Panic("dirDone:%v", err.Error()) } return filepath.Join(jd.RequestsRootDir[ipIdx], config.RequestsDoneSubDir) } func (jd *JobDefinition) dirTo(ipIdx int) string { if err := jd.isValidReqRootDirIdx(ipIdx); err != nil { - utils.Panic(err.Error()) + utils.Panic("dirTo:%v", 
err.Error()) } return filepath.Join(jd.RequestsRootDir[ipIdx], config.RequestsToSubDir) } diff --git a/prover/cmd/controller/controller/jobs.go b/prover/cmd/controller/controller/jobs.go index 25ea01e30..7a0b74a9d 100644 --- a/prover/cmd/controller/controller/jobs.go +++ b/prover/cmd/controller/controller/jobs.go @@ -96,7 +96,7 @@ func NewJob(jdef *JobDefinition, filenames []string) (j *Job, err error) { // Returns the full path to the inprogress file func (j *Job) InProgressPath(ipIdx int) string { if err := j.Def.isValidReqRootDirIdx(ipIdx); err != nil { - utils.Panic(err.Error()) + utils.Panic("InProgressPath panic:%v", err.Error()) } return filepath.Join(j.Def.dirFrom(ipIdx), j.LockedFile[ipIdx]) } @@ -135,7 +135,7 @@ func (j *Job) ResponseFile(opIdx int) (s string, err error) { // Returns the name of the output file for the job func (j *Job) TmpResponseFile(c *config.Config, opIdx int) (s string) { if err := j.Def.isValidOutputFileIdx(opIdx); err != nil { - utils.Panic(err.Error()) + utils.Panic("TmpResponseFile panic:%v", err.Error()) } return path.Join(j.Def.dirTo(opIdx), "tmp-response-file."+c.Controller.LocalID+".json") } @@ -202,7 +202,7 @@ func (j *Job) DoneFile(status Status, ipIdx int) string { // Sanity check if err := j.Def.isValidReqRootDirIdx(ipIdx); err != nil { - utils.Panic(err.Error()) + utils.Panic("DoneFile panic:%v", err.Error()) } // Remove the suffix .failure.code_[0-9]+ from all the strings From 548e05387a7756724b787dad64f3a72867e02442 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Wed, 5 Feb 2025 14:45:27 +0000 Subject: [PATCH 18/48] remove 0 idx assumption in fs_watcher_test.go --- .../controller/controller/fs_watcher_test.go | 48 ++++++++----------- prover/cmd/controller/controller/jobs.go | 7 ++- 2 files changed, 24 insertions(+), 31 deletions(-) diff --git a/prover/cmd/controller/controller/fs_watcher_test.go b/prover/cmd/controller/controller/fs_watcher_test.go index 8ffa44a6f..fd57f7de3 100644 --- 
a/prover/cmd/controller/controller/fs_watcher_test.go +++ b/prover/cmd/controller/controller/fs_watcher_test.go @@ -52,51 +52,44 @@ func TestFileWatcherM(t *testing.T) { // Name of the expected in-progress files expectedFNames := []struct { - FName string + FName []string Skip bool }{ { - FName: createTestInputFile(eFrom(), 0, 1, execJob, exitCode), + FName: []string{createTestInputFile(eFrom(), 0, 1, execJob, exitCode)}, }, { Skip: true, // wrong directory - FName: createTestInputFile(eFrom(), 0, 1, aggregationJob, exitCode), + FName: []string{createTestInputFile(eFrom(), 0, 1, aggregationJob, exitCode)}, }, { - FName: createTestInputFile(cFrom(), 0, 1, compressionJob, exitCode), + FName: []string{createTestInputFile(cFrom(), 0, 1, compressionJob, exitCode)}, }, { - FName: createTestInputFile(eFrom(), 1, 2, execJob, exitCode), + FName: []string{createTestInputFile(eFrom(), 1, 2, execJob, exitCode)}, }, { - FName: createTestInputFile(cFrom(), 1, 2, compressionJob, exitCode), + FName: []string{createTestInputFile(cFrom(), 1, 2, compressionJob, exitCode)}, }, { - FName: createTestInputFile(aFrom(), 0, 2, aggregationJob, exitCode), + FName: []string{createTestInputFile(aFrom(), 0, 2, aggregationJob, exitCode)}, }, { Skip: true, // for large only - FName: createTestInputFile(eFrom(), 2, 4, execJob, exitCode, forLarge), + FName: []string{createTestInputFile(eFrom(), 2, 4, execJob, exitCode, forLarge)}, }, { - FName: createTestInputFile(eFrom(), 4, 5, execJob, exitCode), + FName: []string{createTestInputFile(eFrom(), 4, 5, execJob, exitCode)}, }, { - FName: createTestInputFile(cFrom(), 2, 5, compressionJob, exitCode), + FName: []string{createTestInputFile(cFrom(), 2, 5, compressionJob, exitCode)}, }, { - FName: createTestInputFile(aFrom(), 2, 5, aggregationJob, exitCode), + FName: []string{createTestInputFile(aFrom(), 2, 5, aggregationJob, exitCode)}, }, } fw := NewFsWatcher(confM) - // t.Logf("File System Watch Jobs to watch: %v", len(fw.JobToWatch)) - // t.Logf("File 
System Watch Jobs file names: %v", fw.JobToWatch[0].RequestsRootDir) - // t.Logf("File System Watch Jobs file names: %v", fw.JobToWatch[0].RequestsRootDir[0]) - - // t.Logf("File System Watch Jobs file names: %v", fw.JobToWatch[1].RequestsRootDir) - // t.Logf("File System Watch Jobs file names: %v", fw.JobToWatch[2].RequestsRootDir) - for _, f := range expectedFNames { if f.Skip { continue @@ -107,10 +100,11 @@ func TestFileWatcherM(t *testing.T) { t.Logf("Did not find the job for file: %s", f.FName) } if assert.NotNil(t, found, "did not find the job") { - assert.Equal(t, f.FName, found.OriginalFile[0]) // ASSUMED 0 index here + assert.Equal(t, f.FName, found.OriginalFile) } } assert.Nil(t, fw.GetBest(), "the queue should be empty now") + } func TestFileWatcherL(t *testing.T) { @@ -125,30 +119,30 @@ func TestFileWatcherL(t *testing.T) { // Name of the expected in-progress files expectedFNames := []struct { - FName string + FName []string Skip bool }{ { Skip: true, // not large - FName: createTestInputFile(eFrom, 0, 1, execJob, exitCode), + FName: []string{createTestInputFile(eFrom, 0, 1, execJob, exitCode)}, }, { Skip: true, // wrong directory - FName: createTestInputFile(eFrom, 0, 1, aggregationJob, exitCode), + FName: []string{createTestInputFile(eFrom, 0, 1, aggregationJob, exitCode)}, }, { - FName: createTestInputFile(eFrom, 1, 2, execJob, exitCode, forLarge), + FName: []string{createTestInputFile(eFrom, 1, 2, execJob, exitCode, forLarge)}, }, { - FName: createTestInputFile(eFrom, 2, 4, execJob, exitCode, forLarge), + FName: []string{createTestInputFile(eFrom, 2, 4, execJob, exitCode, forLarge)}, }, { Skip: true, // not large - FName: createTestInputFile(eFrom, 4, 5, execJob, exitCode), + FName: []string{createTestInputFile(eFrom, 4, 5, execJob, exitCode)}, }, { Skip: true, // wrong dir - FName: createTestInputFile(eFrom, 2, 5, compressionJob, exitCode), + FName: []string{createTestInputFile(eFrom, 2, 5, compressionJob, exitCode)}, }, } @@ -164,7 +158,7 @@ func 
TestFileWatcherL(t *testing.T) { t.Logf("Did not find the job for file: %s", f.FName) } if assert.NotNil(t, found, "did not find the job") { - assert.Equal(t, f.FName, found.OriginalFile[0]) // ASSUMED 0 index here + assert.Equal(t, f.FName, found.OriginalFile) } } assert.Nil(t, fw.GetBest(), "the queue should be empty now") diff --git a/prover/cmd/controller/controller/jobs.go b/prover/cmd/controller/controller/jobs.go index 7a0b74a9d..379c57c80 100644 --- a/prover/cmd/controller/controller/jobs.go +++ b/prover/cmd/controller/controller/jobs.go @@ -224,11 +224,10 @@ func (j *Job) DoneFile(status Status, ipIdx int) string { } // Returns the score of a JOB. The score is obtained as 100*job.Stop + P, where -// P is 1 if the job is an execution job, 2 if the job is a compression job and -// 3 if the job is an aggregation job. The lower the score the higher will be +// P is 0 if the job is an execution job, 1 if the job is a compression job and +// 2 if the job is an aggregation job. The lower the score the higher will be // the priority of the job. The 100 value is chosen to make the score easy to -// mentally compute. -// ASSUMED 0 index here +// mentally compute. 
ASSUMED 0 index here func (j *Job) Score() int { return 100*j.End[0] + j.Def.Priority } From 96aa669b10386871f8f3d51b616adc537f2d233c Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Thu, 6 Feb 2025 10:00:58 +0000 Subject: [PATCH 19/48] init limitless job def --- prover/cmd/controller/controller/executor.go | 1 + .../controller/controller/job_definition.go | 21 +- .../controller/job_definition_limitless.go | 389 +++++++----------- 3 files changed, 165 insertions(+), 246 deletions(-) diff --git a/prover/cmd/controller/controller/executor.go b/prover/cmd/controller/controller/executor.go index 4516bb38a..20f1f31d5 100644 --- a/prover/cmd/controller/controller/executor.go +++ b/prover/cmd/controller/controller/executor.go @@ -65,6 +65,7 @@ func (e *Executor) Run(job *Job) (status Status) { // The job should be locked // ASSUMED 0 index here + if len(job.LockedFile[0]) == 0 { return Status{ ExitCode: CodeFatal, diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index 654f5f2b7..f89c45f05 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -123,13 +123,6 @@ func ExecutionDefinition(conf *config.Config) JobDefinition { config.FailSuffix, ) - paramsRegexp := ParamsRegexp{ - Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), - End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), - Etv: matchVersionWithPrefix("etv"), - Stv: matchVersionWithPrefix("stv"), - } - jobDef, err := commonJobDefinition( jobNameExecution, 0, @@ -137,7 +130,7 @@ func ExecutionDefinition(conf *config.Config) JobDefinition { []string{inputFilePattern}, []string{"exec-output-file"}, []string{"{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-getZkProof.json"}, - []ParamsRegexp{paramsRegexp}, + cmnExecParamsRegexp(), config.FailSuffix, ) if err != nil { @@ -270,3 +263,15 @@ func (jd *JobDefinition) dirTo(ipIdx int) string { } return 
filepath.Join(jd.RequestsRootDir[ipIdx], config.RequestsToSubDir) } + +func cmnExecParamsRegexp() []ParamsRegexp { + paramsRegexp := []ParamsRegexp{ + { + Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), + End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), + Etv: matchVersionWithPrefix("etv"), + Stv: matchVersionWithPrefix("stv"), + }, + } + return paramsRegexp +} diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index ea1d96750..0c840afe6 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -1,240 +1,153 @@ package controller -// import ( -// "fmt" - -// "github.com/consensys/linea-monorepo/prover/config" -// "github.com/dlclark/regexp2" -// ) - -// // Job definitions are defined such that each job has a single request and response file -// // These jobs will execute asynchronously based on their set priorities -// const ( -// // Bootstrap -// job_Exec_Bootstrap_GLSubmodule = "exec-bootstrap-GLsubmodule" -// job_Exec_Bootstrap_DistMetadata = "exec-bootstrap-metadata" - -// // Global-Local subprovers -// job_Exec_GL_RndBeacon = "exec-GL-rndbeacon" -// job_Exec_GL = "exec-GL" - -// // Random Beacon -// job_Exec_RndBeacon_LPP = "exec-rndbeacon" -// job_Exec_Bootstrap_RndBeacon = "exec-bootstrap-rndbeacon" - -// // LPP-subprovers -// job_Exec_LPP = "exec-LPP" - -// // Conglomerator -// job_Exec_Congolomerate_LPP = "exec-conglo-LPP" -// job_Exec_Congolomerate_GL = "exec-conglo-GL" -// job_Exec_Congolomerate_Bootstrap_Metadata = "exec-conglo-metadata" -// ) - -// // Priorities -// const ( -// priority_Exec_Bootstrap_GLSubmodule = 0 -// priority_Exec_Bootstrap_DistMetadata = 0 - -// priority_Exec_GL_RndBeacon = 1 -// priority_Exec_GL = 1 - -// priority_Exec_RndBeacon_LPP = 2 -// priority_Exec_Bootstrap_RndBeacon = 2 - -// priority_Exec_LPP = 3 - -// priority_Exec_Congolomerate_LPP 
= 4 -// priority_Exec_Congolomerate_GL = 4 -// priority_Exec_Congolomerate_Metadata = 4 -// ) - -// // Input file patterns -// const ( -// // Bootstrap I/p file is the usual execution req. file -// exec_Bootstrap_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` - -// // GL input -// exec_Bootstrap_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_GLSubmodule\.json%v(\.failure\.%v_[0-9]+)*$` - -// // Rnd Beacon I/p -// exec_Bootstrap_RndBeacon_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` -// exec_GL_RndBeacon_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_GL_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` - -// // LPP Input -// exec_LPP_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` - -// // Conglomerator Input -// exec_Conglomerate_GL_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_GL\.json%v(\.failure\.%v_[0-9]+)*$` -// exec_Conglomerate_LPP_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_LPP\.json%v(\.failure\.%v_[0-9]+)*$` -// exec_Conglomerate_Bootstrap_DistMetadata_InputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` -// ) - -// // Ouput File patterns and templates -// const ( -// exec_Bootstrap_GLSubmodule_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_Bootstrap_GLSubmodule.json" -// exec_Bootstrap_Submodule_Tmpl = "exec-bootstrap-GLsubmodule-req-file" - -// exec_Bootstrap_DistMetadata_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_Bootstrap_DistMetadata.json" -// exec_Bootstrap_DistMetadata_Tmpl = "exec-bootstrap-submodule-distmetadata-file" - -// // Global-Local subprovers -// exec_GL_RndBeacon_File = 
"{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_GL_RndBeacon.json" -// exec_GL_RndBeacon_Tmpl = "exec-GL-Beacon-file" - -// exec_GL_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_GL.json" -// exec_GL_Tmpl = "exec-GL-output-file" - -// // Random Beacon -// exec_RndBeacon_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_RndBeacon.json" -// exec_RndBeacon_Tmpl = "exec-rndbeacon-output-file" - -// // LPP-subprovers -// exec_LPP_File = "{{.Start}}-{{.End}}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_LPP.json" -// exec_LPP_Tmpl = "exec-LPP-output-file" - -// // Conglomerator -// exec_Congolomerate_File = "{{.Start}}-{{.End}}-getZkProof.json" -// exec_Congolomerate_Tmpl = "exec-output-file" -// ) - -// // createJobDefinition creates a new JobDefinition with the provided parameters. -// // It sets up the job's name, priority, request directory, input file pattern, and output template. -// // The function returns a pointer to the JobDefinition and an error if any occurs during the setup. 
-// func createJobDefinition(name string, priority int, -// reqRootDir, inputFilePattern string, -// outputTmpl, outputFileName string) (*JobDefinition, error) { - -// return &JobDefinition{ -// Name: name, -// Priority: priority, - -// // Primary and Secondary Request (Input) Files -// RequestsRootDir: reqRootDir, -// InputFileRegexp: regexp2.MustCompile(inputFilePattern, regexp2.None), - -// // Output Templates -// OutputFileTmpl: tmplMustCompile(outputTmpl, outputFileName), - -// ParamsRegexp: struct { -// Start *regexp2.Regexp -// End *regexp2.Regexp -// Stv *regexp2.Regexp -// Etv *regexp2.Regexp -// Cv *regexp2.Regexp -// ContentHash *regexp2.Regexp -// }{ -// Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), -// End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), -// Etv: matchVersionWithPrefix("etv"), -// Stv: matchVersionWithPrefix("stv"), -// }, -// FailureSuffix: matchFailureSuffix(config.FailSuffix), -// }, nil -// } - -// // BootstrapGLSubModDefinition creates a job definition for the Bootstrap GL Submodule job. -// // It sets the input file pattern based on the configuration and creates the job definition -// // with the appropriate parameters. -// func BootstrapGLSubModDefinition(conf *config.Config) (*JobDefinition, error) { -// inpFileExt := "" -// if conf.Bootstrap.CanRunFullLarge { -// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) -// } -// inputFilePattern := fmt.Sprintf(exec_Bootstrap_InputPattern, inpFileExt, config.FailSuffix) -// return createJobDefinition(job_Exec_Bootstrap_GLSubmodule, priority_Exec_Bootstrap_GLSubmodule, -// conf.Bootstrap.RequestsRootDir, inputFilePattern, exec_Bootstrap_Submodule_Tmpl, exec_Bootstrap_GLSubmodule_File) -// } - -// // BootstrapDistMetadataDefinition creates a job definition for the Bootstrap Metadata job. -// // It sets the input file pattern based on the configuration and creates the job definition -// // with the appropriate parameters. 
-// func BootstrapDistMetadataDefinition(conf *config.Config) (*JobDefinition, error) { -// inpFileExt := "" -// if conf.Bootstrap.CanRunFullLarge { -// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) -// } -// inputFilePattern := fmt.Sprintf(exec_Bootstrap_InputPattern, inpFileExt, config.FailSuffix) -// return createJobDefinition(job_Exec_Bootstrap_DistMetadata, priority_Exec_Bootstrap_DistMetadata, -// conf.Bootstrap.RequestsRootDir, inputFilePattern, exec_Bootstrap_DistMetadata_Tmpl, exec_Bootstrap_DistMetadata_File) -// } - -// func GLRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { -// inpFileExt := "" -// if conf.GLExecution.CanRunFullLarge { -// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) -// } -// inputFilePattern := fmt.Sprintf(exec_Bootstrap_GL_InputPattern, inpFileExt, config.FailSuffix) -// return createJobDefinition(job_Exec_GL_RndBeacon, priority_Exec_GL_RndBeacon, -// conf.GLExecution.RequestsRootDir, inputFilePattern, exec_GL_RndBeacon_Tmpl, exec_GL_RndBeacon_File) -// } - -// func GLDefinition(conf *config.Config) (*JobDefinition, error) { -// inpFileExt := "" -// if conf.GLExecution.CanRunFullLarge { -// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) -// } -// inputFilePattern := fmt.Sprintf(exec_Bootstrap_GL_InputPattern, inpFileExt, config.FailSuffix) -// return createJobDefinition(job_Exec_GL, priority_Exec_GL, -// conf.GLExecution.RequestsRootDir, inputFilePattern, exec_GL_Tmpl, exec_GL_File) -// } - -// func BootstrapRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { -// inpFileExt := "" -// if conf.RndBeacon.CanRunFullLarge { -// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) -// } -// inputFilePattern := fmt.Sprintf(exec_Bootstrap_RndBeacon_InputPattern, inpFileExt, config.FailSuffix) -// return createJobDefinition(job_Exec_Bootstrap_RndBeacon, priority_Exec_Bootstrap_RndBeacon, -// conf.RndBeacon.MetaData.RequestsRootDir, inputFilePattern, exec_RndBeacon_Tmpl, 
exec_RndBeacon_File) -// } - -// func RndBeaconLPPDefinition(conf *config.Config) (*JobDefinition, error) { -// inpFileExt := "" -// if conf.RndBeacon.CanRunFullLarge { -// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) -// } -// inputFilePattern := fmt.Sprintf(exec_GL_RndBeacon_InputPattern, inpFileExt, config.FailSuffix) -// return createJobDefinition(job_Exec_RndBeacon_LPP, priority_Exec_RndBeacon_LPP, -// conf.RndBeacon.GL.RequestsRootDir, inputFilePattern, exec_RndBeacon_Tmpl, exec_RndBeacon_File) -// } - -// func LPPDefinition(conf *config.Config) (*JobDefinition, error) { -// inpFileExt := "" -// if conf.LPPExecution.CanRunFullLarge { -// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) -// } -// inputFilePattern := fmt.Sprintf(exec_LPP_InputPattern, inpFileExt, config.FailSuffix) -// return createJobDefinition(job_Exec_LPP, priority_Exec_LPP, -// conf.LPPExecution.RequestsRootDir, inputFilePattern, exec_LPP_Tmpl, exec_LPP_File) -// } - -// func ConglomerateDistMetadataDefinition(conf *config.Config) (*JobDefinition, error) { -// inpFileExt := "" -// if conf.Conglomeration.CanRunFullLarge { -// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) -// } -// inputFilePattern := fmt.Sprintf(exec_Conglomerate_Bootstrap_DistMetadata_InputPattern, inpFileExt, config.FailSuffix) -// return createJobDefinition(job_Exec_Congolomerate_Bootstrap_Metadata, priority_Exec_Congolomerate_Metadata, -// conf.Conglomeration.BootstrapMetadata.RequestsRootDir, inputFilePattern, exec_Congolomerate_Tmpl, exec_Congolomerate_File) -// } - -// func ConglomerateGLDefinition(conf *config.Config) (*JobDefinition, error) { -// inpFileExt := "" -// if conf.Conglomeration.CanRunFullLarge { -// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) -// } -// inputFilePattern := fmt.Sprintf(exec_Conglomerate_GL_InputPattern, inpFileExt, config.FailSuffix) -// return createJobDefinition(job_Exec_Congolomerate_GL, priority_Exec_Congolomerate_GL, -// 
conf.Conglomeration.GL.RequestsRootDir, inputFilePattern, exec_Congolomerate_Tmpl, exec_Congolomerate_File) -// } - -// func ConglomerateLPPDefinition(conf *config.Config) (*JobDefinition, error) { -// inpFileExt := "" -// if conf.Conglomeration.CanRunFullLarge { -// inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) -// } -// inputFilePattern := fmt.Sprintf(exec_Conglomerate_LPP_InputPattern, inpFileExt, config.FailSuffix) -// return createJobDefinition(job_Exec_Congolomerate_LPP, priority_Exec_Congolomerate_LPP, -// conf.Conglomeration.LPP.RequestsRootDir, inputFilePattern, exec_Congolomerate_Tmpl, exec_Congolomerate_File) -// } +import ( + "fmt" + + "github.com/consensys/linea-monorepo/prover/config" +) + +const ( + // Job definitions + jobExecBootstrap = "execBootstrap" + jobExecGL = "execGL" + jobExecRndBeacon = "execRndbeacon" + jobExecLPP = "execLPP" + jobExecCongolomerateLPP = "execConglomeration" + + // Priorities + priorityExecBootstrap = 0 + priorityExecGL = 1 + priorityExecRndBeacon = 2 + priorityExecLPP = 3 + priorityExecCongolomeration = 4 + + // Input file patterns + execBootstrapInputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof\.json%v(\.failure\.%v_[0-9]+)*$` + execBootstrapGLInputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_GLSubmodule\.json%v(\.failure\.%v_[0-9]+)*$` + execBootstrapRndBeaconInputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` + execGLRndBeaconInputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_GL_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` + execLPPInputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_RndBeacon\.json%v(\.failure\.%v_[0-9]+)*$` + execConglomerateGLInputPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_GL\.json%v(\.failure\.%v_[0-9]+)*$` + execConglomerateLPPInputPattern = 
`^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_LPP\.json%v(\.failure\.%v_[0-9]+)*$` + execConglomerateBootstrapDistMetadataPattern = `^[0-9]+-[0-9]+(-etv[0-9\.]+)?(-stv[0-9\.]+)?-getZkProof_Bootstrap_DistMetadata\.json%v(\.failure\.%v_[0-9]+)*$` + + // Output file templates and patterns + execBootstrapGLSubmoduleTemplate = "execBootstrapGLSubmoduleReqFile" + execBootstrapGLSubmoduleFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_Bootstrap_GLSubmodule.json" + execBootstrapDistMetadataTemplate = "execBootstrapSubmoduleDistMetadataFile" + execBootstrapDistMetadataFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_Bootstrap_DistMetadata.json" + execGLRndBeaconTemplate = "execGLBeaconFile" + execGLRndBeaconFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_GL_RndBeacon.json" + execGLTemplate = "execGLOutputFile" + execGLFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_GL.json" + execRndBeaconTemplate = "execRndBeaconOutputFile" + execRndBeaconFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_RndBeacon.json" + execLPPTemplate = "execLPPOutputFile" + execLPPFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_LPP.json" + execConglomerateTemplate = "execOutputFile" + execConglomerateFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-getZkProof.json" +) + +func ExecBootstrapDefinition(conf *config.Config) (*JobDefinition, error) { + inpFileExt := "" + if conf.Bootstrap.CanRunFullLarge { + inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + + // Input files + reqDirs := []string{conf.Bootstrap.RequestsRootDir} + inputFilePatterns := []string{fmt.Sprintf(execBootstrapInputPattern, inpFileExt, config.FailSuffix)} + + // Output files + outputTmpls := 
[]string{execBootstrapGLSubmoduleTemplate, execBootstrapDistMetadataTemplate} + outputFiles := []string{execBootstrapGLSubmoduleFile, execBootstrapDistMetadataFile} + + return commonJobDefinition(jobExecBootstrap, priorityExecBootstrap, + reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(), config.FailSuffix) +} + +func ExecGLDefinition(conf *config.Config) (*JobDefinition, error) { + inpFileExt := "" + if conf.GLExecution.CanRunFullLarge { + inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + + // Input files + reqDirs := []string{conf.GLExecution.RequestsRootDir} + inputFilePatterns := []string{fmt.Sprintf(execBootstrapGLInputPattern, inpFileExt, config.FailSuffix)} + + // Output files + outputTmpls := []string{execGLRndBeaconTemplate, execGLTemplate} + outputFiles := []string{execGLRndBeaconFile, execGLFile} + + return commonJobDefinition(jobExecGL, priorityExecGL, + reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(), config.FailSuffix) +} + +func ExecRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { + inpFileExt := "" + if conf.RndBeacon.CanRunFullLarge { + inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + + // Input files + reqDirs := []string{ + conf.RndBeacon.MetaData.RequestsRootDir, + conf.RndBeacon.GL.RequestsRootDir, + } + inputFilePatterns := []string{ + fmt.Sprintf(execBootstrapRndBeaconInputPattern, inpFileExt, config.FailSuffix), + fmt.Sprintf(execGLRndBeaconInputPattern, inpFileExt, config.FailSuffix), + } + + // Output files + outputTmpls := []string{execRndBeaconTemplate} + outputFiles := []string{execRndBeaconFile} + + return commonJobDefinition(jobExecRndBeacon, priorityExecRndBeacon, + reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(), config.FailSuffix) +} + +func ExecLPPDefinition(conf *config.Config) (*JobDefinition, error) { + inpFileExt := "" + if conf.LPPExecution.CanRunFullLarge { + inpFileExt = fmt.Sprintf(`\.%v`, 
config.LargeSuffix) + } + + // Input files + reqDirs := []string{conf.LPPExecution.RequestsRootDir} + inputFilePatterns := []string{fmt.Sprintf(execLPPInputPattern, inpFileExt, config.FailSuffix)} + + // Output files + outputTmpls := []string{execLPPTemplate} + outputFiles := []string{execLPPFile} + + return commonJobDefinition(jobExecLPP, priorityExecLPP, + reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(), config.FailSuffix) +} + +func ExecConglomerationDefinition(conf *config.Config) (*JobDefinition, error) { + inpFileExt := "" + if conf.Conglomeration.CanRunFullLarge { + inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) + } + + // Input files + reqDirs := []string{ + conf.Conglomeration.BootstrapMetadata.RequestsRootDir, + conf.Conglomeration.GL.RequestsRootDir, + conf.Conglomeration.LPP.RequestsRootDir, + } + inputFilePatterns := []string{ + fmt.Sprintf(execConglomerateBootstrapDistMetadataPattern, inpFileExt, config.FailSuffix), + fmt.Sprintf(execConglomerateGLInputPattern, inpFileExt, config.FailSuffix), + fmt.Sprintf(execConglomerateLPPInputPattern, inpFileExt, config.FailSuffix), + } + + // Output files + outputTmpls := []string{execConglomerateTemplate} + outputFiles := []string{execConglomerateFile} + + return commonJobDefinition(jobExecCongolomerateLPP, priorityExecCongolomeration, + reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(), config.FailSuffix) +} From f0de708aa2a65deb587c00742062005c05205bd1 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Thu, 6 Feb 2025 14:56:28 +0000 Subject: [PATCH 20/48] limitless prover job definitions and unit tests successful --- .../controller/controller/job_definition.go | 11 +- .../controller/job_definition_limitless.go | 22 +- .../job_definition_limitless_test.go | 2006 +++++++---------- .../controller/job_definition_test.go | 25 +- prover/cmd/controller/controller/tmp_test.go | 211 ++ 5 files changed, 1062 insertions(+), 1213 deletions(-) create mode 100644 
prover/cmd/controller/controller/tmp_test.go diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index f89c45f05..9579549b0 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -130,7 +130,7 @@ func ExecutionDefinition(conf *config.Config) JobDefinition { []string{inputFilePattern}, []string{"exec-output-file"}, []string{"{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-getZkProof.json"}, - cmnExecParamsRegexp(), + cmnExecParamsRegexp(1), config.FailSuffix, ) if err != nil { @@ -264,14 +264,15 @@ func (jd *JobDefinition) dirTo(ipIdx int) string { return filepath.Join(jd.RequestsRootDir[ipIdx], config.RequestsToSubDir) } -func cmnExecParamsRegexp() []ParamsRegexp { - paramsRegexp := []ParamsRegexp{ - { +func cmnExecParamsRegexp(nInputs int) []ParamsRegexp { + paramsRegexp := make([]ParamsRegexp, nInputs) + for i := 0; i < nInputs; i++ { + paramsRegexp[i] = ParamsRegexp{ Start: regexp2.MustCompile(`^[0-9]+`, regexp2.None), End: regexp2.MustCompile(`(?<=^[0-9]+-)[0-9]+`, regexp2.None), Etv: matchVersionWithPrefix("etv"), Stv: matchVersionWithPrefix("stv"), - }, + } } return paramsRegexp } diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index 0c840afe6..fea362d0e 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -33,17 +33,17 @@ const ( // Output file templates and patterns execBootstrapGLSubmoduleTemplate = "execBootstrapGLSubmoduleReqFile" - execBootstrapGLSubmoduleFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_Bootstrap_GLSubmodule.json" + execBootstrapGLSubmoduleFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{ index .Job.Etv .Idx }}-stv{{ index .Job.Stv .Idx 
}}-getZkProof_Bootstrap_GLSubmodule.json" execBootstrapDistMetadataTemplate = "execBootstrapSubmoduleDistMetadataFile" - execBootstrapDistMetadataFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_Bootstrap_DistMetadata.json" + execBootstrapDistMetadataFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{ index .Job.Etv .Idx }}-stv{{ index .Job.Stv .Idx }}-getZkProof_Bootstrap_DistMetadata.json" execGLRndBeaconTemplate = "execGLBeaconFile" - execGLRndBeaconFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_GL_RndBeacon.json" + execGLRndBeaconFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{ index .Job.Etv .Idx }}-stv{{ index .Job.Stv .Idx }}-getZkProof_GL_RndBeacon.json" execGLTemplate = "execGLOutputFile" - execGLFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_GL.json" + execGLFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{ index .Job.Etv .Idx }}-stv{{ index .Job.Stv .Idx }}-getZkProof_GL.json" execRndBeaconTemplate = "execRndBeaconOutputFile" - execRndBeaconFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_RndBeacon.json" + execRndBeaconFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{ index .Job.Etv .Idx }}-stv{{ index .Job.Stv .Idx }}-getZkProof_RndBeacon.json" execLPPTemplate = "execLPPOutputFile" - execLPPFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{.Etv}}-stv{{.Stv}}-getZkProof_LPP.json" + execLPPFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-etv{{ index .Job.Etv .Idx }}-stv{{ index .Job.Stv .Idx }}-getZkProof_LPP.json" execConglomerateTemplate = "execOutputFile" execConglomerateFile = "{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-getZkProof.json" ) @@ -63,7 +63,7 @@ func ExecBootstrapDefinition(conf *config.Config) (*JobDefinition, error) { outputFiles := 
[]string{execBootstrapGLSubmoduleFile, execBootstrapDistMetadataFile} return commonJobDefinition(jobExecBootstrap, priorityExecBootstrap, - reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(), config.FailSuffix) + reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(1), config.FailSuffix) } func ExecGLDefinition(conf *config.Config) (*JobDefinition, error) { @@ -81,7 +81,7 @@ func ExecGLDefinition(conf *config.Config) (*JobDefinition, error) { outputFiles := []string{execGLRndBeaconFile, execGLFile} return commonJobDefinition(jobExecGL, priorityExecGL, - reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(), config.FailSuffix) + reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(1), config.FailSuffix) } func ExecRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { @@ -105,7 +105,7 @@ func ExecRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { outputFiles := []string{execRndBeaconFile} return commonJobDefinition(jobExecRndBeacon, priorityExecRndBeacon, - reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(), config.FailSuffix) + reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(2), config.FailSuffix) } func ExecLPPDefinition(conf *config.Config) (*JobDefinition, error) { @@ -123,7 +123,7 @@ func ExecLPPDefinition(conf *config.Config) (*JobDefinition, error) { outputFiles := []string{execLPPFile} return commonJobDefinition(jobExecLPP, priorityExecLPP, - reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(), config.FailSuffix) + reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(1), config.FailSuffix) } func ExecConglomerationDefinition(conf *config.Config) (*JobDefinition, error) { @@ -149,5 +149,5 @@ func ExecConglomerationDefinition(conf *config.Config) (*JobDefinition, error) { outputFiles := []string{execConglomerateFile} return 
commonJobDefinition(jobExecCongolomerateLPP, priorityExecCongolomeration, - reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(), config.FailSuffix) + reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(3), config.FailSuffix) } diff --git a/prover/cmd/controller/controller/job_definition_limitless_test.go b/prover/cmd/controller/controller/job_definition_limitless_test.go index 88be7c5d6..8b0518f4f 100644 --- a/prover/cmd/controller/controller/job_definition_limitless_test.go +++ b/prover/cmd/controller/controller/job_definition_limitless_test.go @@ -1,1188 +1,822 @@ package controller -// import ( -// "testing" - -// "github.com/consensys/linea-monorepo/prover/config" -// "github.com/stretchr/testify/assert" -// ) - -// // This tests ensures that the naming convention is respected by the file-watcher -// // i.e., files with the right naming only are recognized. And the corresponding -// // output files are also recognized. -// func TestBootstrapSubModInFileRegexp(t *testing.T) { - -// var ( -// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json" -// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large" -// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77" -// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77" -// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77.failure.code_77" -// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77.failure.code_77" -// missingEtv = "102-103-stv1.2.3-getZkProof.json" -// missingStv = "102-103-etv0.2.3-getZkProof.json" -// notAPoint = "102-103-etv0.2.3-getZkProofAjson" -// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" -// ) - -// // The responses in case of success -// var ( -// respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" -// respL = 
"responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" -// // #nosec G101 -- Not a credential -// respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" -// // #nosec G101 -- Not a credential -// respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" -// // #nosec G101 -- Not a credential -// respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" -// // #nosec G101 -- Not a credential -// respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" -// respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" -// respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_Bootstrap_GLSubmodule.json" -// ) - -// // The rename in case it is deferred to the large prover -// var ( -// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" -// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" -// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" -// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof.json.large.failure.code_137" -// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof.json.large.failure.code_137" -// ) - -// // The rename in case it is a success -// var ( -// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" -// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.success" -// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.success" -// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" -// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" -// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" -// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" -// 
successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" -// ) - -// // The rename in case it is a panic (code = 2) -// var ( -// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" -// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.failure.code_2" -// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.failure.code_2" -// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" -// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" -// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" -// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" -// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" -// ) - -// testcase := []inpFileNamesCases{ -// { -// Ext: "", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, -// Explainer: "happy path, case M", -// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, -// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, -// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, -// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "happy path, case L", -// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, -// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, -// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctL, correctWithFailL, 
correctWith2FailsL}, -// Explainer: "M does not pick the files reserved for L", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, -// Explainer: "L does not pick the files reserved for M", -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{notAPoint, badName}, -// Explainer: "M does not pick obviously invalid files", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, -// Explainer: "L does not pick obviously invalid files", -// }, -// } - -// for _, c := range testcase { -// conf := config.Config{} -// conf.Version = "0.1.2" -// conf.Bootstrap.CanRunFullLarge = c.Ext == "large" - -// def, err := BootstrapGLSubModDefinition(&conf) -// assert.NoError(t, err) - -// t.Run(c.Explainer, func(t *testing.T) { -// runInpFileTestCase(t, def, c) -// }) -// } -// } - -// // This tests ensures that the naming convention is respected by the file-watcher -// // i.e., files with the right naming only are recognized. And the corresponding -// // output files are also recognized. 
-// func TestBootstrapMetaDataInFileRegexp(t *testing.T) { - -// var ( -// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json" -// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large" -// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77" -// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77" -// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77.failure.code_77" -// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77.failure.code_77" -// missingEtv = "102-103-stv1.2.3-getZkProof.json" -// missingStv = "102-103-etv0.2.3-getZkProof.json" -// notAPoint = "102-103-etv0.2.3-getZkProofAjson" -// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" -// ) - -// // The responses in case of success -// var ( -// respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" -// respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" -// // #nosec G101 -- Not a credential -// respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" -// // #nosec G101 -- Not a credential -// respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" -// // #nosec G101 -- Not a credential -// respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" -// // #nosec G101 -- Not a credential -// respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" -// respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" -// respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_Bootstrap_DistMetadata.json" -// ) - -// // The rename in case it is deferred to the large prover -// var ( -// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" -// toLargeWithFailM = 
"requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" -// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" -// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof.json.large.failure.code_137" -// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof.json.large.failure.code_137" -// ) - -// // The rename in case it is a success -// var ( -// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" -// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.success" -// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.success" -// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" -// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" -// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" -// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" -// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" -// ) - -// // The rename in case it is a panic (code = 2) -// var ( -// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" -// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.failure.code_2" -// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.failure.code_2" -// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" -// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" -// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" -// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" -// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" -// ) - -// testcase := []inpFileNamesCases{ -// { -// Ext: "", Fail: "code", ShouldMatch: true, -// 
Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, -// Explainer: "happy path, case M", -// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, -// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, -// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, -// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "happy path, case L", -// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, -// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, -// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "M does not pick the files reserved for L", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, -// Explainer: "L does not pick the files reserved for M", -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{notAPoint, badName}, -// Explainer: "M does not pick obviously invalid files", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, -// Explainer: "L does not pick obviously invalid files", -// }, -// } - -// for _, c := range testcase { -// conf := config.Config{} -// conf.Version = "0.1.2" -// conf.Bootstrap.CanRunFullLarge = c.Ext == "large" - -// def, err := BootstrapDistMetadataDefinition(&conf) -// assert.NoError(t, err) - -// t.Run(c.Explainer, func(t *testing.T) { -// runInpFileTestCase(t, def, c) -// }) -// } -// } - -// func 
TestGLRndBeaconInFileRegexp(t *testing.T) { - -// var ( -// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" -// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large" -// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77" -// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77" -// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77.failure.code_77" -// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77.failure.code_77" -// missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" -// missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json" -// notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_SubmoduleAjson" -// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" -// ) - -// // The responses in case of success -// var ( -// respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" -// respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" -// // #nosec G101 -- Not a credential -// respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" -// // #nosec G101 -- Not a credential -// respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" -// // #nosec G101 -- Not a credential -// respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" -// // #nosec G101 -- Not a credential -// respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" -// respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_GL_RndBeacon.json" -// respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_GL_RndBeacon.json" -// ) - -// // The rename in case it is deferred to the large prover -// var ( -// toLargeM = 
"requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" -// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" -// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" -// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" -// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" -// ) - -// // The rename in case it is a success -// var ( -// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" -// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" -// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" -// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" -// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" -// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" -// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" -// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" -// ) - -// // The rename in case it is a panic (code = 2) -// var ( -// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" -// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" -// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" -// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" -// 
failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" -// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" -// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" -// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" -// ) - -// testCases := []inpFileNamesCases{ -// { -// Ext: "", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, -// Explainer: "happy path, case M", -// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, -// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, -// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, -// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "happy path, case L", -// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, -// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, -// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "M does not pick the files reserved for L", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, -// Explainer: "L does not pick the files reserved for M", -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{notAPoint, badName}, -// Explainer: "M does not pick obviously invalid 
files", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, -// Explainer: "L does not pick obviously invalid files", -// }, -// } - -// for _, c := range testCases { -// conf := config.Config{} -// conf.Version = "0.1.2" -// conf.GLExecution.CanRunFullLarge = c.Ext == "large" - -// def, err := GLRndBeaconDefinition(&conf) -// assert.NoError(t, err) - -// t.Run(c.Explainer, func(t *testing.T) { -// runInpFileTestCase(t, def, c) -// }) -// } -// } - -// func TestGLInFileRegexp(t *testing.T) { - -// var ( -// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" -// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large" -// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77" -// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77" -// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77.failure.code_77" -// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77.failure.code_77" -// missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" -// missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json" -// notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_SubmoduleAjson" -// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" -// ) - -// // The responses in case of success -// var ( -// respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" -// respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" -// // #nosec G101 -- Not a credential -// respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" -// // #nosec G101 -- Not a credential -// respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" -// // #nosec G101 -- Not a credential -// respWith2FailsM = 
"responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" -// // #nosec G101 -- Not a credential -// respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" -// respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_GL.json" -// respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_GL.json" -// ) - -// // The rename in case it is deferred to the large prover -// var ( -// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" -// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" -// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" -// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" -// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" -// ) - -// // The rename in case it is a success -// var ( -// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" -// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" -// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" -// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" -// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" -// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" -// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" -// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" -// ) - -// // The rename in case it is a panic (code = 2) -// var ( -// failM = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" -// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" -// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" -// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" -// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" -// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" -// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" -// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" -// ) - -// testCases := []inpFileNamesCases{ -// { -// Ext: "", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, -// Explainer: "happy path, case M", -// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, -// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, -// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, -// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "happy path, case L", -// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, -// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, -// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctL, correctWithFailL, 
correctWith2FailsL}, -// Explainer: "M does not pick the files reserved for L", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, -// Explainer: "L does not pick the files reserved for M", -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{notAPoint, badName}, -// Explainer: "M does not pick obviously invalid files", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, -// Explainer: "L does not pick obviously invalid files", -// }, -// } - -// for _, c := range testCases { -// conf := config.Config{} -// conf.Version = "0.1.2" -// conf.GLExecution.CanRunFullLarge = c.Ext == "large" - -// def, err := GLDefinition(&conf) -// assert.NoError(t, err) - -// t.Run(c.Explainer, func(t *testing.T) { -// runInpFileTestCase(t, def, c) -// }) -// } -// } - -// func TestBootstrapRndBeaconInFileRegexp(t *testing.T) { - -// var ( -// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" -// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large" -// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77" -// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77" -// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77.failure.code_77" -// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77.failure.code_77" -// missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" -// missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json" -// notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadataAjson" -// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" -// ) - -// // The responses in case of 
success -// var ( -// respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// // #nosec G101 -- Not a credential -// respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// // #nosec G101 -- Not a credential -// respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// // #nosec G101 -- Not a credential -// respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// // #nosec G101 -- Not a credential -// respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_RndBeacon.json" -// respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_RndBeacon.json" -// ) - -// // The rename in case it is deferred to the large prover -// var ( -// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" -// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" -// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" -// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" -// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" -// ) - -// // The rename in case it is a success -// var ( -// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" -// successWithFailM = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" -// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" -// ) - -// // The rename in case it is a panic (code = 2) -// var ( -// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" -// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" -// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" -// ) - -// testCases := []inpFileNamesCases{ -// { -// Ext: "", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, -// Explainer: "happy path, case M", -// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, -// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, -// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, -// ExpFailW2: []string{failM, failWithFailM, 
failWith2FailsM, failtWoEtv, failMWoStv}, -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "happy path, case L", -// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, -// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, -// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "M does not pick the files reserved for L", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, -// Explainer: "L does not pick the files reserved for M", -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{notAPoint, badName}, -// Explainer: "M does not pick obviously invalid files", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, -// Explainer: "L does not pick obviously invalid files", -// }, -// } - -// for _, c := range testCases { -// conf := config.Config{} -// conf.Version = "0.1.2" -// conf.RndBeacon.CanRunFullLarge = c.Ext == "large" - -// def, err := BootstrapRndBeaconDefinition(&conf) -// assert.NoError(t, err) - -// t.Run(c.Explainer, func(t *testing.T) { -// runInpFileTestCase(t, def, c) -// }) -// } -// } - -// func TestRndBeaconLPPInFileRegexp(t *testing.T) { - -// var ( -// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" -// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large" -// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_77" -// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_77" -// correctWith2FailsM = 
"102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_77.failure.code_77" -// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_77.failure.code_77" -// missingEtv = "102-103-stv1.2.3-getZkProof_GL_RndBeacon.json" -// missingStv = "102-103-etv0.2.3-getZkProof_GL_RndBeacon.json" -// notAPoint = "102-103-etv0.2.3-getZkProof_GL_RndBeaconAjson" -// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" -// ) - -// // The responses in case of success -// var ( -// respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// // #nosec G101 -- Not a credential -// respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// // #nosec G101 -- Not a credential -// respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// // #nosec G101 -- Not a credential -// respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// // #nosec G101 -- Not a credential -// respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_RndBeacon.json" -// respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_RndBeacon.json" -// ) - -// // The rename in case it is deferred to the large prover -// var ( -// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" -// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" -// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" -// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" -// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" -// ) - -// // The rename in case it is a success -// var ( -// 
successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" -// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.success" -// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.success" -// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" -// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" -// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" -// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" -// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" -// ) - -// // The rename in case it is a panic (code = 2) -// var ( -// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" -// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" -// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" -// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" -// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" -// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" -// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" -// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" -// ) - -// testCases := []inpFileNamesCases{ -// { -// Ext: "", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, -// Explainer: "happy path, case M", -// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, 
respWoEtv, respWoStv}, -// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, -// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, -// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "happy path, case L", -// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, -// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, -// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "M does not pick the files reserved for L", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, -// Explainer: "L does not pick the files reserved for M", -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{notAPoint, badName}, -// Explainer: "M does not pick obviously invalid files", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, -// Explainer: "L does not pick obviously invalid files", -// }, -// } - -// for _, c := range testCases { -// conf := config.Config{} -// conf.Version = "0.1.2" -// conf.RndBeacon.CanRunFullLarge = c.Ext == "large" - -// def, err := RndBeaconLPPDefinition(&conf) -// assert.NoError(t, err) - -// t.Run(c.Explainer, func(t *testing.T) { -// runInpFileTestCase(t, def, c) -// }) -// } -// } - -// func TestLPPInFileRegexp(t *testing.T) { - -// var ( -// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large" -// correctWithFailM = 
"102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_77" -// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_77" -// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_77.failure.code_77" -// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_77.failure.code_77" -// missingEtv = "102-103-stv1.2.3-getZkProof_RndBeacon.json" -// missingStv = "102-103-etv0.2.3-getZkProof_RndBeacon.json" -// notAPoint = "102-103-etv0.2.3-getZkProof_RndBeaconAjson" -// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" -// ) - -// // The responses in case of success -// var ( -// respM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" -// respL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" -// // #nosec G101 -- Not a credential -// respWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" -// // #nosec G101 -- Not a credential -// respWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" -// // #nosec G101 -- Not a credential -// respWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" -// // #nosec G101 -- Not a credential -// respWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" -// respWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_LPP.json" -// respWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_LPP.json" -// ) - -// // The rename in case it is deferred to the large prover -// var ( -// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" -// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" -// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" -// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" -// toLargeWoStv = 
"requests/102-103-etv0.2.3-getZkProof_RndBeacon.json.large.failure.code_137" -// ) - -// // The rename in case it is a success -// var ( -// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" -// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_RndBeacon.json.success" -// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_RndBeacon.json.success" -// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" -// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" -// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" -// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" -// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" -// ) - -// // The rename in case it is a panic (code = 2) -// var ( -// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" -// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_RndBeacon.json.failure.code_2" -// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" -// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" -// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" -// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" -// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" -// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" -// ) - -// testCases := []inpFileNamesCases{ -// { -// Ext: "", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, -// Explainer: "happy 
path, case M", -// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respWoEtv, respWoStv}, -// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, -// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, -// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "happy path, case L", -// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, -// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, -// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "M does not pick the files reserved for L", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, -// Explainer: "L does not pick the files reserved for M", -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{notAPoint, badName}, -// Explainer: "M does not pick obviously invalid files", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, -// Explainer: "L does not pick obviously invalid files", -// }, -// } - -// for _, c := range testCases { -// conf := config.Config{} -// conf.Version = "0.1.2" -// conf.LPPExecution.CanRunFullLarge = c.Ext == "large" - -// def, err := LPPDefinition(&conf) -// assert.NoError(t, err) - -// t.Run(c.Explainer, func(t *testing.T) { -// runInpFileTestCase(t, def, c) -// }) -// } -// } - -// func TestConglomerateGLInFileRegexp(t *testing.T) { - -// var ( -// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" -// correctL = 
"102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large" -// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_77" -// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_77" -// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_77.failure.code_77" -// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_77.failure.code_77" -// missingEtv = "102-103-stv1.2.3-getZkProof_GL.json" -// missingStv = "102-103-etv0.2.3-getZkProof_GL.json" -// notAPoint = "102-103-etv0.2.3-getZkProof_GLAjson" -// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" -// ) - -// // The responses in case of success -// var ( -// respM = "responses/102-103-getZkProof.json" -// respL = "responses/102-103-getZkProof.json" -// // #nosec G101 -- Not a credential -// respWithFailM = "responses/102-103-getZkProof.json" -// // #nosec G101 -- Not a credential -// respWithFailL = "responses/102-103-getZkProof.json" -// // #nosec G101 -- Not a credential -// respWith2FailsM = "responses/102-103-getZkProof.json" -// // #nosec G101 -- Not a credential -// respWith2FailsL = "responses/102-103-getZkProof.json" -// ) - -// // The rename in case it is deferred to the large prover -// var ( -// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" -// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" -// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" -// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_GL.json.large.failure.code_137" -// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_GL.json.large.failure.code_137" -// ) - -// // The rename in case it is a success -// var ( -// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" -// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL.json.success" -// 
successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL.json.success" -// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" -// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" -// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" -// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" -// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" -// ) - -// // The rename in case it is a panic (code = 2) -// var ( -// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" -// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL.json.failure.code_2" -// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL.json.failure.code_2" -// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" -// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" -// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" -// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" -// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" -// ) - -// testCases := []inpFileNamesCases{ -// { -// Ext: "", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, -// Explainer: "happy path, case M", -// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, -// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, -// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, -// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, -// }, -// { -// Ext: 
"large", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "happy path, case L", -// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, -// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, -// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "M does not pick the files reserved for L", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, -// Explainer: "L does not pick the files reserved for M", -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{notAPoint, badName}, -// Explainer: "M does not pick obviously invalid files", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, -// Explainer: "L does not pick obviously invalid files", -// }, -// } - -// for _, c := range testCases { -// conf := config.Config{} -// conf.Version = "0.1.2" -// conf.Conglomeration.CanRunFullLarge = c.Ext == "large" - -// def, err := ConglomerateGLDefinition(&conf) -// assert.NoError(t, err) - -// t.Run(c.Explainer, func(t *testing.T) { -// runInpFileTestCase(t, def, c) -// }) -// } -// } - -// func TestConglomerateDistMetadataInFileRegexp(t *testing.T) { - -// var ( -// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" -// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large" -// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77" -// correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77" -// correctWith2FailsM = 
"102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77.failure.code_77" -// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77.failure.code_77" -// missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" -// missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json" -// notAPoint = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadataAjson" -// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" -// ) - -// // The responses in case of success -// var ( -// respM = "responses/102-103-getZkProof.json" -// respL = "responses/102-103-getZkProof.json" -// // #nosec G101 -- Not a credential -// respWithFailM = "responses/102-103-getZkProof.json" -// // #nosec G101 -- Not a credential -// respWithFailL = "responses/102-103-getZkProof.json" -// // #nosec G101 -- Not a credential -// respWith2FailsM = "responses/102-103-getZkProof.json" -// // #nosec G101 -- Not a credential -// respWith2FailsL = "responses/102-103-getZkProof.json" -// ) - -// // The rename in case it is deferred to the large prover -// var ( -// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" -// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" -// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" -// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" -// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" -// ) - -// // The rename in case it is a success -// var ( -// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// successtWoEtv = 
"requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" -// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" -// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" -// ) - -// // The rename in case it is a panic (code = 2) -// var ( -// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" -// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" -// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" -// ) - -// testCases := []inpFileNamesCases{ -// { -// Ext: "", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, -// Explainer: "happy path, case M", -// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, -// ExpToLarge: []string{toLargeM, toLargeWithFailM, 
toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, -// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, -// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "happy path, case L", -// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, -// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, -// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "M does not pick the files reserved for L", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, -// Explainer: "L does not pick the files reserved for M", -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{notAPoint, badName}, -// Explainer: "M does not pick obviously invalid files", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, -// Explainer: "L does not pick obviously invalid files", -// }, -// } - -// for _, c := range testCases { -// conf := config.Config{} -// conf.Version = "0.1.2" -// conf.Conglomeration.CanRunFullLarge = c.Ext == "large" - -// def, err := ConglomerateDistMetadataDefinition(&conf) -// assert.NoError(t, err) - -// t.Run(c.Explainer, func(t *testing.T) { -// runInpFileTestCase(t, def, c) -// }) -// } -// } - -// func TestConglomerateLPPInFileRegexp(t *testing.T) { - -// var ( -// correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" -// correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large" -// correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_77" -// 
correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_77" -// correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_77.failure.code_77" -// correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_77.failure.code_77" -// missingEtv = "102-103-stv1.2.3-getZkProof_LPP.json" -// missingStv = "102-103-etv0.2.3-getZkProof_LPP.json" -// notAPoint = "102-103-etv0.2.3-getZkProof_LPPAjson" -// badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" -// ) - -// // The responses in case of success -// var ( -// respM = "responses/102-103-getZkProof.json" -// respL = "responses/102-103-getZkProof.json" -// // #nosec G101 -- Not a credential -// respWithFailM = "responses/102-103-getZkProof.json" -// // #nosec G101 -- Not a credential -// respWithFailL = "responses/102-103-getZkProof.json" -// // #nosec G101 -- Not a credential -// respWith2FailsM = "responses/102-103-getZkProof.json" -// // #nosec G101 -- Not a credential -// respWith2FailsL = "responses/102-103-getZkProof.json" -// ) - -// // The rename in case it is deferred to the large prover -// var ( -// toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" -// toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" -// toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" -// toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" -// toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_LPP.json.large.failure.code_137" -// ) - -// // The rename in case it is a success -// var ( -// successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" -// successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_LPP.json.success" -// successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_LPP.json.success" -// successL = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" -// successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" -// successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" -// successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" -// successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" -// ) - -// // The rename in case it is a panic (code = 2) -// var ( -// failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" -// failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_LPP.json.failure.code_2" -// failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_LPP.json.failure.code_2" -// failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" -// failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" -// failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" -// failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" -// failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" -// ) - -// testCases := []inpFileNamesCases{ -// { -// Ext: "", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM, missingEtv, missingStv}, -// Explainer: "happy path, case M", -// ExpectedOutput: []string{respM, respWithFailM, respWith2FailsM, respM, respM}, -// ExpToLarge: []string{toLargeM, toLargeWithFailM, toLargeWith2FailsM, toLargeWoEtv, toLargeWoStv}, -// ExpSuccess: []string{successM, successWithFailM, successWith2FailsM, successtWoEtv, successMWoStv}, -// ExpFailW2: []string{failM, failWithFailM, failWith2FailsM, failtWoEtv, failMWoStv}, -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: true, -// Fnames: []string{correctL, 
correctWithFailL, correctWith2FailsL}, -// Explainer: "happy path, case L", -// ExpectedOutput: []string{respL, respWithFailL, respWith2FailsL}, -// ExpSuccess: []string{successL, successWithFailL, successWith2FailsL}, -// ExpFailW2: []string{failL, failWithFailL, failWith2FailsL}, -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctL, correctWithFailL, correctWith2FailsL}, -// Explainer: "M does not pick the files reserved for L", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{correctM, correctWithFailM, correctWith2FailsM}, -// Explainer: "L does not pick the files reserved for M", -// }, -// { -// Ext: "", Fail: "code", ShouldMatch: false, -// Fnames: []string{notAPoint, badName}, -// Explainer: "M does not pick obviously invalid files", -// }, -// { -// Ext: "large", Fail: "code", ShouldMatch: false, -// Fnames: []string{missingEtv, missingStv, notAPoint, badName}, -// Explainer: "L does not pick obviously invalid files", -// }, -// } - -// for _, c := range testCases { -// conf := config.Config{} -// conf.Version = "0.1.2" -// conf.Conglomeration.CanRunFullLarge = c.Ext == "large" - -// def, err := ConglomerateLPPDefinition(&conf) -// assert.NoError(t, err) - -// t.Run(c.Explainer, func(t *testing.T) { -// runInpFileTestCase(t, def, c) -// }) -// } -// } +import ( + "testing" + + "github.com/consensys/linea-monorepo/prover/config" + "github.com/stretchr/testify/assert" +) + +func TestExecBootstrapInFileRegexp(t *testing.T) { + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json" + correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77" + correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_77.failure.code_77" + correctWith2FailsL = 
"102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZkProof.json" + missingStv = "102-103-etv0.2.3-getZkProof.json" + notAPoint = "102-103-etv0.2.3-getZkProofAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + // The responses in case of success + var ( + respGLM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + respGLL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + respMetadataM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + respMetadataL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + + respGLWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + respGLWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + respGLWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + respGLWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + respMetadataWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + respMetadataWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + respMetadataWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + respMetadataWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + + respGLWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + respGLWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_Bootstrap_GLSubmodule.json" + respMetadataWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + respMetadataWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_Bootstrap_DistMetadata.json" + ) + + // The rename in case it is deferred to the large prover + var ( + toLargeM = 
"requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" + toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" + toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_137" + toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof.json.large.failure.code_137" + toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof.json.large.failure.code_137" + ) + + // The rename in case it is a success + var ( + successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" + successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.success" + successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.success" + successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" + successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" + successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" + successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.success" + successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.success" + ) + + // The rename in case it is a panic (code = 2) + var ( + failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" + failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof.json.failure.code_2" + failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof.json.failure.code_2" + failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" + failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" + failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" + failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.failure.code_2" + failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof.json.large.failure.code_2" + ) + + testcase := []inpFileNamesCases{ + { + Ext: 
"", Fail: "code", ShouldMatch: true, + Fnames: [][]string{{correctM}, {correctWithFailM}, {correctWith2FailsM}, {missingEtv}, {missingStv}}, + Explainer: "happy path, case M", + ExpectedOutput: [][]string{{respGLM, respMetadataM}, {respGLWithFailM, respMetadataWithFailM}, {respGLWith2FailsM, respMetadataWith2FailsM}, {respGLWoEtv, respMetadataWoEtv}, {respGLWoStv, respMetadataWoStv}}, + ExpToLarge: [][]string{{toLargeM, toLargeM}, {toLargeWithFailM, toLargeWithFailM}, {toLargeWith2FailsM, toLargeWith2FailsM}, {toLargeWoEtv, toLargeWoEtv}, {toLargeWoStv, toLargeWoStv}}, + ExpSuccess: [][]string{{successM, successM}, {successWithFailM, successWithFailM}, {successWith2FailsM, successWith2FailsM}, {successtWoEtv, successtWoEtv}, {successMWoStv, successMWoStv}}, + ExpFailW2: [][]string{{failM, failM}, {failWithFailM, failWithFailM}, {failWith2FailsM, failWith2FailsM}, {failtWoEtv, failtWoEtv}, {failMWoStv, failMWoStv}}, + }, + { + Ext: "large", Fail: "code", ShouldMatch: true, + Fnames: [][]string{{correctL}, {correctWithFailL}, {correctWith2FailsL}}, + Explainer: "happy path, case L", + ExpectedOutput: [][]string{{respGLL, respMetadataL}, {respGLWithFailL, respMetadataWithFailL}, {respGLWith2FailsL, respMetadataWith2FailsL}}, + ExpSuccess: [][]string{{successL, successL}, {successWithFailL, successWithFailL}, {successWith2FailsL, successWith2FailsL}}, + ExpFailW2: [][]string{{failL, failL}, {failWithFailL, failWithFailL}, {failWith2FailsL, failWith2FailsL}}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: [][]string{{correctL}, {correctWithFailL}, {correctWith2FailsL}}, + Explainer: "M does not pick the files reserved for L", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: [][]string{{correctM}, {correctWithFailM}, {correctWith2FailsM}}, + Explainer: "L does not pick the files reserved for M", + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: [][]string{{notAPoint}, {badName}}, + Explainer: "M does not pick obviously 
invalid files", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: [][]string{{missingEtv}, {missingStv}, {notAPoint}, {badName}}, + Explainer: "L does not pick obviously invalid files", + }, + } + + for _, c := range testcase { + conf := config.Config{} + conf.Version = "0.1.2" + conf.Bootstrap.CanRunFullLarge = c.Ext == "large" + + def, err := ExecBootstrapDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCase(t, def, c) + }) + } +} + +func TestExecGLInFileRegexp(t *testing.T) { + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77" + correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_77.failure.code_77" + correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json" + missingStv = "102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json" + notAPoint = "102-103-etv0.2.3-getZkProofAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + // The responses in case of success + var ( + respRndBeaconM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" + respRndBeaconL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" + respGLM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" + respGLL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" + + respRndBeaconWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" + respRndBeaconWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" + 
respRndBeaconWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" + respRndBeaconWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" + respGLWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" + respGLWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" + respGLWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" + respGLWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" + + respRndBeaconWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_GL_RndBeacon.json" + respRndBeaconWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_GL_RndBeacon.json" + respGLWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_GL.json" + respGLWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_GL.json" + ) + + // The rename in case it is deferred to the large prover + var ( + toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_137" + ) + + // The rename in case it is a success + var ( + successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" + successWithFailM = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" + successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success" + successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success" + ) + + // The rename in case it is a panic (code = 2) + var ( + failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" + failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" + failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2" + failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_2" + ) + + testcase := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: [][]string{{correctM}, {correctWithFailM}, {correctWith2FailsM}, {missingEtv}, {missingStv}}, + Explainer: "happy path, case M", + ExpectedOutput: [][]string{{respRndBeaconM, respGLM}, {respRndBeaconWithFailM, respGLWithFailM}, {respRndBeaconWith2FailsM, respGLWith2FailsM}, {respRndBeaconWoEtv, respGLWoEtv}, {respRndBeaconWoStv, respGLWoStv}}, + ExpToLarge: [][]string{{toLargeM, toLargeM}, {toLargeWithFailM, toLargeWithFailM}, {toLargeWith2FailsM, toLargeWith2FailsM}, {toLargeWoEtv, toLargeWoEtv}, {toLargeWoStv, 
toLargeWoStv}}, + ExpSuccess: [][]string{{successM, successM}, {successWithFailM, successWithFailM}, {successWith2FailsM, successWith2FailsM}, {successtWoEtv, successtWoEtv}, {successMWoStv, successMWoStv}}, + ExpFailW2: [][]string{{failM, failM}, {failWithFailM, failWithFailM}, {failWith2FailsM, failWith2FailsM}, {failtWoEtv, failtWoEtv}, {failMWoStv, failMWoStv}}, + }, + { + Ext: "large", Fail: "code", ShouldMatch: true, + Fnames: [][]string{{correctL}, {correctWithFailL}, {correctWith2FailsL}}, + Explainer: "happy path, case L", + ExpectedOutput: [][]string{{respRndBeaconL, respGLL}, {respRndBeaconWithFailL, respGLWithFailL}, {respRndBeaconWith2FailsL, respGLWith2FailsL}}, + ExpSuccess: [][]string{{successL, successL}, {successWithFailL, successWithFailL}, {successWith2FailsL, successWith2FailsL}}, + ExpFailW2: [][]string{{failL, failL}, {failWithFailL, failWithFailL}, {failWith2FailsL, failWith2FailsL}}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: [][]string{{correctL}, {correctWithFailL}, {correctWith2FailsL}}, + Explainer: "M does not pick the files reserved for L", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: [][]string{{correctM}, {correctWithFailM}, {correctWith2FailsM}}, + Explainer: "L does not pick the files reserved for M", + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: [][]string{{notAPoint}, {badName}}, + Explainer: "M does not pick obviously invalid files", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: [][]string{{missingEtv}, {missingStv}, {notAPoint}, {badName}}, + Explainer: "L does not pick obviously invalid files", + }, + } + + for _, c := range testcase { + conf := config.Config{} + conf.Version = "0.1.2" + conf.GLExecution.CanRunFullLarge = c.Ext == "large" + + def, err := ExecGLDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCase(t, def, c) + }) + } +} + +func TestExecRndBeaconInFileRegexp(t *testing.T) { 
+ var ( + correctBootstrapMetadataM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + correctBootstrapMetadataL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large" + correctBootstrapMetadataWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77" + correctBootstrapMetadataWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77" + correctBootstrapMetadataWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77.failure.code_77" + correctBootstrapMetadataWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77.failure.code_77" + missingBootstrapMetadataEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + missingBootstrapMetadataStv = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json" + + correctGLM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" + correctGLL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large" + correctGLWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_77" + correctGLWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_77" + correctGLWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_77.failure.code_77" + correctGLWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_77.failure.code_77" + missingGLEtv = "102-103-stv1.2.3-getZkProof_GL_RndBeacon.json" + missingGLStv = "102-103-etv0.2.3-getZkProof_GL_RndBeacon.json" + notAPoint = "102-103-etv0.2.3-getZkProofAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + // The responses in case of success + var ( + respRndBeaconM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + respRndBeaconL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + + respRndBeaconWithFailM = 
"responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + respRndBeaconWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + respRndBeaconWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + respRndBeaconWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + + respRndBeaconWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_RndBeacon.json" + respRndBeaconWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_RndBeacon.json" + ) + + // The rename in case it is deferred to the large prover + var ( + toLargeBootstrapMetadataM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeBootstrapMetadataWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeBootstrapMetadataWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeBootstrapMetadataWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeBootstrapMetadataWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + + toLargeGLM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" + toLargeGLWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" + toLargeGLWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" + toLargeGLWoEtv = "requests/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" + toLargeGLWoStv = "requests/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" + ) + + // The rename in case it is a success + var ( + successBootstrapMetadataM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successBootstrapMetadataMWoStv = 
"requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successBootstrapMetadatastWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successBootstrapMetadataL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" + successBootstrapMetadataWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successBootstrapMetadataWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" + successBootstrapMetadataWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successBootstrapMetadataWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" + + successGLM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" + successGLMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.success" + successGLstWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.success" + successGLL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" + successGLWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" + successGLWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" + successGLWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" + successGLWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" + ) + + // The rename in case it is a panic (code = 2) + var ( + failBootstrapMetadataM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failBootstrapMetadataMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failBootstrapMetadatastWoEtv = 
"requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failBootstrapMetadataL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" + failBootstrapMetadataWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failBootstrapMetadataWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" + failBootstrapMetadataWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failBootstrapMetadataWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" + + failGLM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" + failGLMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" + failGLstWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" + failGLL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" + failGLWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" + failGLWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" + failGLWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" + failGLWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" + ) + + testcase := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: [][]string{ + {correctBootstrapMetadataM, correctGLM}, + {correctBootstrapMetadataWithFailM, correctGLWithFailM}, + {correctBootstrapMetadataWith2FailsM, correctGLWith2FailsM}, + {missingBootstrapMetadataEtv, missingGLEtv}, + {missingBootstrapMetadataStv, missingGLStv}, + }, + Explainer: "happy path, case M", 
+ ExpectedOutput: [][]string{ + {respRndBeaconM}, + {respRndBeaconWithFailM}, + {respRndBeaconWith2FailsM}, + {respRndBeaconWoEtv}, + {respRndBeaconWoStv}, + }, + ExpToLarge: [][]string{ + {toLargeBootstrapMetadataM, toLargeGLM}, + {toLargeBootstrapMetadataWithFailM, toLargeGLWithFailM}, + {toLargeBootstrapMetadataWith2FailsM, toLargeGLWith2FailsM}, + {toLargeBootstrapMetadataWoEtv, toLargeGLWoEtv}, + {toLargeBootstrapMetadataWoStv, toLargeGLWoStv}, + }, + ExpSuccess: [][]string{ + {successBootstrapMetadataM, successGLM}, + {successBootstrapMetadataWithFailM, successGLWithFailM}, + {successBootstrapMetadataWith2FailsM, successGLWith2FailsM}, + {successBootstrapMetadatastWoEtv, successGLstWoEtv}, + {successBootstrapMetadataMWoStv, successGLMWoStv}, + }, + ExpFailW2: [][]string{ + {failBootstrapMetadataM, failGLM}, + {failBootstrapMetadataWithFailM, failGLWithFailM}, + {failBootstrapMetadataWith2FailsM, failGLWith2FailsM}, + {failBootstrapMetadatastWoEtv, failGLstWoEtv}, + {failBootstrapMetadataMWoStv, failGLMWoStv}, + }, + }, + { + Ext: "large", Fail: "code", ShouldMatch: true, + Fnames: [][]string{ + {correctBootstrapMetadataL, correctGLL}, + {correctBootstrapMetadataWithFailL, correctGLWithFailL}, + {correctBootstrapMetadataWith2FailsL, correctGLWith2FailsL}, + }, + Explainer: "happy path, case L", + ExpectedOutput: [][]string{ + {respRndBeaconL}, + {respRndBeaconWithFailL}, + {respRndBeaconWith2FailsL}, + }, + ExpSuccess: [][]string{ + {successBootstrapMetadataL, successGLL}, + {successBootstrapMetadataWithFailL, successGLWithFailL}, + {successBootstrapMetadataWith2FailsL, successGLWith2FailsL}, + }, + ExpFailW2: [][]string{ + {failBootstrapMetadataL, failGLL}, + {failBootstrapMetadataWithFailL, failGLWithFailL}, + {failBootstrapMetadataWith2FailsL, failGLWith2FailsL}, + }, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: [][]string{ + {correctBootstrapMetadataL, correctGLL}, + {correctBootstrapMetadataWithFailL, correctGLWithFailL}, + 
{correctBootstrapMetadataWith2FailsL, correctGLWith2FailsL}, + }, + Explainer: "M does not pick the files reserved for L", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: [][]string{ + {correctBootstrapMetadataM, correctGLM}, + {correctBootstrapMetadataWithFailM, correctGLWithFailM}, + {correctBootstrapMetadataWith2FailsM, correctGLWith2FailsM}, + }, + Explainer: "L does not pick the files reserved for M", + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: [][]string{ + {notAPoint}, + {badName}, + }, + Explainer: "M does not pick obviously invalid files", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: [][]string{ + {missingBootstrapMetadataEtv, missingGLEtv}, + {missingBootstrapMetadataStv, missingGLStv}, + {notAPoint}, + {badName}, + }, + Explainer: "L does not pick obviously invalid files", + }, + } + + for _, c := range testcase { + conf := config.Config{} + conf.Version = "0.1.2" + conf.RndBeacon.CanRunFullLarge = c.Ext == "large" + + def, err := ExecRndBeaconDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCase(t, def, c) + }) + } +} + +func TestExecLPPInFileRegexp(t *testing.T) { + var ( + correctM = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + correctL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large" + correctWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_77" + correctWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_77" + correctWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_77.failure.code_77" + correctWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_77.failure.code_77" + missingEtv = "102-103-stv1.2.3-getZkProof_RndBeacon.json" + missingStv = "102-103-etv0.2.3-getZkProof_RndBeacon.json" + notAPoint = "102-103-etv0.2.3-getZkProofAjson" + badName = 
"102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + // The responses in case of success + var ( + respLPPM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" + respLPPL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" + + respLPPWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" + respLPPWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" + respLPPWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" + respLPPWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" + + respLPPWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_LPP.json" + respLPPWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_LPP.json" + ) + + // The rename in case it is deferred to the large prover + var ( + toLargeM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" + toLargeWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" + toLargeWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" + toLargeWoEtv = "requests/102-103-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_137" + toLargeWoStv = "requests/102-103-etv0.2.3-getZkProof_RndBeacon.json.large.failure.code_137" + ) + + // The rename in case it is a success + var ( + successM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" + successMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_RndBeacon.json.success" + successtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_RndBeacon.json.success" + successL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" + successWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" + successWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" + successWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.success" + 
successWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.success" + ) + + // The rename in case it is a panic (code = 2) + var ( + failM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" + failMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_RndBeacon.json.failure.code_2" + failtWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" + failL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" + failWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" + failWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" + failWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2" + failWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_2" + ) + + testcase := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: [][]string{{correctM}, {correctWithFailM}, {correctWith2FailsM}, {missingEtv}, {missingStv}}, + Explainer: "happy path, case M", + ExpectedOutput: [][]string{{respLPPM}, {respLPPWithFailM}, {respLPPWith2FailsM}, {respLPPWoEtv}, {respLPPWoStv}}, + ExpToLarge: [][]string{{toLargeM}, {toLargeWithFailM}, {toLargeWith2FailsM}, {toLargeWoEtv}, {toLargeWoStv}}, + ExpSuccess: [][]string{{successM}, {successWithFailM}, {successWith2FailsM}, {successtWoEtv}, {successMWoStv}}, + ExpFailW2: [][]string{{failM}, {failWithFailM}, {failWith2FailsM}, {failtWoEtv}, {failMWoStv}}, + }, + { + Ext: "large", Fail: "code", ShouldMatch: true, + Fnames: [][]string{{correctL}, {correctWithFailL}, {correctWith2FailsL}}, + Explainer: "happy path, case L", + ExpectedOutput: [][]string{{respLPPL}, {respLPPWithFailL}, {respLPPWith2FailsL}}, + ExpSuccess: [][]string{{successL}, {successWithFailL}, {successWith2FailsL}}, + ExpFailW2: [][]string{{failL}, 
{failWithFailL}, {failWith2FailsL}}, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: [][]string{{correctL}, {correctWithFailL}, {correctWith2FailsL}}, + Explainer: "M does not pick the files reserved for L", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: [][]string{{correctM}, {correctWithFailM}, {correctWith2FailsM}}, + Explainer: "L does not pick the files reserved for M", + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: [][]string{{notAPoint}, {badName}}, + Explainer: "M does not pick obviously invalid files", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: [][]string{{missingEtv}, {missingStv}, {notAPoint}, {badName}}, + Explainer: "L does not pick obviously invalid files", + }, + } + + for _, c := range testcase { + conf := config.Config{} + conf.Version = "0.1.2" + conf.LPPExecution.CanRunFullLarge = c.Ext == "large" + + def, err := ExecLPPDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCase(t, def, c) + }) + } +} + +func TestExecConglomerationInFileRegexp(t *testing.T) { + var ( + correctBootstrapMetadataM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + correctBootstrapMetadataL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large" + correctBootstrapMetadataWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77" + correctBootstrapMetadataWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77" + correctBootstrapMetadataWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77.failure.code_77" + correctBootstrapMetadataWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77.failure.code_77" + missingBootstrapMetadataEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" + missingBootstrapMetadataStv = 
"102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json" + + correctGLM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json" + correctGLL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large" + correctGLWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_77" + correctGLWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_77" + correctGLWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_77.failure.code_77" + correctGLWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_77.failure.code_77" + missingGLEtv = "102-103-stv1.2.3-getZkProof_GL.json" + missingGLStv = "102-103-etv0.2.3-getZkProof_GL.json" + + correctLPPM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json" + correctLPPL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large" + correctLPPWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_77" + correctLPPWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_77" + correctLPPWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_77.failure.code_77" + correctLPPWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_77.failure.code_77" + missingLPPEtv = "102-103-stv1.2.3-getZkProof_LPP.json" + missingLPPStv = "102-103-etv0.2.3-getZkProof_LPP.json" + notAPoint = "102-103-etv0.2.3-getZkProofAjson" + badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" + ) + + // The responses in case of success + var ( + respConglomerateM = "responses/102-103-getZkProof.json" + respConglomerateL = "responses/102-103-getZkProof.json" + + respConglomerateWithFailM = "responses/102-103-getZkProof.json" + respConglomerateWithFailL = "responses/102-103-getZkProof.json" + respConglomerateWith2FailsM = "responses/102-103-getZkProof.json" + respConglomerateWith2FailsL = "responses/102-103-getZkProof.json" + + respConglomerateWoEtv = "responses/102-103-getZkProof.json" + respConglomerateWoStv = 
"responses/102-103-getZkProof.json" + ) + + // The rename in case it is deferred to the large prover + var ( + toLargeBootstrapMetadataM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeBootstrapMetadataWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeBootstrapMetadataWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeBootstrapMetadataWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + toLargeBootstrapMetadataWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + + toLargeGLM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" + toLargeGLWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" + toLargeGLWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_137" + toLargeGLWoEtv = "requests/102-103-stv1.2.3-getZkProof_GL.json.large.failure.code_137" + toLargeGLWoStv = "requests/102-103-etv0.2.3-getZkProof_GL.json.large.failure.code_137" + + toLargeLPPM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" + toLargeLPPWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" + toLargeLPPWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" + toLargeLPPWoEtv = "requests/102-103-stv1.2.3-getZkProof_LPP.json.large.failure.code_137" + toLargeLPPWoStv = "requests/102-103-etv0.2.3-getZkProof_LPP.json.large.failure.code_137" + ) + + // The rename in case it is a success + var ( + successBootstrapMetadataM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successBootstrapMetadataMWoStv = 
"requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successBootstrapMetadatastWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successBootstrapMetadataL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" + successBootstrapMetadataWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successBootstrapMetadataWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" + successBootstrapMetadataWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" + successBootstrapMetadataWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" + + successGLM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" + successGLMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL.json.success" + successGLstWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL.json.success" + successGLL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" + successGLWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" + successGLWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" + successGLWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.success" + successGLWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.success" + + successLPPM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" + successLPPMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_LPP.json.success" + successLPPstWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_LPP.json.success" + successLPPL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" + successLPPWithFailM = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" + successLPPWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" + successLPPWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.success" + successLPPWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.success" + ) + + // The rename in case it is a panic (code = 2) + var ( + failBootstrapMetadataM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failBootstrapMetadataMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failBootstrapMetadatastWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failBootstrapMetadataL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" + failBootstrapMetadataWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failBootstrapMetadataWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" + failBootstrapMetadataWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" + failBootstrapMetadataWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" + + failGLM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" + failGLMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL.json.failure.code_2" + failGLstWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL.json.failure.code_2" + failGLL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" + failGLWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" + failGLWithFailL = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" + failGLWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.failure.code_2" + failGLWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL.json.large.failure.code_2" + + failLPPM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" + failLPPMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_LPP.json.failure.code_2" + failLPPstWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_LPP.json.failure.code_2" + failLPPL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" + failLPPWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" + failLPPWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" + failLPPWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.failure.code_2" + failLPPWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_LPP.json.large.failure.code_2" + ) + + testcase := []inpFileNamesCases{ + { + Ext: "", Fail: "code", ShouldMatch: true, + Fnames: [][]string{ + {correctBootstrapMetadataM, correctGLM, correctLPPM}, + {correctBootstrapMetadataWithFailM, correctGLWithFailM, correctLPPWithFailM}, + {correctBootstrapMetadataWith2FailsM, correctGLWith2FailsM, correctLPPWith2FailsM}, + {missingBootstrapMetadataEtv, missingGLEtv, missingLPPEtv}, + {missingBootstrapMetadataStv, missingGLStv, missingLPPStv}, + }, + Explainer: "happy path, case M", + ExpectedOutput: [][]string{ + {respConglomerateM}, + {respConglomerateWithFailM}, + {respConglomerateWith2FailsM}, + {respConglomerateWoEtv}, + {respConglomerateWoStv}, + }, + ExpToLarge: [][]string{ + {toLargeBootstrapMetadataM, toLargeGLM, toLargeLPPM}, + {toLargeBootstrapMetadataWithFailM, toLargeGLWithFailM, toLargeLPPWithFailM}, + {toLargeBootstrapMetadataWith2FailsM, toLargeGLWith2FailsM, toLargeLPPWith2FailsM}, + 
{toLargeBootstrapMetadataWoEtv, toLargeGLWoEtv, toLargeLPPWoEtv}, + {toLargeBootstrapMetadataWoStv, toLargeGLWoStv, toLargeLPPWoStv}, + }, + ExpSuccess: [][]string{ + {successBootstrapMetadataM, successGLM, successLPPM}, + {successBootstrapMetadataWithFailM, successGLWithFailM, successLPPWithFailM}, + {successBootstrapMetadataWith2FailsM, successGLWith2FailsM, successLPPWith2FailsM}, + {successBootstrapMetadatastWoEtv, successGLstWoEtv, successLPPstWoEtv}, + {successBootstrapMetadataMWoStv, successGLMWoStv, successLPPMWoStv}, + }, + ExpFailW2: [][]string{ + {failBootstrapMetadataM, failGLM, failLPPM}, + {failBootstrapMetadataWithFailM, failGLWithFailM, failLPPWithFailM}, + {failBootstrapMetadataWith2FailsM, failGLWith2FailsM, failLPPWith2FailsM}, + {failBootstrapMetadatastWoEtv, failGLstWoEtv, failLPPstWoEtv}, + {failBootstrapMetadataMWoStv, failGLMWoStv, failLPPMWoStv}, + }, + }, + { + Ext: "large", Fail: "code", ShouldMatch: true, + Fnames: [][]string{ + {correctBootstrapMetadataL, correctGLL, correctLPPL}, + {correctBootstrapMetadataWithFailL, correctGLWithFailL, correctLPPWithFailL}, + {correctBootstrapMetadataWith2FailsL, correctGLWith2FailsL, correctLPPWith2FailsL}, + }, + Explainer: "happy path, case L", + ExpectedOutput: [][]string{ + {respConglomerateL}, + {respConglomerateWithFailL}, + {respConglomerateWith2FailsL}, + }, + ExpSuccess: [][]string{ + {successBootstrapMetadataL, successGLL, successLPPL}, + {successBootstrapMetadataWithFailL, successGLWithFailL, successLPPWithFailL}, + {successBootstrapMetadataWith2FailsL, successGLWith2FailsL, successLPPWith2FailsL}, + }, + ExpFailW2: [][]string{ + {failBootstrapMetadataL, failGLL, failLPPL}, + {failBootstrapMetadataWithFailL, failGLWithFailL, failLPPWithFailL}, + {failBootstrapMetadataWith2FailsL, failGLWith2FailsL, failLPPWith2FailsL}, + }, + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: [][]string{ + {correctBootstrapMetadataL, correctGLL, correctLPPL}, + 
{correctBootstrapMetadataWithFailL, correctGLWithFailL, correctLPPWithFailL}, + {correctBootstrapMetadataWith2FailsL, correctGLWith2FailsL, correctLPPWith2FailsL}, + }, + Explainer: "M does not pick the files reserved for L", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: [][]string{ + {correctBootstrapMetadataM, correctGLM, correctLPPM}, + {correctBootstrapMetadataWithFailM, correctGLWithFailM, correctLPPWithFailM}, + {correctBootstrapMetadataWith2FailsM, correctGLWith2FailsM, correctLPPWith2FailsM}, + }, + Explainer: "L does not pick the files reserved for M", + }, + { + Ext: "", Fail: "code", ShouldMatch: false, + Fnames: [][]string{ + {notAPoint}, + {badName}, + }, + Explainer: "M does not pick obviously invalid files", + }, + { + Ext: "large", Fail: "code", ShouldMatch: false, + Fnames: [][]string{ + {missingBootstrapMetadataEtv, missingGLEtv, missingLPPEtv}, + {missingBootstrapMetadataStv, missingGLStv, missingLPPStv}, + {notAPoint}, + {badName}, + }, + Explainer: "L does not pick obviously invalid files", + }, + } + + for _, c := range testcase { + conf := config.Config{} + conf.Version = "0.1.2" + conf.Conglomeration.CanRunFullLarge = c.Ext == "large" + + def, err := ExecConglomerationDefinition(&conf) + assert.NoError(t, err) + + t.Run(c.Explainer, func(t *testing.T) { + runInpFileTestCase(t, def, c) + }) + } +} diff --git a/prover/cmd/controller/controller/job_definition_test.go b/prover/cmd/controller/controller/job_definition_test.go index 5e3281be1..df68057d5 100644 --- a/prover/cmd/controller/controller/job_definition_test.go +++ b/prover/cmd/controller/controller/job_definition_test.go @@ -20,24 +20,27 @@ type inpFileNamesCases struct { func runInpFileTestCase(t *testing.T, def *JobDefinition, c inpFileNamesCases) { - for i, fnames := range c.Fnames { + for i, ipFiles := range c.Fnames { // NB: if the regexp matches but the fields cannot be parsed // this will panic and fail the test. This is intentional. 
All // errors must be caught by the input file regexp. - job, err := NewJob(def, fnames) + job, err := NewJob(def, ipFiles) if c.ShouldMatch { - if !assert.NoError(t, err, fnames) { + if !assert.NoError(t, err, ipFiles) { // stop there for this iteration continue } - // Then try to format the response of the job - for idx := range fnames { - resp, err := job.ResponseFile(idx) - if assert.NoErrorf(t, err, "cannot produce a response for job %s", fnames[idx]) { - assert.Equal(t, c.ExpectedOutput[i][idx], resp, "wrong output file") + for idx := range ipFiles { + // idx -> inputIndex. ResponseFile takes in output Idx only + if idx < len(job.Def.OutputFileTmpl) { + opIdx := idx + resp, err := job.ResponseFile(opIdx) + if assert.NoErrorf(t, err, "cannot produce a response for job %s", ipFiles[idx]) { + assert.Equal(t, c.ExpectedOutput[i][idx], resp, "wrong output file") + } } // Try the name of the large one. If the case is specifying some @@ -66,11 +69,11 @@ func runInpFileTestCase(t *testing.T, def *JobDefinition, c inpFileNamesCases) { } } else { - for i := range fnames { + for i := range ipFiles { assert.Errorf( - t, err, fnames[i], + t, err, ipFiles[i], "%v should not match %s", - fnames, def.InputFileRegexp[i].String(), + ipFiles, def.InputFileRegexp[i].String(), ) } } diff --git a/prover/cmd/controller/controller/tmp_test.go b/prover/cmd/controller/controller/tmp_test.go new file mode 100644 index 000000000..8646df72f --- /dev/null +++ b/prover/cmd/controller/controller/tmp_test.go @@ -0,0 +1,211 @@ +package controller + +// func TestExecRndBeaconInFileRegexp(t *testing.T) { +// var ( +// correctBootstrapMetadataM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" +// // correctBootstrapMetadataL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large" +// // correctBootstrapMetadataWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77" +// // correctBootstrapMetadataWithFailL = 
"102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77" +// // correctBootstrapMetadataWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77.failure.code_77" +// // correctBootstrapMetadataWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77.failure.code_77" +// // missingBootstrapMetadataEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" +// // missingBootstrapMetadataStv = "102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json" + +// correctGLM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" +// // correctGLL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large" +// // correctGLWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_77" +// // correctGLWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_77" +// // correctGLWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_77.failure.code_77" +// // correctGLWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_77.failure.code_77" +// // missingGLEtv = "102-103-stv1.2.3-getZkProof_GL_RndBeacon.json" +// // missingGLStv = "102-103-etv0.2.3-getZkProof_GL_RndBeacon.json" +// // notAPoint = "102-103-etv0.2.3-getZkProofAjson" +// // badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" +// ) + +// // The responses in case of success +// var ( +// respRndBeaconM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// // respRndBeaconL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + +// // respRndBeaconWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// // respRndBeaconWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// // respRndBeaconWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" +// // 
respRndBeaconWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" + +// // respRndBeaconWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_RndBeacon.json" +// // respRndBeaconWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_RndBeacon.json" +// ) + +// // The rename in case it is deferred to the large prover +// var ( +// toLargeBootstrapMetadataM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" +// // toLargeBootstrapMetadataWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" +// // toLargeBootstrapMetadataWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" +// // toLargeBootstrapMetadataWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" +// // toLargeBootstrapMetadataWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" + +// toLargeGLM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" +// // toLargeGLWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" +// // toLargeGLWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" +// // toLargeGLWoEtv = "requests/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" +// // toLargeGLWoStv = "requests/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" +// ) + +// // The rename in case it is a success +// var ( +// successBootstrapMetadataM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// // successBootstrapMetadataMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// // successBootstrapMetadatastWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// 
// successBootstrapMetadataL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" +// // successBootstrapMetadataWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// // successBootstrapMetadataWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" +// // successBootstrapMetadataWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" +// // successBootstrapMetadataWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" + +// successGLM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" +// // successGLMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.success" +// // successGLstWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.success" +// // successGLL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" +// // successGLWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" +// // successGLWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" +// // successGLWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" +// // successGLWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" +// ) + +// // The rename in case it is a panic (code = 2) +// var ( +// failBootstrapMetadataM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// // failBootstrapMetadataMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// // failBootstrapMetadatastWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// // failBootstrapMetadataL = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" +// // failBootstrapMetadataWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// // failBootstrapMetadataWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" +// // failBootstrapMetadataWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" +// // failBootstrapMetadataWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" + +// failGLM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" +// // failGLMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" +// // failGLstWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" +// // failGLL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" +// // failGLWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" +// // failGLWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" +// // failGLWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" +// // failGLWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" +// ) + +// testcase := []inpFileNamesCases{ +// { +// Ext: "", Fail: "code", ShouldMatch: true, +// Fnames: [][]string{ +// {correctBootstrapMetadataM, correctGLM}, +// // {correctBootstrapMetadataWithFailM, correctGLWithFailM}, +// // {correctBootstrapMetadataWith2FailsM, correctGLWith2FailsM}, +// // {missingBootstrapMetadataEtv, missingGLEtv}, +// // {missingBootstrapMetadataStv, missingGLStv}, +// }, +// Explainer: "happy path, case M", +// ExpectedOutput: 
[][]string{ +// {respRndBeaconM}, +// // {respRndBeaconWithFailM}, +// // {respRndBeaconWith2FailsM}, +// // {respRndBeaconWoEtv}, +// // {respRndBeaconWoStv}, +// }, +// ExpToLarge: [][]string{ +// {toLargeBootstrapMetadataM, toLargeGLM}, +// // {toLargeBootstrapMetadataWithFailM, toLargeGLWithFailM}, +// // {toLargeBootstrapMetadataWith2FailsM, toLargeGLWith2FailsM}, +// // {toLargeBootstrapMetadataWoEtv, toLargeGLWoEtv}, +// // {toLargeBootstrapMetadataWoStv, toLargeGLWoStv}, +// }, +// ExpSuccess: [][]string{ +// {successBootstrapMetadataM, successGLM}, +// // {successBootstrapMetadataWithFailM, successGLWithFailM}, +// // {successBootstrapMetadataWith2FailsM, successGLWith2FailsM}, +// // {successBootstrapMetadatastWoEtv, successGLstWoEtv}, +// // {successBootstrapMetadataMWoStv, successGLMWoStv}, +// }, +// ExpFailW2: [][]string{ +// {failBootstrapMetadataM, failGLM}, +// // {failBootstrapMetadataWithFailM, failGLWithFailM}, +// // {failBootstrapMetadataWith2FailsM, failGLWith2FailsM}, +// // {failBootstrapMetadatastWoEtv, failGLstWoEtv}, +// // {failBootstrapMetadataMWoStv, failGLMWoStv}, +// }, +// }, +// // { +// // Ext: "large", Fail: "code", ShouldMatch: true, +// // Fnames: [][]string{ +// // {correctBootstrapMetadataL, correctGLL}, +// // {correctBootstrapMetadataWithFailL, correctGLWithFailL}, +// // {correctBootstrapMetadataWith2FailsL, correctGLWith2FailsL}, +// // }, +// // Explainer: "happy path, case L", +// // ExpectedOutput: [][]string{ +// // {respRndBeaconL}, +// // {respRndBeaconWithFailL}, +// // {respRndBeaconWith2FailsL}, +// // }, +// // ExpSuccess: [][]string{ +// // {successBootstrapMetadataL, successGLL}, +// // {successBootstrapMetadataWithFailL, successGLWithFailL}, +// // {successBootstrapMetadataWith2FailsL, successGLWith2FailsL}, +// // }, +// // ExpFailW2: [][]string{ +// // {failBootstrapMetadataL, failGLL}, +// // {failBootstrapMetadataWithFailL, failGLWithFailL}, +// // {failBootstrapMetadataWith2FailsL, failGLWith2FailsL}, 
+// // }, +// // }, +// // { +// // Ext: "", Fail: "code", ShouldMatch: false, +// // Fnames: [][]string{ +// // {correctBootstrapMetadataL, correctGLL}, +// // {correctBootstrapMetadataWithFailL, correctGLWithFailL}, +// // {correctBootstrapMetadataWith2FailsL, correctGLWith2FailsL}, +// // }, +// // Explainer: "M does not pick the files reserved for L", +// // }, +// // { +// // Ext: "large", Fail: "code", ShouldMatch: false, +// // Fnames: [][]string{ +// // {correctBootstrapMetadataM, correctGLM}, +// // {correctBootstrapMetadataWithFailM, correctGLWithFailM}, +// // {correctBootstrapMetadataWith2FailsM, correctGLWith2FailsM}, +// // }, +// // Explainer: "L does not pick the files reserved for M", +// // }, +// // { +// // Ext: "", Fail: "code", ShouldMatch: false, +// // Fnames: [][]string{ +// // {notAPoint}, +// // {badName}, +// // }, +// // Explainer: "M does not pick obviously invalid files", +// // }, +// // { +// // Ext: "large", Fail: "code", ShouldMatch: false, +// // Fnames: [][]string{ +// // {missingBootstrapMetadataEtv, missingGLEtv}, +// // {missingBootstrapMetadataStv, missingGLStv}, +// // {notAPoint}, +// // {badName}, +// // }, +// // Explainer: "L does not pick obviously invalid files", +// // }, +// } + +// for _, c := range testcase { +// conf := config.Config{} +// conf.Version = "0.1.2" +// conf.RndBeacon.CanRunFullLarge = c.Ext == "large" + +// def, err := ExecRndBeaconDefinition(&conf) +// assert.NoError(t, err) + +// t.Run(c.Explainer, func(t *testing.T) { +// runInpFileTestCase(t, def, c) +// }) +// } +// } From ee92346b4c28cf0c599796ced4fc03d5bb35c704 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Fri, 7 Feb 2025 15:44:21 +0000 Subject: [PATCH 21/48] add limitless prover components to file watcher --- .../cmd/controller/controller/fs_watcher.go | 31 +++++++ .../controller/controller/fs_watcher_test.go | 85 ++++++++++++++++++- .../controller/job_definition_limitless.go | 26 +++--- .../job_definition_limitless_test.go | 10 +-- 
prover/config/config.go | 29 +++---- prover/config/config_default.go | 6 ++ 6 files changed, 152 insertions(+), 35 deletions(-) diff --git a/prover/cmd/controller/controller/fs_watcher.go b/prover/cmd/controller/controller/fs_watcher.go index f20047f9f..49bfefd90 100644 --- a/prover/cmd/controller/controller/fs_watcher.go +++ b/prover/cmd/controller/controller/fs_watcher.go @@ -47,6 +47,37 @@ func NewFsWatcher(conf *config.Config) *FsWatcher { fs.JobToWatch = append(fs.JobToWatch, AggregatedDefinition(conf)) } + // Limitless prover job additions + if conf.Controller.EnableExecBootstrap { + if job, err := ExecBootstrapDefinition(conf); err == nil { + fs.JobToWatch = append(fs.JobToWatch, *job) + } + } + + if conf.Controller.EnableExecGL { + if job, err := ExecGLDefinition(conf); err == nil { + fs.JobToWatch = append(fs.JobToWatch, *job) + } + } + + if conf.Controller.EnableExecRndBeacon { + if job, err := ExecRndBeaconDefinition(conf); err == nil { + fs.JobToWatch = append(fs.JobToWatch, *job) + } + } + + if conf.Controller.EnableExecLPP { + if job, err := ExecLPPDefinition(conf); err == nil { + fs.JobToWatch = append(fs.JobToWatch, *job) + } + } + + if conf.Controller.EnableExecConglomeration { + if job, err := ExecConglomerationDefinition(conf); err == nil { + fs.JobToWatch = append(fs.JobToWatch, *job) + } + } + return fs } diff --git a/prover/cmd/controller/controller/fs_watcher_test.go b/prover/cmd/controller/controller/fs_watcher_test.go index fd57f7de3..237841ef2 100644 --- a/prover/cmd/controller/controller/fs_watcher_test.go +++ b/prover/cmd/controller/controller/fs_watcher_test.go @@ -179,6 +179,15 @@ func setupFsTest(t *testing.T) (confM, confL *config.Config) { execution = "execution" compression = "compression" aggregation = "aggregation" + + // Add conf. 
for Limitless prover: Naming convention: exec + exec = "bootstrap" + execBootstrapGL = "bootstrapGl" + execBootstrapMetadata = "bootstrapMetadata" + execGLRndBeacon = "gl-rndbeacon" + execGLConglomeration = "gl" + execRndbeaconLPP = "rndbeacon" + execLPPConglomeration = "lpp" ) // Create a configuration using temporary directories @@ -211,7 +220,6 @@ if [ $CODE -eq 0 ]; then fi exit $CODE ` - // For a prover M confM = &config.Config{ Version: "0.2.4", @@ -227,6 +235,13 @@ exit $CODE WorkerCmdLarge: cmdLargeInternal, DeferToOtherLargeCodes: []int{12, 137}, RetryLocallyWithLargeCodes: []int{10, 77}, + + // Limitless prover components + EnableExecBootstrap: true, + EnableExecGL: true, + EnableExecRndBeacon: true, + EnableExecLPP: true, + EnableExecConglomeration: true, }, Execution: config.Execution{ @@ -244,6 +259,42 @@ exit $CODE RequestsRootDir: path.Join(testDir, proverM, aggregation), }, }, + + // Limitless prover components + ExecBootstrap: config.Execution{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, exec), + }, + }, + ExecGL: config.Execution{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execBootstrapGL), + }, + }, + ExecRndBeacon: config.RndBeacon{ + GL: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execGLRndBeacon), + }, + BootstrapMetadata: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execBootstrapMetadata), + }, + }, + ExecLPP: config.Execution{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execRndbeaconLPP), + }, + }, + ExecConglomeration: config.Conglomeration{ + GL: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execGLConglomeration), + }, + LPP: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execLPPConglomeration), + }, + BootstrapMetadata: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execBootstrapMetadata), 
+ }, + }, } _confL := *confM @@ -271,6 +322,38 @@ exit $CODE os.MkdirAll(confM.Aggregation.DirFrom(), permCode), os.MkdirAll(confM.Aggregation.DirTo(), permCode), os.MkdirAll(confM.Aggregation.DirDone(), permCode), + + // Add stuff for Limitless prover + os.MkdirAll(confM.ExecBootstrap.DirFrom(), permCode), + os.MkdirAll(confM.ExecBootstrap.DirTo(), permCode), + os.MkdirAll(confM.ExecBootstrap.DirDone(), permCode), + + os.MkdirAll(confM.ExecGL.DirFrom(), permCode), + os.MkdirAll(confM.ExecGL.DirTo(), permCode), + os.MkdirAll(confM.ExecGL.DirDone(), permCode), + + os.MkdirAll(confM.ExecRndBeacon.GL.DirFrom(), permCode), + os.MkdirAll(confM.ExecRndBeacon.GL.DirTo(), permCode), + os.MkdirAll(confM.ExecRndBeacon.GL.DirDone(), permCode), + os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirFrom(), permCode), + os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirTo(), permCode), + os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirDone(), permCode), + + os.MkdirAll(confM.ExecLPP.DirFrom(), permCode), + os.MkdirAll(confM.ExecLPP.DirTo(), permCode), + os.MkdirAll(confM.ExecLPP.DirDone(), permCode), + + os.MkdirAll(confM.ExecConglomeration.GL.DirFrom(), permCode), + os.MkdirAll(confM.ExecConglomeration.GL.DirTo(), permCode), + os.MkdirAll(confM.ExecConglomeration.GL.DirDone(), permCode), + + os.MkdirAll(confM.ExecConglomeration.LPP.DirFrom(), permCode), + os.MkdirAll(confM.ExecConglomeration.LPP.DirTo(), permCode), + os.MkdirAll(confM.ExecConglomeration.LPP.DirDone(), permCode), + + os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirFrom(), permCode), + os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirTo(), permCode), + os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirDone(), permCode), ) if err != nil { diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index fea362d0e..1e353cc58 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ 
b/prover/cmd/controller/controller/job_definition_limitless.go @@ -50,12 +50,12 @@ const ( func ExecBootstrapDefinition(conf *config.Config) (*JobDefinition, error) { inpFileExt := "" - if conf.Bootstrap.CanRunFullLarge { + if conf.ExecBootstrap.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } // Input files - reqDirs := []string{conf.Bootstrap.RequestsRootDir} + reqDirs := []string{conf.ExecBootstrap.RequestsRootDir} inputFilePatterns := []string{fmt.Sprintf(execBootstrapInputPattern, inpFileExt, config.FailSuffix)} // Output files @@ -68,12 +68,12 @@ func ExecBootstrapDefinition(conf *config.Config) (*JobDefinition, error) { func ExecGLDefinition(conf *config.Config) (*JobDefinition, error) { inpFileExt := "" - if conf.GLExecution.CanRunFullLarge { + if conf.ExecGL.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } // Input files - reqDirs := []string{conf.GLExecution.RequestsRootDir} + reqDirs := []string{conf.ExecGL.RequestsRootDir} inputFilePatterns := []string{fmt.Sprintf(execBootstrapGLInputPattern, inpFileExt, config.FailSuffix)} // Output files @@ -86,14 +86,14 @@ func ExecGLDefinition(conf *config.Config) (*JobDefinition, error) { func ExecRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { inpFileExt := "" - if conf.RndBeacon.CanRunFullLarge { + if conf.ExecRndBeacon.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } // Input files reqDirs := []string{ - conf.RndBeacon.MetaData.RequestsRootDir, - conf.RndBeacon.GL.RequestsRootDir, + conf.ExecRndBeacon.BootstrapMetadata.RequestsRootDir, + conf.ExecRndBeacon.GL.RequestsRootDir, } inputFilePatterns := []string{ fmt.Sprintf(execBootstrapRndBeaconInputPattern, inpFileExt, config.FailSuffix), @@ -110,12 +110,12 @@ func ExecRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { func ExecLPPDefinition(conf *config.Config) (*JobDefinition, error) { inpFileExt := "" - if conf.LPPExecution.CanRunFullLarge { + if 
conf.ExecLPP.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } // Input files - reqDirs := []string{conf.LPPExecution.RequestsRootDir} + reqDirs := []string{conf.ExecLPP.RequestsRootDir} inputFilePatterns := []string{fmt.Sprintf(execLPPInputPattern, inpFileExt, config.FailSuffix)} // Output files @@ -128,15 +128,15 @@ func ExecLPPDefinition(conf *config.Config) (*JobDefinition, error) { func ExecConglomerationDefinition(conf *config.Config) (*JobDefinition, error) { inpFileExt := "" - if conf.Conglomeration.CanRunFullLarge { + if conf.ExecConglomeration.CanRunFullLarge { inpFileExt = fmt.Sprintf(`\.%v`, config.LargeSuffix) } // Input files reqDirs := []string{ - conf.Conglomeration.BootstrapMetadata.RequestsRootDir, - conf.Conglomeration.GL.RequestsRootDir, - conf.Conglomeration.LPP.RequestsRootDir, + conf.ExecConglomeration.BootstrapMetadata.RequestsRootDir, + conf.ExecConglomeration.GL.RequestsRootDir, + conf.ExecConglomeration.LPP.RequestsRootDir, } inputFilePatterns := []string{ fmt.Sprintf(execConglomerateBootstrapDistMetadataPattern, inpFileExt, config.FailSuffix), diff --git a/prover/cmd/controller/controller/job_definition_limitless_test.go b/prover/cmd/controller/controller/job_definition_limitless_test.go index 8b0518f4f..53289586b 100644 --- a/prover/cmd/controller/controller/job_definition_limitless_test.go +++ b/prover/cmd/controller/controller/job_definition_limitless_test.go @@ -119,7 +119,7 @@ func TestExecBootstrapInFileRegexp(t *testing.T) { for _, c := range testcase { conf := config.Config{} conf.Version = "0.1.2" - conf.Bootstrap.CanRunFullLarge = c.Ext == "large" + conf.ExecBootstrap.CanRunFullLarge = c.Ext == "large" def, err := ExecBootstrapDefinition(&conf) assert.NoError(t, err) @@ -242,7 +242,7 @@ func TestExecGLInFileRegexp(t *testing.T) { for _, c := range testcase { conf := config.Config{} conf.Version = "0.1.2" - conf.GLExecution.CanRunFullLarge = c.Ext == "large" + conf.ExecGL.CanRunFullLarge = c.Ext == "large" 
def, err := ExecGLDefinition(&conf) assert.NoError(t, err) @@ -452,7 +452,7 @@ func TestExecRndBeaconInFileRegexp(t *testing.T) { for _, c := range testcase { conf := config.Config{} conf.Version = "0.1.2" - conf.RndBeacon.CanRunFullLarge = c.Ext == "large" + conf.ExecRndBeacon.CanRunFullLarge = c.Ext == "large" def, err := ExecRndBeaconDefinition(&conf) assert.NoError(t, err) @@ -567,7 +567,7 @@ func TestExecLPPInFileRegexp(t *testing.T) { for _, c := range testcase { conf := config.Config{} conf.Version = "0.1.2" - conf.LPPExecution.CanRunFullLarge = c.Ext == "large" + conf.ExecLPP.CanRunFullLarge = c.Ext == "large" def, err := ExecLPPDefinition(&conf) assert.NoError(t, err) @@ -810,7 +810,7 @@ func TestExecConglomerationInFileRegexp(t *testing.T) { for _, c := range testcase { conf := config.Config{} conf.Version = "0.1.2" - conf.Conglomeration.CanRunFullLarge = c.Ext == "large" + conf.ExecConglomeration.CanRunFullLarge = c.Ext == "large" def, err := ExecConglomerationDefinition(&conf) assert.NoError(t, err) diff --git a/prover/config/config.go b/prover/config/config.go index 67164625b..163b50329 100644 --- a/prover/config/config.go +++ b/prover/config/config.go @@ -112,15 +112,11 @@ type Config struct { PublicInputInterconnection PublicInput `mapstructure:"public_input_interconnection"` // TODO add wizard compilation params // LIMITLESS PROVER Components - Bootstrap Execution `mapstructure:"execution_bootstrap"` - - GLExecution Execution `mapstructure:"execution_gl"` - - RndBeacon RndBeacon `mapstructure:"execution_rndbeacon"` - - LPPExecution Execution `mapstructure:"execution_lpp"` - - Conglomeration Conglomeration `mapstructure:"execution_conglomeration"` + ExecBootstrap Execution `mapstructure:"execution_bootstrap"` + ExecGL Execution `mapstructure:"execution_gl"` + ExecLPP Execution `mapstructure:"execution_lpp"` + ExecRndBeacon RndBeacon `mapstructure:"execution_rndbeacon"` + ExecConglomeration Conglomeration `mapstructure:"execution_conglomeration"` 
Debug struct { // Profiling indicates whether we want to generate profiles using the [runtime/pprof] pkg. @@ -153,16 +149,13 @@ type Config struct { type RndBeacon struct { GL WithRequestDir `mapstructure:",squash"` - MetaData WithRequestDir `mapstructure:",squash"` + BootstrapMetadata WithRequestDir `mapstructure:",squash"` // ProverMode stores the kind of prover to use. ProverMode ProverMode `mapstructure:"prover_mode" validate:"required,oneof=dev partial full proofless bench check-only encode-only"` // CanRunFullLarge indicates whether the prover is running on a large machine (and can run full large traces). CanRunFullLarge bool `mapstructure:"can_run_full_large"` - - // ConflatedTracesDir stores the directory where the conflation traces are stored. - ConflatedTracesDir string `mapstructure:"conflated_traces_dir" validate:"required"` } type Conglomeration struct { @@ -177,9 +170,6 @@ type Conglomeration struct { // CanRunFullLarge indicates whether the prover is running on a large machine (and can run full large traces). CanRunFullLarge bool `mapstructure:"can_run_full_large"` - - // ConflatedTracesDir stores the directory where the conflation traces are stored. - ConflatedTracesDir string `mapstructure:"conflated_traces_dir" validate:"required"` } func (cfg *Config) Logger() *logrus.Logger { @@ -224,6 +214,13 @@ type Controller struct { EnableBlobDecompression bool `mapstructure:"enable_blob_decompression"` EnableAggregation bool `mapstructure:"enable_aggregation"` + // Limitless prover components. 
Defaults to true + EnableExecBootstrap bool `mapstructure:"enable_exec_bootstrap"` + EnableExecGL bool `mapstructure:"enable_exec_gl"` + EnableExecRndBeacon bool `mapstructure:"enable_exec_rndbeacon"` + EnableExecLPP bool `mapstructure:"enable_exec_lpp"` + EnableExecConglomeration bool `mapstructure:"enable_exec_conglomeration"` + // TODO @gbotrel the only reason we keep these is for test purposes; default value is fine, // we should remove them from here for readability. WorkerCmd string `mapstructure:"worker_cmd_tmpl"` diff --git a/prover/config/config_default.go b/prover/config/config_default.go index 40a392a79..5c444012f 100644 --- a/prover/config/config_default.go +++ b/prover/config/config_default.go @@ -18,6 +18,12 @@ func setDefaultValues() { viper.SetDefault("controller.enable_blob_decompression", true) viper.SetDefault("controller.enable_aggregation", true) + viper.SetDefault("controller.enable_exec_bootstrap", true) + viper.SetDefault("controller.enable_exec_gl", true) + viper.SetDefault("controller.enable_exec_rndbeacon", true) + viper.SetDefault("controller.enable_exec_lpp", true) + viper.SetDefault("controller.enable_exec_conglomeration", true) + // Set the default values for the retry delays viper.SetDefault("controller.retry_delays", []int{0, 1, 2, 3, 5, 8, 13, 21, 44, 85}) viper.SetDefault("controller.defer_to_other_large_codes", DefaultDeferToOtherLargeCodes) From d44226ec7c0c4c4f29d9c51d716590dc3463563c Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Fri, 7 Feb 2025 15:57:20 +0000 Subject: [PATCH 22/48] define fswatcher for limitless prover --- prover/cmd/controller/controller/fs_watcher_test.go | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/prover/cmd/controller/controller/fs_watcher_test.go b/prover/cmd/controller/controller/fs_watcher_test.go index 237841ef2..8a8f228f5 100644 --- a/prover/cmd/controller/controller/fs_watcher_test.go +++ b/prover/cmd/controller/controller/fs_watcher_test.go @@ -164,6 +164,11 @@ func 
TestFileWatcherL(t *testing.T) { assert.Nil(t, fw.GetBest(), "the queue should be empty now") } +// TODO: Write this test +func TestLimitlessProverFileWatcherL(t *testing.T) { + +} + // Sets up the test environment by creating temporary directories and configurations for the prover. func setupFsTest(t *testing.T) (confM, confL *config.Config) { // Testdir is going to contain the whole test directory @@ -181,7 +186,7 @@ func setupFsTest(t *testing.T) (confM, confL *config.Config) { aggregation = "aggregation" // Add conf. for Limitless prover: Naming convention: exec - exec = "bootstrap" + execBootstrap = "bootstrap" execBootstrapGL = "bootstrapGl" execBootstrapMetadata = "bootstrapMetadata" execGLRndBeacon = "gl-rndbeacon" @@ -263,7 +268,7 @@ exit $CODE // Limitless prover components ExecBootstrap: config.Execution{ WithRequestDir: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, exec), + RequestsRootDir: path.Join(testDir, proverM, execBootstrap), }, }, ExecGL: config.Execution{ From f398561a5d64fd39dc91288d3aa834d7aae5e855 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Tue, 11 Feb 2025 08:47:45 +0000 Subject: [PATCH 23/48] change withdraw req struct --- prover/backend/aggregation/craft.go | 4 +- prover/cmd/controller/controller/command.go | 18 +- .../controller/controller/controller_test.go | 24 +-- .../controller/controller/fs_watcher_test.go | 176 +++++++++--------- .../controller/controller/job_definition.go | 6 +- .../controller/job_definition_limitless.go | 18 +- .../job_definition_limitless_test.go | 7 + .../controller/job_definition_test.go | 7 +- prover/config/config.go | 14 +- prover/utils/requests.go | 22 +++ 10 files changed, 161 insertions(+), 135 deletions(-) create mode 100644 prover/utils/requests.go diff --git a/prover/backend/aggregation/craft.go b/prover/backend/aggregation/craft.go index 075b2a2c9..59bcdceb4 100644 --- a/prover/backend/aggregation/craft.go +++ b/prover/backend/aggregation/craft.go @@ -60,7 +60,7 @@ func 
collectFields(cfg *config.Config, req *Request) (*CollectedFields, error) { var ( po execution.Response l2MessageHashes []string - fpath = path.Join(cfg.Execution.DirTo(), execReqFPath) + fpath = path.Join(cfg.Execution.DirTo(0), execReqFPath) f = files.MustRead(fpath) ) @@ -155,7 +155,7 @@ func collectFields(cfg *config.Config, req *Request) (*CollectedFields, error) { for i, decompReqFPath := range req.DecompressionProofs { dp := &blobdecompression.Response{} - fpath := path.Join(cfg.BlobDecompression.DirTo(), decompReqFPath) + fpath := path.Join(cfg.BlobDecompression.DirTo(0), decompReqFPath) f := files.MustRead(fpath) if err := json.NewDecoder(f).Decode(dp); err != nil { diff --git a/prover/cmd/controller/controller/command.go b/prover/cmd/controller/controller/command.go index bd856d1eb..e7a0ce779 100644 --- a/prover/cmd/controller/controller/command.go +++ b/prover/cmd/controller/controller/command.go @@ -47,15 +47,15 @@ func cobraControllerRunCmd(c *cobra.Command, args []string) { // TODO @gbotrel @AlexandreBelling check who is responsible for creating the directories // create the sub directories if they do not exist dirs := []string{ - cfg.Execution.DirDone(), - cfg.Execution.DirFrom(), - cfg.Execution.DirTo(), - cfg.BlobDecompression.DirDone(), - cfg.BlobDecompression.DirFrom(), - cfg.BlobDecompression.DirTo(), - cfg.Aggregation.DirDone(), - cfg.Aggregation.DirFrom(), - cfg.Aggregation.DirTo(), + cfg.Execution.DirDone(0), + cfg.Execution.DirFrom(0), + cfg.Execution.DirTo(0), + cfg.BlobDecompression.DirDone(0), + cfg.BlobDecompression.DirFrom(0), + cfg.BlobDecompression.DirTo(0), + cfg.Aggregation.DirDone(0), + cfg.Aggregation.DirFrom(0), + cfg.Aggregation.DirTo(0), } for _, dir := range dirs { diff --git a/prover/cmd/controller/controller/controller_test.go b/prover/cmd/controller/controller/controller_test.go index 5b586fbf6..216b2a10a 100644 --- a/prover/cmd/controller/controller/controller_test.go +++ 
b/prover/cmd/controller/controller/controller_test.go @@ -17,9 +17,9 @@ func TestRunCommand(t *testing.T) { confM, confL := setupFsTest(t) var ( - eFrom string = confM.Execution.DirFrom() - cFrom string = confM.BlobDecompression.DirFrom() - aFrom string = confM.Aggregation.DirFrom() + eFrom string = confM.Execution.DirFrom(0) + cFrom string = confM.BlobDecompression.DirFrom(0) + aFrom string = confM.Aggregation.DirFrom(0) exit0 int = 0 exit2 int = 2 exit10 int = 10 @@ -73,11 +73,11 @@ func TestRunCommand(t *testing.T) { Entries []string }{ { - Path: confM.Execution.DirFrom(), + Path: confM.Execution.DirFrom(0), Entries: []string{}, // all files should be processed }, { - Path: confM.Execution.DirDone(), + Path: confM.Execution.DirDone(0), Entries: []string{ "0-1-etv0.1.2-stv1.2.3-getZkProof.json.success", "1-2-etv0.1.2-stv1.2.3-getZkProof.json.large.success", @@ -92,7 +92,7 @@ func TestRunCommand(t *testing.T) { }, }, { - Path: confM.Execution.DirTo(), + Path: confM.Execution.DirTo(0), Entries: []string{ "0-1-getZkProof.json", "1-2-getZkProof.json", @@ -101,11 +101,11 @@ func TestRunCommand(t *testing.T) { }, }, { - Path: confM.BlobDecompression.DirFrom(), + Path: confM.BlobDecompression.DirFrom(0), Entries: []string{}, }, { - Path: confM.BlobDecompression.DirDone(), + Path: confM.BlobDecompression.DirDone(0), Entries: []string{ "0-2-bcv0.1.2-ccv0.1.2-getZkBlobCompressionProof.json.success", "2-4-bcv0.1.2-ccv0.1.2-getZkBlobCompressionProof.json.failure.code_2", @@ -114,17 +114,17 @@ func TestRunCommand(t *testing.T) { }, }, { - Path: confM.BlobDecompression.DirTo(), + Path: confM.BlobDecompression.DirTo(0), Entries: []string{ "0-2-getZkBlobCompressionProof.json", }, }, { - Path: confM.Aggregation.DirFrom(), + Path: confM.Aggregation.DirFrom(0), Entries: []string{}, }, { - Path: confM.Aggregation.DirDone(), + Path: confM.Aggregation.DirDone(0), Entries: []string{ "0-2-deadbeef57-getZkAggregatedProof.json.success", 
"2-4-deadbeef57-getZkAggregatedProof.json.failure.code_2", @@ -133,7 +133,7 @@ func TestRunCommand(t *testing.T) { }, }, { - Path: confM.Aggregation.DirTo(), + Path: confM.Aggregation.DirTo(0), Entries: []string{"0-2-deadbeef57-getZkAggregatedProof.json"}, }, } diff --git a/prover/cmd/controller/controller/fs_watcher_test.go b/prover/cmd/controller/controller/fs_watcher_test.go index 8a8f228f5..553c24ddc 100644 --- a/prover/cmd/controller/controller/fs_watcher_test.go +++ b/prover/cmd/controller/controller/fs_watcher_test.go @@ -56,36 +56,36 @@ func TestFileWatcherM(t *testing.T) { Skip bool }{ { - FName: []string{createTestInputFile(eFrom(), 0, 1, execJob, exitCode)}, + FName: []string{createTestInputFile(eFrom(0), 0, 1, execJob, exitCode)}, }, { Skip: true, // wrong directory - FName: []string{createTestInputFile(eFrom(), 0, 1, aggregationJob, exitCode)}, + FName: []string{createTestInputFile(eFrom(0), 0, 1, aggregationJob, exitCode)}, }, { - FName: []string{createTestInputFile(cFrom(), 0, 1, compressionJob, exitCode)}, + FName: []string{createTestInputFile(cFrom(0), 0, 1, compressionJob, exitCode)}, }, { - FName: []string{createTestInputFile(eFrom(), 1, 2, execJob, exitCode)}, + FName: []string{createTestInputFile(eFrom(0), 1, 2, execJob, exitCode)}, }, { - FName: []string{createTestInputFile(cFrom(), 1, 2, compressionJob, exitCode)}, + FName: []string{createTestInputFile(cFrom(0), 1, 2, compressionJob, exitCode)}, }, { - FName: []string{createTestInputFile(aFrom(), 0, 2, aggregationJob, exitCode)}, + FName: []string{createTestInputFile(aFrom(0), 0, 2, aggregationJob, exitCode)}, }, { Skip: true, // for large only - FName: []string{createTestInputFile(eFrom(), 2, 4, execJob, exitCode, forLarge)}, + FName: []string{createTestInputFile(eFrom(0), 2, 4, execJob, exitCode, forLarge)}, }, { - FName: []string{createTestInputFile(eFrom(), 4, 5, execJob, exitCode)}, + FName: []string{createTestInputFile(eFrom(0), 4, 5, execJob, exitCode)}, }, { - FName: 
[]string{createTestInputFile(cFrom(), 2, 5, compressionJob, exitCode)}, + FName: []string{createTestInputFile(cFrom(0), 2, 5, compressionJob, exitCode)}, }, { - FName: []string{createTestInputFile(aFrom(), 2, 5, aggregationJob, exitCode)}, + FName: []string{createTestInputFile(aFrom(0), 2, 5, aggregationJob, exitCode)}, }, } @@ -111,7 +111,7 @@ func TestFileWatcherL(t *testing.T) { _, confL := setupFsTest(t) // Create a list of files - eFrom := confL.Execution.DirFrom() + eFrom := confL.Execution.DirFrom(0) exitCode := 0 // we are not interested in the exit code here @@ -251,55 +251,56 @@ exit $CODE Execution: config.Execution{ WithRequestDir: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execution), + RequestsRootDir: []string{path.Join(testDir, proverM, execution)}, }, }, BlobDecompression: config.BlobDecompression{ WithRequestDir: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, compression), + RequestsRootDir: []string{path.Join(testDir, proverM, compression)}, }, }, Aggregation: config.Aggregation{ WithRequestDir: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, aggregation), + RequestsRootDir: []string{path.Join(testDir, proverM, aggregation)}, }, }, - // Limitless prover components - ExecBootstrap: config.Execution{ - WithRequestDir: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execBootstrap), - }, - }, - ExecGL: config.Execution{ - WithRequestDir: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execBootstrapGL), - }, - }, - ExecRndBeacon: config.RndBeacon{ - GL: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execGLRndBeacon), - }, - BootstrapMetadata: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execBootstrapMetadata), - }, - }, - ExecLPP: config.Execution{ - WithRequestDir: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execRndbeaconLPP), + /* + // Limitless prover components 
+ ExecBootstrap: config.Execution{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execBootstrap), + }, }, - }, - ExecConglomeration: config.Conglomeration{ - GL: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execGLConglomeration), + ExecGL: config.Execution{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execBootstrapGL), + }, }, - LPP: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execLPPConglomeration), + ExecRndBeacon: config.RndBeacon{ + GL: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execGLRndBeacon), + }, + BootstrapMetadata: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execBootstrapMetadata), + }, }, - BootstrapMetadata: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execBootstrapMetadata), + ExecLPP: config.Execution{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execRndbeaconLPP), + }, }, - }, + ExecConglomeration: config.Conglomeration{ + GL: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execGLConglomeration), + }, + LPP: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execLPPConglomeration), + }, + BootstrapMetadata: config.WithRequestDir{ + RequestsRootDir: path.Join(testDir, proverM, execBootstrapMetadata), + }, + }, */ } _confL := *confM @@ -318,47 +319,50 @@ exit $CODE // wiped out after the test anyway. 
permCode := fs.FileMode(0777) err := errors.Join( - os.MkdirAll(confM.Execution.DirFrom(), permCode), - os.MkdirAll(confM.Execution.DirTo(), permCode), - os.MkdirAll(confM.Execution.DirDone(), permCode), - os.MkdirAll(confM.BlobDecompression.DirFrom(), permCode), - os.MkdirAll(confM.BlobDecompression.DirTo(), permCode), - os.MkdirAll(confM.BlobDecompression.DirDone(), permCode), - os.MkdirAll(confM.Aggregation.DirFrom(), permCode), - os.MkdirAll(confM.Aggregation.DirTo(), permCode), - os.MkdirAll(confM.Aggregation.DirDone(), permCode), - - // Add stuff for Limitless prover - os.MkdirAll(confM.ExecBootstrap.DirFrom(), permCode), - os.MkdirAll(confM.ExecBootstrap.DirTo(), permCode), - os.MkdirAll(confM.ExecBootstrap.DirDone(), permCode), - - os.MkdirAll(confM.ExecGL.DirFrom(), permCode), - os.MkdirAll(confM.ExecGL.DirTo(), permCode), - os.MkdirAll(confM.ExecGL.DirDone(), permCode), - - os.MkdirAll(confM.ExecRndBeacon.GL.DirFrom(), permCode), - os.MkdirAll(confM.ExecRndBeacon.GL.DirTo(), permCode), - os.MkdirAll(confM.ExecRndBeacon.GL.DirDone(), permCode), - os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirFrom(), permCode), - os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirTo(), permCode), - os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirDone(), permCode), - - os.MkdirAll(confM.ExecLPP.DirFrom(), permCode), - os.MkdirAll(confM.ExecLPP.DirTo(), permCode), - os.MkdirAll(confM.ExecLPP.DirDone(), permCode), - - os.MkdirAll(confM.ExecConglomeration.GL.DirFrom(), permCode), - os.MkdirAll(confM.ExecConglomeration.GL.DirTo(), permCode), - os.MkdirAll(confM.ExecConglomeration.GL.DirDone(), permCode), - - os.MkdirAll(confM.ExecConglomeration.LPP.DirFrom(), permCode), - os.MkdirAll(confM.ExecConglomeration.LPP.DirTo(), permCode), - os.MkdirAll(confM.ExecConglomeration.LPP.DirDone(), permCode), - - os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirFrom(), permCode), - os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirTo(), permCode), - 
os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirDone(), permCode), + os.MkdirAll(confM.Execution.DirFrom(0), permCode), + os.MkdirAll(confM.Execution.DirTo(0), permCode), + os.MkdirAll(confM.Execution.DirDone(0), permCode), + os.MkdirAll(confM.BlobDecompression.DirFrom(0), permCode), + os.MkdirAll(confM.BlobDecompression.DirTo(0), permCode), + os.MkdirAll(confM.BlobDecompression.DirDone(0), permCode), + os.MkdirAll(confM.Aggregation.DirFrom(0), permCode), + os.MkdirAll(confM.Aggregation.DirTo(0), permCode), + os.MkdirAll(confM.Aggregation.DirDone(0), permCode), + + /* + // Add stuff for Limitless prover + os.MkdirAll(confM.ExecBootstrap.DirFrom(), permCode), + os.MkdirAll(confM.ExecBootstrap.DirTo(), permCode), + os.MkdirAll(confM.ExecBootstrap.DirDone(), permCode), + + os.MkdirAll(confM.ExecGL.DirFrom(), permCode), + os.MkdirAll(confM.ExecGL.DirTo(), permCode), + os.MkdirAll(confM.ExecGL.DirDone(), permCode), + + os.MkdirAll(confM.ExecRndBeacon.GL.DirFrom(), permCode), + os.MkdirAll(confM.ExecRndBeacon.GL.DirTo(), permCode), + os.MkdirAll(confM.ExecRndBeacon.GL.DirDone(), permCode), + os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirFrom(), permCode), + os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirTo(), permCode), + os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirDone(), permCode), + + os.MkdirAll(confM.ExecLPP.DirFrom(), permCode), + os.MkdirAll(confM.ExecLPP.DirTo(), permCode), + os.MkdirAll(confM.ExecLPP.DirDone(), permCode), + + os.MkdirAll(confM.ExecConglomeration.GL.DirFrom(), permCode), + os.MkdirAll(confM.ExecConglomeration.GL.DirTo(), permCode), + os.MkdirAll(confM.ExecConglomeration.GL.DirDone(), permCode), + + os.MkdirAll(confM.ExecConglomeration.LPP.DirFrom(), permCode), + os.MkdirAll(confM.ExecConglomeration.LPP.DirTo(), permCode), + os.MkdirAll(confM.ExecConglomeration.LPP.DirDone(), permCode), + + os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirFrom(), permCode), + 
os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirTo(), permCode), + os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirDone(), permCode), + + */ ) if err != nil { diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index 9579549b0..adddadf0a 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -126,7 +126,7 @@ func ExecutionDefinition(conf *config.Config) JobDefinition { jobDef, err := commonJobDefinition( jobNameExecution, 0, - []string{conf.Execution.RequestsRootDir}, + conf.Execution.RequestsRootDir, []string{inputFilePattern}, []string{"exec-output-file"}, []string{"{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-getZkProof.json"}, @@ -158,7 +158,7 @@ func CompressionDefinition(conf *config.Config) JobDefinition { jobDef, err := commonJobDefinition( jobNameBlobDecompression, 1, - []string{conf.BlobDecompression.RequestsRootDir}, + conf.BlobDecompression.RequestsRootDir, []string{inputFilePattern}, []string{"compress-output-file"}, []string{"{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-{{ index .Job.ContentHash .Idx }}-getZkBlobCompressionProof.json"}, @@ -189,7 +189,7 @@ func AggregatedDefinition(conf *config.Config) JobDefinition { jobDef, err := commonJobDefinition( jobNameAggregation, 2, - []string{conf.Aggregation.RequestsRootDir}, + conf.Aggregation.RequestsRootDir, []string{inputFilePattern}, []string{"agreg-output-file"}, []string{"{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-{{ index .Job.ContentHash .Idx }}-getZkAggregatedProof.json"}, diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index 1e353cc58..536a52c97 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -4,6 +4,7 @@ import ( "fmt" 
"github.com/consensys/linea-monorepo/prover/config" + "github.com/consensys/linea-monorepo/prover/utils" ) const ( @@ -55,7 +56,7 @@ func ExecBootstrapDefinition(conf *config.Config) (*JobDefinition, error) { } // Input files - reqDirs := []string{conf.ExecBootstrap.RequestsRootDir} + reqDirs := conf.ExecBootstrap.RequestsRootDir inputFilePatterns := []string{fmt.Sprintf(execBootstrapInputPattern, inpFileExt, config.FailSuffix)} // Output files @@ -73,7 +74,7 @@ func ExecGLDefinition(conf *config.Config) (*JobDefinition, error) { } // Input files - reqDirs := []string{conf.ExecGL.RequestsRootDir} + reqDirs := conf.ExecGL.RequestsRootDir inputFilePatterns := []string{fmt.Sprintf(execBootstrapGLInputPattern, inpFileExt, config.FailSuffix)} // Output files @@ -91,10 +92,7 @@ func ExecRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { } // Input files - reqDirs := []string{ - conf.ExecRndBeacon.BootstrapMetadata.RequestsRootDir, - conf.ExecRndBeacon.GL.RequestsRootDir, - } + reqDirs := utils.CombineRequests(conf.ExecRndBeacon.BootstrapMetadata.RequestsRootDir, conf.ExecRndBeacon.GL.RequestsRootDir) inputFilePatterns := []string{ fmt.Sprintf(execBootstrapRndBeaconInputPattern, inpFileExt, config.FailSuffix), fmt.Sprintf(execGLRndBeaconInputPattern, inpFileExt, config.FailSuffix), @@ -115,7 +113,7 @@ func ExecLPPDefinition(conf *config.Config) (*JobDefinition, error) { } // Input files - reqDirs := []string{conf.ExecLPP.RequestsRootDir} + reqDirs := conf.ExecLPP.RequestsRootDir inputFilePatterns := []string{fmt.Sprintf(execLPPInputPattern, inpFileExt, config.FailSuffix)} // Output files @@ -133,11 +131,7 @@ func ExecConglomerationDefinition(conf *config.Config) (*JobDefinition, error) { } // Input files - reqDirs := []string{ - conf.ExecConglomeration.BootstrapMetadata.RequestsRootDir, - conf.ExecConglomeration.GL.RequestsRootDir, - conf.ExecConglomeration.LPP.RequestsRootDir, - } + reqDirs := 
utils.CombineRequests(conf.ExecConglomeration.BootstrapMetadata.RequestsRootDir, conf.ExecConglomeration.GL.RequestsRootDir, conf.ExecConglomeration.LPP.RequestsRootDir) inputFilePatterns := []string{ fmt.Sprintf(execConglomerateBootstrapDistMetadataPattern, inpFileExt, config.FailSuffix), fmt.Sprintf(execConglomerateGLInputPattern, inpFileExt, config.FailSuffix), diff --git a/prover/cmd/controller/controller/job_definition_limitless_test.go b/prover/cmd/controller/controller/job_definition_limitless_test.go index 53289586b..81d328abd 100644 --- a/prover/cmd/controller/controller/job_definition_limitless_test.go +++ b/prover/cmd/controller/controller/job_definition_limitless_test.go @@ -120,6 +120,7 @@ func TestExecBootstrapInFileRegexp(t *testing.T) { conf := config.Config{} conf.Version = "0.1.2" conf.ExecBootstrap.CanRunFullLarge = c.Ext == "large" + conf.ExecBootstrap.RequestsRootDir = []string{""} def, err := ExecBootstrapDefinition(&conf) assert.NoError(t, err) @@ -243,6 +244,7 @@ func TestExecGLInFileRegexp(t *testing.T) { conf := config.Config{} conf.Version = "0.1.2" conf.ExecGL.CanRunFullLarge = c.Ext == "large" + conf.ExecGL.RequestsRootDir = []string{""} def, err := ExecGLDefinition(&conf) assert.NoError(t, err) @@ -453,6 +455,7 @@ func TestExecRndBeaconInFileRegexp(t *testing.T) { conf := config.Config{} conf.Version = "0.1.2" conf.ExecRndBeacon.CanRunFullLarge = c.Ext == "large" + conf.ExecRndBeacon.GL.RequestsRootDir = []string{"", ""} def, err := ExecRndBeaconDefinition(&conf) assert.NoError(t, err) @@ -568,6 +571,7 @@ func TestExecLPPInFileRegexp(t *testing.T) { conf := config.Config{} conf.Version = "0.1.2" conf.ExecLPP.CanRunFullLarge = c.Ext == "large" + conf.ExecLPP.RequestsRootDir = []string{""} def, err := ExecLPPDefinition(&conf) assert.NoError(t, err) @@ -811,6 +815,9 @@ func TestExecConglomerationInFileRegexp(t *testing.T) { conf := config.Config{} conf.Version = "0.1.2" conf.ExecConglomeration.CanRunFullLarge = c.Ext == "large" + 
conf.ExecConglomeration.GL.RequestsRootDir = []string{""} + conf.ExecConglomeration.LPP.RequestsRootDir = []string{""} + conf.ExecConglomeration.BootstrapMetadata.RequestsRootDir = []string{""} def, err := ExecConglomerationDefinition(&conf) assert.NoError(t, err) diff --git a/prover/cmd/controller/controller/job_definition_test.go b/prover/cmd/controller/controller/job_definition_test.go index df68057d5..865a36457 100644 --- a/prover/cmd/controller/controller/job_definition_test.go +++ b/prover/cmd/controller/controller/job_definition_test.go @@ -190,6 +190,7 @@ func TestExecutionInFileRegexp(t *testing.T) { conf := config.Config{} conf.Version = "0.1.2" conf.Execution.CanRunFullLarge = c.Ext == "large" + conf.Execution.RequestsRootDir = []string{""} // conf.Execution.FilterInExtension = c.Ext def := ExecutionDefinition(&conf) @@ -252,8 +253,7 @@ func TestCompressionInFileRegexp(t *testing.T) { conf := config.Config{} conf.Version = "0.1.2" - conf.Execution.CanRunFullLarge = c.Ext == "large" - // conf.Execution.FilterInExtension = c.Ext + conf.BlobDecompression.RequestsRootDir = []string{""} def := CompressionDefinition(&conf) @@ -300,8 +300,7 @@ func TestAggregatedInFileRegexp(t *testing.T) { conf := config.Config{} conf.Version = "0.1.2" - conf.Execution.CanRunFullLarge = c.Ext == "large" - // conf.Execution.FilterInExtension = c.Ext + conf.Aggregation.RequestsRootDir = []string{""} def := AggregatedDefinition(&conf) diff --git a/prover/config/config.go b/prover/config/config.go index 163b50329..54aedac80 100644 --- a/prover/config/config.go +++ b/prover/config/config.go @@ -289,19 +289,19 @@ type Aggregation struct { } type WithRequestDir struct { - RequestsRootDir string `mapstructure:"requests_root_dir" validate:"required"` + RequestsRootDir []string `mapstructure:"requests_root_dir" validate:"required"` } -func (cfg *WithRequestDir) DirFrom() string { - return path.Join(cfg.RequestsRootDir, RequestsFromSubDir) +func (cfg *WithRequestDir) DirFrom(idx int) 
string { + return path.Join(cfg.RequestsRootDir[idx], RequestsFromSubDir) } -func (cfg *WithRequestDir) DirTo() string { - return path.Join(cfg.RequestsRootDir, RequestsToSubDir) +func (cfg *WithRequestDir) DirTo(idx int) string { + return path.Join(cfg.RequestsRootDir[idx], RequestsToSubDir) } -func (cfg *WithRequestDir) DirDone() string { - return path.Join(cfg.RequestsRootDir, RequestsDoneSubDir) +func (cfg *WithRequestDir) DirDone(idx int) string { + return path.Join(cfg.RequestsRootDir[idx], RequestsDoneSubDir) } type PublicInput struct { diff --git a/prover/utils/requests.go b/prover/utils/requests.go new file mode 100644 index 000000000..ff7dbf3e8 --- /dev/null +++ b/prover/utils/requests.go @@ -0,0 +1,22 @@ +package utils + +// CombineRequests combines multiple request files (slices) into a single request slice +func CombineRequests(requests ...[]string) []string { + // Calculate the total length of the combined slice + totalLength := 0 + for _, request := range requests { + totalLength += len(request) + } + + // Preallocate the combined slice with the total length + combined := make([]string, totalLength) + + // Copy each slice into the combined slice + currentIndex := 0 + for _, request := range requests { + copy(combined[currentIndex:], request) + currentIndex += len(request) + } + + return combined +} From a0ee65d30b7d8bf192ee1d675e348534691ed86a Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Tue, 11 Feb 2025 12:56:48 +0000 Subject: [PATCH 24/48] commit progress --- .../controller/fs_watcher_limitless_test.go | 250 ++++++++++++++++++ .../controller/controller/fs_watcher_test.go | 103 +++----- .../controller/controller/job_definition.go | 7 +- 3 files changed, 285 insertions(+), 75 deletions(-) create mode 100644 prover/cmd/controller/controller/fs_watcher_limitless_test.go diff --git a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go new file mode 100644 index 
000000000..165b1e69d --- /dev/null +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -0,0 +1,250 @@ +package controller + +import ( + "errors" + "fmt" + "io/fs" + "os" + "path" + "testing" + "text/template" + + "github.com/consensys/linea-monorepo/prover/config" + "github.com/consensys/linea-monorepo/prover/utils" +) + +const ( + Bootstrap int = iota + GL + RndBeacon + LPP + Conglomeration +) + +// TODO: Write this test +func TestLimitlessProverFileWatcherL(t *testing.T) { + +} + +// Sets up the test environment by creating temporary directories and configurations for the prover. +func setupLimitlessFsTest(t *testing.T) (confM, confL *config.Config) { + // Testdir is going to contain the whole test directory + testDir := t.TempDir() + + const ( + dirfrom = "prover-requests" + dirto = "prover-responses" + dirdone = "requests-done" + dirlogs = "logs" + proverM = "prover-full-M" + proverL = "prover-full-L" + + // Add conf. for Limitless prover: Naming convention: exec + execBootstrap = "execution" + execBootstrapGL = "bootstrapGl" + execBootstrapMetadata = "bootstrapMetadata" + execGLRndBeacon = "gl-rndbeacon" + execGLConglomeration = "gl" + execRndbeaconLPP = "rndbeacon" + execLPPConglomeration = "lpp" + ) + + // Create a configuration using temporary directories + // Defines three command templates for different types of jobs. + // These templates will be used to create shell commands for the worker processes. + cmd := ` +/bin/sh {{index .InFile 0}} +CODE=$? +if [ $CODE -eq 0 ]; then + touch {{index .OutFile 0}} +fi +exit $CODE +` + cmdLarge := ` +/bin/sh {{index .InFile 0}} +CODE=$? +CODE=$(($CODE - 12)) +if [ $CODE -eq 0 ]; then + touch {{index .OutFile 0}} +fi +exit $CODE +` + + cmdLargeInternal := ` +/bin/sh {{index .InFile 0}} +CODE=$? 
+CODE=$(($CODE - 10)) +if [ $CODE -eq 0 ]; then + touch {{index .OutFile 0}} +fi +exit $CODE +` + // For a prover M + confM = &config.Config{ + Version: "0.2.4", + + Controller: config.Controller{ + EnableExecution: false, + EnableBlobDecompression: false, + EnableAggregation: false, + LocalID: proverM, + Prometheus: config.Prometheus{Enabled: false}, + RetryDelays: []int{0, 1}, + WorkerCmd: cmd, + WorkerCmdLarge: cmdLargeInternal, + DeferToOtherLargeCodes: []int{12, 137}, + RetryLocallyWithLargeCodes: []int{10, 77}, + + // Limitless prover components + EnableExecBootstrap: true, + EnableExecGL: true, + EnableExecRndBeacon: true, + EnableExecLPP: true, + EnableExecConglomeration: true, + }, + + // Limitless prover components + ExecBootstrap: config.Execution{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrap)}, + }, + }, + // ExecGL: config.Execution{ + // WithRequestDir: config.WithRequestDir{ + // RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapGL)}, + // }, + // }, + // ExecRndBeacon: config.RndBeacon{ + // GL: config.WithRequestDir{ + // RequestsRootDir: []string{path.Join(testDir, proverM, execGLRndBeacon)}, + // }, + // BootstrapMetadata: config.WithRequestDir{ + // RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata)}, + // }, + // }, + // ExecLPP: config.Execution{ + // WithRequestDir: config.WithRequestDir{ + // RequestsRootDir: []string{path.Join(testDir, proverM, execRndbeaconLPP)}, + // }, + // }, + // ExecConglomeration: config.Conglomeration{ + // GL: config.WithRequestDir{ + // RequestsRootDir: []string{path.Join(testDir, proverM, execGLConglomeration)}, + // }, + // LPP: config.WithRequestDir{ + // RequestsRootDir: []string{path.Join(testDir, proverM, execLPPConglomeration)}, + // }, + // BootstrapMetadata: config.WithRequestDir{ + // RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata)}, + // }, + // }, + } + + _confL := 
*confM + confL = &_confL + confL.Controller.LocalID = proverL + confL.Controller.WorkerCmdLarge = cmdLarge + confL.Execution.CanRunFullLarge = true + + // ensure the template are parsed + confM.Controller.WorkerCmdTmpl = template.Must(template.New("worker").Parse(confM.Controller.WorkerCmd)) + confM.Controller.WorkerCmdLargeTmpl = template.Must(template.New("worker-large").Parse(confM.Controller.WorkerCmdLarge)) + confL.Controller.WorkerCmdTmpl = template.Must(template.New("worker").Parse(confL.Controller.WorkerCmd)) + confL.Controller.WorkerCmdLargeTmpl = template.Must(template.New("worker-large").Parse(confL.Controller.WorkerCmdLarge)) + + // Initialize the dirs (and give them all permissions). They will be + // wiped out after the test anyway. + permCode := fs.FileMode(0777) + err := errors.Join( + + // Add stuff for Limitless prover + os.MkdirAll(confM.ExecBootstrap.DirFrom(0), permCode), + os.MkdirAll(confM.ExecBootstrap.DirTo(0), permCode), + os.MkdirAll(confM.ExecBootstrap.DirDone(0), permCode), + + // os.MkdirAll(confM.ExecGL.DirFrom(), permCode), + // os.MkdirAll(confM.ExecGL.DirTo(), permCode), + // os.MkdirAll(confM.ExecGL.DirDone(), permCode), + + // os.MkdirAll(confM.ExecRndBeacon.GL.DirFrom(), permCode), + // os.MkdirAll(confM.ExecRndBeacon.GL.DirTo(), permCode), + // os.MkdirAll(confM.ExecRndBeacon.GL.DirDone(), permCode), + // os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirFrom(), permCode), + // os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirTo(), permCode), + // os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirDone(), permCode), + + // os.MkdirAll(confM.ExecLPP.DirFrom(), permCode), + // os.MkdirAll(confM.ExecLPP.DirTo(), permCode), + // os.MkdirAll(confM.ExecLPP.DirDone(), permCode), + + // os.MkdirAll(confM.ExecConglomeration.GL.DirFrom(), permCode), + // os.MkdirAll(confM.ExecConglomeration.GL.DirTo(), permCode), + // os.MkdirAll(confM.ExecConglomeration.GL.DirDone(), permCode), + + // 
os.MkdirAll(confM.ExecConglomeration.LPP.DirFrom(), permCode), + // os.MkdirAll(confM.ExecConglomeration.LPP.DirTo(), permCode), + // os.MkdirAll(confM.ExecConglomeration.LPP.DirDone(), permCode), + + // os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirFrom(), permCode), + // os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirTo(), permCode), + // os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirDone(), permCode), + ) + + if err != nil { + t.Fatalf("could not create the temporary directories") + } + + return confM, confL +} + +// Creates test input files with specific filenames and exit codes to simulate job files for the file system watcher. +func createLimitlessTestInputFile( + dirFrom []string, + start, end, jobType, exitWith int, + large ...bool, +) (fnames []string) { + // The filenames are expected to match the regexp pattern that we have in + // the job definition. + var fmtStrArr []string + switch jobType { + case Bootstrap: + fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof.json"} + case GL: + fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json"} + case RndBeacon: + fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", + "%v-%v-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json"} + case LPP: + fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json"} + case Conglomeration: + fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", + "%v-%v-etv0.1.2-stv1.2.3-getZkProof_GLjson", + "%v-%v-etv0.1.2-stv1.2.3-getZkProof_LPP.json"} + default: + panic("incorrect job type") + } + + m, n := len(dirFrom), len(fmtStrArr) + if m != n { + utils.Panic("number of entries in dirFrom:%d should match with the length of formated input files:%d", m, n) + } + + fnames = make([]string, len(fmtStrArr)) + for i, fmtString := range fmtStrArr { + fnames[i] = fmt.Sprintf(fmtString, start, end) + if len(large) > 0 && large[0] { + 
fnames[i] += ".large" + } + f, err := os.Create(path.Join(dirFrom[i], fnames[i])) + if err != nil { + panic(err) + } + + // If called (with the test configuration (i.e. with sh), the file will + // immediately exit with the provided error code) + f.WriteString(fmt.Sprintf("#!/bin/sh\nexit %v", exitWith)) + f.Close() + } + + return fnames +} diff --git a/prover/cmd/controller/controller/fs_watcher_test.go b/prover/cmd/controller/controller/fs_watcher_test.go index 553c24ddc..d2d59faba 100644 --- a/prover/cmd/controller/controller/fs_watcher_test.go +++ b/prover/cmd/controller/controller/fs_watcher_test.go @@ -164,11 +164,6 @@ func TestFileWatcherL(t *testing.T) { assert.Nil(t, fw.GetBest(), "the queue should be empty now") } -// TODO: Write this test -func TestLimitlessProverFileWatcherL(t *testing.T) { - -} - // Sets up the test environment by creating temporary directories and configurations for the prover. func setupFsTest(t *testing.T) (confM, confL *config.Config) { // Testdir is going to contain the whole test directory @@ -265,42 +260,41 @@ exit $CODE }, }, - /* - // Limitless prover components - ExecBootstrap: config.Execution{ - WithRequestDir: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execBootstrap), - }, + // Limitless prover components + ExecBootstrap: config.Execution{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrap)}, + }, + }, + ExecGL: config.Execution{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapGL)}, + }, + }, + ExecRndBeacon: config.RndBeacon{ + GL: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execGLRndBeacon)}, + }, + BootstrapMetadata: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata)}, + }, + }, + ExecLPP: config.Execution{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: 
[]string{path.Join(testDir, proverM, execRndbeaconLPP)}, }, - ExecGL: config.Execution{ - WithRequestDir: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execBootstrapGL), - }, + }, + ExecConglomeration: config.Conglomeration{ + GL: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execGLConglomeration)}, }, - ExecRndBeacon: config.RndBeacon{ - GL: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execGLRndBeacon), - }, - BootstrapMetadata: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execBootstrapMetadata), - }, + LPP: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execLPPConglomeration)}, }, - ExecLPP: config.Execution{ - WithRequestDir: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execRndbeaconLPP), - }, + BootstrapMetadata: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata)}, }, - ExecConglomeration: config.Conglomeration{ - GL: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execGLConglomeration), - }, - LPP: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execLPPConglomeration), - }, - BootstrapMetadata: config.WithRequestDir{ - RequestsRootDir: path.Join(testDir, proverM, execBootstrapMetadata), - }, - }, */ + }, } _confL := *confM @@ -328,41 +322,6 @@ exit $CODE os.MkdirAll(confM.Aggregation.DirFrom(0), permCode), os.MkdirAll(confM.Aggregation.DirTo(0), permCode), os.MkdirAll(confM.Aggregation.DirDone(0), permCode), - - /* - // Add stuff for Limitless prover - os.MkdirAll(confM.ExecBootstrap.DirFrom(), permCode), - os.MkdirAll(confM.ExecBootstrap.DirTo(), permCode), - os.MkdirAll(confM.ExecBootstrap.DirDone(), permCode), - - os.MkdirAll(confM.ExecGL.DirFrom(), permCode), - os.MkdirAll(confM.ExecGL.DirTo(), permCode), - os.MkdirAll(confM.ExecGL.DirDone(), permCode), - - os.MkdirAll(confM.ExecRndBeacon.GL.DirFrom(), 
permCode), - os.MkdirAll(confM.ExecRndBeacon.GL.DirTo(), permCode), - os.MkdirAll(confM.ExecRndBeacon.GL.DirDone(), permCode), - os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirFrom(), permCode), - os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirTo(), permCode), - os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirDone(), permCode), - - os.MkdirAll(confM.ExecLPP.DirFrom(), permCode), - os.MkdirAll(confM.ExecLPP.DirTo(), permCode), - os.MkdirAll(confM.ExecLPP.DirDone(), permCode), - - os.MkdirAll(confM.ExecConglomeration.GL.DirFrom(), permCode), - os.MkdirAll(confM.ExecConglomeration.GL.DirTo(), permCode), - os.MkdirAll(confM.ExecConglomeration.GL.DirDone(), permCode), - - os.MkdirAll(confM.ExecConglomeration.LPP.DirFrom(), permCode), - os.MkdirAll(confM.ExecConglomeration.LPP.DirTo(), permCode), - os.MkdirAll(confM.ExecConglomeration.LPP.DirDone(), permCode), - - os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirFrom(), permCode), - os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirTo(), permCode), - os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirDone(), permCode), - - */ ) if err != nil { diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index adddadf0a..7f22fa432 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -257,11 +257,12 @@ func (jd *JobDefinition) dirDone(ipIdx int) string { return filepath.Join(jd.RequestsRootDir[ipIdx], config.RequestsDoneSubDir) } -func (jd *JobDefinition) dirTo(ipIdx int) string { - if err := jd.isValidReqRootDirIdx(ipIdx); err != nil { +func (jd *JobDefinition) dirTo(opIdx int) string { + if err := jd.isValidOutputFileIdx(opIdx); err != nil { utils.Panic("dirTo:%v", err.Error()) } - return filepath.Join(jd.RequestsRootDir[ipIdx], config.RequestsToSubDir) + + return filepath.Join(jd.RequestsRootDir[opIdx], config.RequestsToSubDir) } func 
cmnExecParamsRegexp(nInputs int) []ParamsRegexp { From 3396fb17ffa7c609642054228b6f195944819ad4 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Tue, 11 Feb 2025 13:48:03 +0000 Subject: [PATCH 25/48] add resp dirs to job definitions --- .../controller/controller/executor_test.go | 3 +- .../controller/fs_watcher_limitless_test.go | 3 + .../controller/controller/fs_watcher_test.go | 59 +++---------------- .../controller/controller/job_definition.go | 33 +++++++---- .../controller/job_definition_limitless.go | 25 ++++---- .../job_definition_limitless_test.go | 5 ++ .../controller/job_definition_test.go | 3 + prover/config/config.go | 26 ++++++-- prover/config/constants.go | 2 +- 9 files changed, 77 insertions(+), 82 deletions(-) diff --git a/prover/cmd/controller/controller/executor_test.go b/prover/cmd/controller/controller/executor_test.go index e9fb8cf2e..3d3d45c3a 100644 --- a/prover/cmd/controller/controller/executor_test.go +++ b/prover/cmd/controller/controller/executor_test.go @@ -24,7 +24,8 @@ func TestRetryWithLarge(t *testing.T) { Parse("output-fill-constant"), ), }, - RequestsRootDir: []string{"./testdata"}, + RequestsRootDir: []string{"./testdata"}, + ResponsesRootDir: []string{"./responses"}, } jobs := []struct { diff --git a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go index 165b1e69d..92f4227c4 100644 --- a/prover/cmd/controller/controller/fs_watcher_limitless_test.go +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -1,5 +1,6 @@ package controller +/* import ( "errors" "fmt" @@ -248,3 +249,5 @@ func createLimitlessTestInputFile( return fnames } + +*/ diff --git a/prover/cmd/controller/controller/fs_watcher_test.go b/prover/cmd/controller/controller/fs_watcher_test.go index d2d59faba..3de737601 100644 --- a/prover/cmd/controller/controller/fs_watcher_test.go +++ b/prover/cmd/controller/controller/fs_watcher_test.go @@ -179,15 +179,6 @@ func 
setupFsTest(t *testing.T) (confM, confL *config.Config) { execution = "execution" compression = "compression" aggregation = "aggregation" - - // Add conf. for Limitless prover: Naming convention: exec - execBootstrap = "bootstrap" - execBootstrapGL = "bootstrapGl" - execBootstrapMetadata = "bootstrapMetadata" - execGLRndBeacon = "gl-rndbeacon" - execGLConglomeration = "gl" - execRndbeaconLPP = "rndbeacon" - execLPPConglomeration = "lpp" ) // Create a configuration using temporary directories @@ -235,64 +226,30 @@ exit $CODE WorkerCmdLarge: cmdLargeInternal, DeferToOtherLargeCodes: []int{12, 137}, RetryLocallyWithLargeCodes: []int{10, 77}, - - // Limitless prover components - EnableExecBootstrap: true, - EnableExecGL: true, - EnableExecRndBeacon: true, - EnableExecLPP: true, - EnableExecConglomeration: true, }, Execution: config.Execution{ WithRequestDir: config.WithRequestDir{ RequestsRootDir: []string{path.Join(testDir, proverM, execution)}, }, + WithResponseDir: config.WithResponseDir{ + ResponsesRootDir: []string{path.Join(testDir, proverM, execution)}, + }, }, BlobDecompression: config.BlobDecompression{ WithRequestDir: config.WithRequestDir{ RequestsRootDir: []string{path.Join(testDir, proverM, compression)}, }, + WithResponseDir: config.WithResponseDir{ + ResponsesRootDir: []string{path.Join(testDir, proverM, compression)}, + }, }, Aggregation: config.Aggregation{ WithRequestDir: config.WithRequestDir{ RequestsRootDir: []string{path.Join(testDir, proverM, aggregation)}, }, - }, - - // Limitless prover components - ExecBootstrap: config.Execution{ - WithRequestDir: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrap)}, - }, - }, - ExecGL: config.Execution{ - WithRequestDir: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapGL)}, - }, - }, - ExecRndBeacon: config.RndBeacon{ - GL: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execGLRndBeacon)}, - 
}, - BootstrapMetadata: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata)}, - }, - }, - ExecLPP: config.Execution{ - WithRequestDir: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execRndbeaconLPP)}, - }, - }, - ExecConglomeration: config.Conglomeration{ - GL: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execGLConglomeration)}, - }, - LPP: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execLPPConglomeration)}, - }, - BootstrapMetadata: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata)}, + WithResponseDir: config.WithResponseDir{ + ResponsesRootDir: []string{path.Join(testDir, proverM, aggregation)}, }, }, } diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index 7f22fa432..09b1e0231 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -48,6 +48,9 @@ type JobDefinition struct { // InputFileRegexp []*regexp2.Regexp + // Output params + ResponsesRootDir []string + // Template to use to generate the output file. The template should have the // form of a go template. For instance, // @@ -69,7 +72,7 @@ type JobDefinition struct { // and parameter regexps. The function returns a JobDefinition and an error if any occurs during the setup. 
func commonJobDefinition(name string, priority int, reqRootDirs []string, inputFilePatterns []string, - outputFileTmpls []string, outputFileNames []string, + respRootDirs []string, outputFileTmpls []string, outputFileNames []string, paramsRegexp []ParamsRegexp, failSuffix string) (*JobDefinition, error) { m, n := len(reqRootDirs), len(inputFilePatterns) @@ -78,10 +81,10 @@ func commonJobDefinition(name string, priority int, and input file patterns:%d specified in the job definition`, m, n) } - p, q := len(outputFileTmpls), len(outputFileNames) - if p != q { - return nil, fmt.Errorf(`length mis-match between the number of output file templates:%d - and output file names:%d specified in the job definition`, p, q) + p, q, r := len(respRootDirs), len(outputFileTmpls), len(outputFileNames) + if p != q || p != r || q != r { + return nil, fmt.Errorf(`length mis-match between the number of response directories:%d, output file templates:%d + and output file names:%d specified in the job definition`, p, q, r) } inputFileRegexps := make([]*regexp2.Regexp, m) @@ -98,13 +101,14 @@ func commonJobDefinition(name string, priority int, } return &JobDefinition{ - Name: name, - Priority: priority, - RequestsRootDir: reqRootDirs, - InputFileRegexp: inputFileRegexps, - OutputFileTmpl: outputFileTemplates, - ParamsRegexp: paramsRegexps, - FailureSuffix: matchFailureSuffix(failSuffix), + Name: name, + Priority: priority, + RequestsRootDir: reqRootDirs, + InputFileRegexp: inputFileRegexps, + ResponsesRootDir: respRootDirs, + OutputFileTmpl: outputFileTemplates, + ParamsRegexp: paramsRegexps, + FailureSuffix: matchFailureSuffix(failSuffix), }, nil } @@ -128,6 +132,7 @@ func ExecutionDefinition(conf *config.Config) JobDefinition { 0, conf.Execution.RequestsRootDir, []string{inputFilePattern}, + conf.Execution.ResponsesRootDir, []string{"exec-output-file"}, []string{"{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-getZkProof.json"}, cmnExecParamsRegexp(1), @@ -160,6 +165,7 @@ func 
CompressionDefinition(conf *config.Config) JobDefinition { 1, conf.BlobDecompression.RequestsRootDir, []string{inputFilePattern}, + conf.BlobDecompression.ResponsesRootDir, []string{"compress-output-file"}, []string{"{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-{{ index .Job.ContentHash .Idx }}-getZkBlobCompressionProof.json"}, []ParamsRegexp{paramsRegexp}, @@ -191,6 +197,7 @@ func AggregatedDefinition(conf *config.Config) JobDefinition { 2, conf.Aggregation.RequestsRootDir, []string{inputFilePattern}, + conf.Aggregation.ResponsesRootDir, []string{"agreg-output-file"}, []string{"{{ index .Job.Start .Idx }}-{{ index .Job.End .Idx }}-{{ index .Job.ContentHash .Idx }}-getZkAggregatedProof.json"}, []ParamsRegexp{paramsRegexp}, @@ -262,7 +269,7 @@ func (jd *JobDefinition) dirTo(opIdx int) string { utils.Panic("dirTo:%v", err.Error()) } - return filepath.Join(jd.RequestsRootDir[opIdx], config.RequestsToSubDir) + return filepath.Join(jd.ResponsesRootDir[opIdx], config.ResponsesToSubDir) } func cmnExecParamsRegexp(nInputs int) []ParamsRegexp { diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index 536a52c97..d39177978 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -60,11 +60,12 @@ func ExecBootstrapDefinition(conf *config.Config) (*JobDefinition, error) { inputFilePatterns := []string{fmt.Sprintf(execBootstrapInputPattern, inpFileExt, config.FailSuffix)} // Output files + respDirs := conf.ExecBootstrap.ResponsesRootDir outputTmpls := []string{execBootstrapGLSubmoduleTemplate, execBootstrapDistMetadataTemplate} outputFiles := []string{execBootstrapGLSubmoduleFile, execBootstrapDistMetadataFile} - return commonJobDefinition(jobExecBootstrap, priorityExecBootstrap, - reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(1), config.FailSuffix) + return 
commonJobDefinition(jobExecBootstrap, priorityExecBootstrap, reqDirs, inputFilePatterns, + respDirs, outputTmpls, outputFiles, cmnExecParamsRegexp(1), config.FailSuffix) } func ExecGLDefinition(conf *config.Config) (*JobDefinition, error) { @@ -78,11 +79,12 @@ func ExecGLDefinition(conf *config.Config) (*JobDefinition, error) { inputFilePatterns := []string{fmt.Sprintf(execBootstrapGLInputPattern, inpFileExt, config.FailSuffix)} // Output files + respDirs := conf.ExecGL.ResponsesRootDir outputTmpls := []string{execGLRndBeaconTemplate, execGLTemplate} outputFiles := []string{execGLRndBeaconFile, execGLFile} - return commonJobDefinition(jobExecGL, priorityExecGL, - reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(1), config.FailSuffix) + return commonJobDefinition(jobExecGL, priorityExecGL, reqDirs, inputFilePatterns, + respDirs, outputTmpls, outputFiles, cmnExecParamsRegexp(1), config.FailSuffix) } func ExecRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { @@ -99,11 +101,12 @@ func ExecRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { } // Output files + respDirs := conf.ExecRndBeacon.ResponsesRootDir outputTmpls := []string{execRndBeaconTemplate} outputFiles := []string{execRndBeaconFile} - return commonJobDefinition(jobExecRndBeacon, priorityExecRndBeacon, - reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(2), config.FailSuffix) + return commonJobDefinition(jobExecRndBeacon, priorityExecRndBeacon, reqDirs, inputFilePatterns, + respDirs, outputTmpls, outputFiles, cmnExecParamsRegexp(2), config.FailSuffix) } func ExecLPPDefinition(conf *config.Config) (*JobDefinition, error) { @@ -117,11 +120,12 @@ func ExecLPPDefinition(conf *config.Config) (*JobDefinition, error) { inputFilePatterns := []string{fmt.Sprintf(execLPPInputPattern, inpFileExt, config.FailSuffix)} // Output files + respDirs := conf.ExecLPP.ResponsesRootDir outputTmpls := []string{execLPPTemplate} outputFiles := 
[]string{execLPPFile} - return commonJobDefinition(jobExecLPP, priorityExecLPP, - reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(1), config.FailSuffix) + return commonJobDefinition(jobExecLPP, priorityExecLPP, reqDirs, inputFilePatterns, + respDirs, outputTmpls, outputFiles, cmnExecParamsRegexp(1), config.FailSuffix) } func ExecConglomerationDefinition(conf *config.Config) (*JobDefinition, error) { @@ -139,9 +143,10 @@ func ExecConglomerationDefinition(conf *config.Config) (*JobDefinition, error) { } // Output files + respDirs := conf.ExecConglomeration.ResponsesRootDir outputTmpls := []string{execConglomerateTemplate} outputFiles := []string{execConglomerateFile} - return commonJobDefinition(jobExecCongolomerateLPP, priorityExecCongolomeration, - reqDirs, inputFilePatterns, outputTmpls, outputFiles, cmnExecParamsRegexp(3), config.FailSuffix) + return commonJobDefinition(jobExecCongolomerateLPP, priorityExecCongolomeration, reqDirs, inputFilePatterns, + respDirs, outputTmpls, outputFiles, cmnExecParamsRegexp(3), config.FailSuffix) } diff --git a/prover/cmd/controller/controller/job_definition_limitless_test.go b/prover/cmd/controller/controller/job_definition_limitless_test.go index 81d328abd..b6e92a168 100644 --- a/prover/cmd/controller/controller/job_definition_limitless_test.go +++ b/prover/cmd/controller/controller/job_definition_limitless_test.go @@ -121,6 +121,7 @@ func TestExecBootstrapInFileRegexp(t *testing.T) { conf.Version = "0.1.2" conf.ExecBootstrap.CanRunFullLarge = c.Ext == "large" conf.ExecBootstrap.RequestsRootDir = []string{""} + conf.ExecBootstrap.ResponsesRootDir = []string{"", ""} def, err := ExecBootstrapDefinition(&conf) assert.NoError(t, err) @@ -245,6 +246,7 @@ func TestExecGLInFileRegexp(t *testing.T) { conf.Version = "0.1.2" conf.ExecGL.CanRunFullLarge = c.Ext == "large" conf.ExecGL.RequestsRootDir = []string{""} + conf.ExecGL.ResponsesRootDir = []string{"", ""} def, err := ExecGLDefinition(&conf) 
assert.NoError(t, err) @@ -456,6 +458,7 @@ func TestExecRndBeaconInFileRegexp(t *testing.T) { conf.Version = "0.1.2" conf.ExecRndBeacon.CanRunFullLarge = c.Ext == "large" conf.ExecRndBeacon.GL.RequestsRootDir = []string{"", ""} + conf.ExecRndBeacon.ResponsesRootDir = []string{""} def, err := ExecRndBeaconDefinition(&conf) assert.NoError(t, err) @@ -572,6 +575,7 @@ func TestExecLPPInFileRegexp(t *testing.T) { conf.Version = "0.1.2" conf.ExecLPP.CanRunFullLarge = c.Ext == "large" conf.ExecLPP.RequestsRootDir = []string{""} + conf.ExecLPP.ResponsesRootDir = []string{""} def, err := ExecLPPDefinition(&conf) assert.NoError(t, err) @@ -818,6 +822,7 @@ func TestExecConglomerationInFileRegexp(t *testing.T) { conf.ExecConglomeration.GL.RequestsRootDir = []string{""} conf.ExecConglomeration.LPP.RequestsRootDir = []string{""} conf.ExecConglomeration.BootstrapMetadata.RequestsRootDir = []string{""} + conf.ExecConglomeration.ResponsesRootDir = []string{""} def, err := ExecConglomerationDefinition(&conf) assert.NoError(t, err) diff --git a/prover/cmd/controller/controller/job_definition_test.go b/prover/cmd/controller/controller/job_definition_test.go index 865a36457..c9a0cc45c 100644 --- a/prover/cmd/controller/controller/job_definition_test.go +++ b/prover/cmd/controller/controller/job_definition_test.go @@ -191,6 +191,7 @@ func TestExecutionInFileRegexp(t *testing.T) { conf.Version = "0.1.2" conf.Execution.CanRunFullLarge = c.Ext == "large" conf.Execution.RequestsRootDir = []string{""} + conf.Execution.ResponsesRootDir = []string{""} // conf.Execution.FilterInExtension = c.Ext def := ExecutionDefinition(&conf) @@ -254,6 +255,7 @@ func TestCompressionInFileRegexp(t *testing.T) { conf := config.Config{} conf.Version = "0.1.2" conf.BlobDecompression.RequestsRootDir = []string{""} + conf.BlobDecompression.ResponsesRootDir = []string{""} def := CompressionDefinition(&conf) @@ -301,6 +303,7 @@ func TestAggregatedInFileRegexp(t *testing.T) { conf := config.Config{} conf.Version = 
"0.1.2" conf.Aggregation.RequestsRootDir = []string{""} + conf.Aggregation.ResponsesRootDir = []string{""} def := AggregatedDefinition(&conf) diff --git a/prover/config/config.go b/prover/config/config.go index 54aedac80..f9abcf32b 100644 --- a/prover/config/config.go +++ b/prover/config/config.go @@ -151,6 +151,8 @@ type RndBeacon struct { BootstrapMetadata WithRequestDir `mapstructure:",squash"` + WithResponseDir `mapstructure:",squash"` + // ProverMode stores the kind of prover to use. ProverMode ProverMode `mapstructure:"prover_mode" validate:"required,oneof=dev partial full proofless bench check-only encode-only"` @@ -165,6 +167,8 @@ type Conglomeration struct { BootstrapMetadata WithRequestDir `mapstructure:",squash"` + WithResponseDir `mapstructure:",squash"` + // ProverMode stores the kind of prover to use. ProverMode ProverMode `mapstructure:"prover_mode" validate:"required,oneof=dev partial full proofless bench check-only encode-only"` @@ -242,6 +246,8 @@ type Prometheus struct { type Execution struct { WithRequestDir `mapstructure:",squash"` + WithResponseDir `mapstructure:",squash"` + // ProverMode stores the kind of prover to use. ProverMode ProverMode `mapstructure:"prover_mode" validate:"required,oneof=dev partial full proofless bench check-only encode-only"` @@ -255,6 +261,8 @@ type Execution struct { type BlobDecompression struct { WithRequestDir `mapstructure:",squash"` + WithResponseDir `mapstructure:",squash"` + // ProverMode stores the kind of prover to use. ProverMode ProverMode `mapstructure:"prover_mode" validate:"required,oneof=dev full"` @@ -270,6 +278,8 @@ type BlobDecompression struct { type Aggregation struct { WithRequestDir `mapstructure:",squash"` + WithResponseDir `mapstructure:",squash"` + // ProverMode stores the kind of prover to use. 
ProverMode ProverMode `mapstructure:"prover_mode" validate:"required,oneof=dev full"` @@ -292,16 +302,20 @@ type WithRequestDir struct { RequestsRootDir []string `mapstructure:"requests_root_dir" validate:"required"` } -func (cfg *WithRequestDir) DirFrom(idx int) string { - return path.Join(cfg.RequestsRootDir[idx], RequestsFromSubDir) +type WithResponseDir struct { + ResponsesRootDir []string `mapstructure:"responses_root_dir" validate:"required"` +} + +func (cfg *WithRequestDir) DirFrom(ipIdx int) string { + return path.Join(cfg.RequestsRootDir[ipIdx], RequestsFromSubDir) } -func (cfg *WithRequestDir) DirTo(idx int) string { - return path.Join(cfg.RequestsRootDir[idx], RequestsToSubDir) +func (cfg *WithResponseDir) DirTo(opIdx int) string { + return path.Join(cfg.ResponsesRootDir[opIdx], ResponsesToSubDir) } -func (cfg *WithRequestDir) DirDone(idx int) string { - return path.Join(cfg.RequestsRootDir[idx], RequestsDoneSubDir) +func (cfg *WithRequestDir) DirDone(ipIdx int) string { + return path.Join(cfg.RequestsRootDir[ipIdx], RequestsDoneSubDir) } type PublicInput struct { diff --git a/prover/config/constants.go b/prover/config/constants.go index 8489be0d6..6b5dfb632 100644 --- a/prover/config/constants.go +++ b/prover/config/constants.go @@ -8,7 +8,7 @@ const ( DefaultDictionaryFileName = "compressor_dict.bin" RequestsFromSubDir = "requests" - RequestsToSubDir = "responses" + ResponsesToSubDir = "responses" RequestsDoneSubDir = "requests-done" InProgressSufix = "inprogress" From cf6d7a4ce60fb8c7e70e23a8323a6505d4ef69a2 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Tue, 11 Feb 2025 14:29:52 +0000 Subject: [PATCH 26/48] add limitless fs watcher --- .../controller/fs_watcher_limitless_test.go | 158 +++++++++++------- 1 file changed, 93 insertions(+), 65 deletions(-) diff --git a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go index 92f4227c4..2cee9a3e7 100644 --- 
a/prover/cmd/controller/controller/fs_watcher_limitless_test.go +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -1,6 +1,5 @@ package controller -/* import ( "errors" "fmt" @@ -48,6 +47,7 @@ func setupLimitlessFsTest(t *testing.T) (confM, confL *config.Config) { execGLConglomeration = "gl" execRndbeaconLPP = "rndbeacon" execLPPConglomeration = "lpp" + execConglomeration = "execution" ) // Create a configuration using temporary directories @@ -85,9 +85,17 @@ exit $CODE Version: "0.2.4", Controller: config.Controller{ - EnableExecution: false, - EnableBlobDecompression: false, - EnableAggregation: false, + // Disable legacy + EnableExecution: false, + EnableBlobDecompression: false, + EnableAggregation: false, + + // Limitless prover components + EnableExecBootstrap: true, + EnableExecGL: true, + EnableExecRndBeacon: true, + EnableExecLPP: true, + EnableExecConglomeration: true, LocalID: proverM, Prometheus: config.Prometheus{Enabled: false}, RetryDelays: []int{0, 1}, @@ -95,13 +103,6 @@ exit $CODE WorkerCmdLarge: cmdLargeInternal, DeferToOtherLargeCodes: []int{12, 137}, RetryLocallyWithLargeCodes: []int{10, 77}, - - // Limitless prover components - EnableExecBootstrap: true, - EnableExecGL: true, - EnableExecRndBeacon: true, - EnableExecLPP: true, - EnableExecConglomeration: true, }, // Limitless prover components @@ -109,36 +110,55 @@ exit $CODE WithRequestDir: config.WithRequestDir{ RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrap)}, }, + WithResponseDir: config.WithResponseDir{ + ResponsesRootDir: []string{ + path.Join(testDir, proverM, execBootstrapGL), + path.Join(testDir, proverM, execBootstrapMetadata)}, + }, + }, + ExecGL: config.Execution{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapGL)}, + }, + WithResponseDir: config.WithResponseDir{ + ResponsesRootDir: []string{ + path.Join(testDir, proverM, execGLRndBeacon), + path.Join(testDir, proverM, 
execGLConglomeration)}, + }, + }, + ExecRndBeacon: config.RndBeacon{ + GL: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execGLRndBeacon)}, // In practice, there will be `n` files + }, + BootstrapMetadata: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata)}, + }, + WithResponseDir: config.WithResponseDir{ + ResponsesRootDir: []string{path.Join(testDir, proverM, execRndbeaconLPP)}, + }, + }, + ExecLPP: config.Execution{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execRndbeaconLPP)}, + }, + WithResponseDir: config.WithResponseDir{ + ResponsesRootDir: []string{path.Join(testDir, proverM, execLPPConglomeration)}, + }, + }, + ExecConglomeration: config.Conglomeration{ + GL: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execGLConglomeration)}, + }, + LPP: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execLPPConglomeration)}, + }, + BootstrapMetadata: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata)}, + }, + WithResponseDir: config.WithResponseDir{ + ResponsesRootDir: []string{path.Join(testDir, proverM, execConglomeration)}, + }, }, - // ExecGL: config.Execution{ - // WithRequestDir: config.WithRequestDir{ - // RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapGL)}, - // }, - // }, - // ExecRndBeacon: config.RndBeacon{ - // GL: config.WithRequestDir{ - // RequestsRootDir: []string{path.Join(testDir, proverM, execGLRndBeacon)}, - // }, - // BootstrapMetadata: config.WithRequestDir{ - // RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata)}, - // }, - // }, - // ExecLPP: config.Execution{ - // WithRequestDir: config.WithRequestDir{ - // RequestsRootDir: []string{path.Join(testDir, proverM, execRndbeaconLPP)}, - // }, - // }, - // ExecConglomeration: config.Conglomeration{ - // GL: 
config.WithRequestDir{ - // RequestsRootDir: []string{path.Join(testDir, proverM, execGLConglomeration)}, - // }, - // LPP: config.WithRequestDir{ - // RequestsRootDir: []string{path.Join(testDir, proverM, execLPPConglomeration)}, - // }, - // BootstrapMetadata: config.WithRequestDir{ - // RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata)}, - // }, - // }, } _confL := *confM @@ -159,36 +179,46 @@ exit $CODE err := errors.Join( // Add stuff for Limitless prover + + // Bootstrap: 1 input -> 2 output os.MkdirAll(confM.ExecBootstrap.DirFrom(0), permCode), - os.MkdirAll(confM.ExecBootstrap.DirTo(0), permCode), os.MkdirAll(confM.ExecBootstrap.DirDone(0), permCode), + os.MkdirAll(confM.ExecBootstrap.DirTo(0), permCode), + os.MkdirAll(confM.ExecBootstrap.DirTo(1), permCode), + + // GL: 1 input -> 2 output + os.MkdirAll(confM.ExecGL.DirFrom(0), permCode), + os.MkdirAll(confM.ExecGL.DirDone(0), permCode), + // In practice there will be `n` files here + os.MkdirAll(confM.ExecGL.DirTo(0), permCode), + os.MkdirAll(confM.ExecGL.DirTo(1), permCode), - // os.MkdirAll(confM.ExecGL.DirFrom(), permCode), - // os.MkdirAll(confM.ExecGL.DirTo(), permCode), - // os.MkdirAll(confM.ExecGL.DirDone(), permCode), + // RndBeacon: 2 input -> 1 output + // In practice there will be `n` files here + os.MkdirAll(confM.ExecRndBeacon.GL.DirFrom(0), permCode), + os.MkdirAll(confM.ExecRndBeacon.GL.DirDone(0), permCode), + os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirFrom(0), permCode), + os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirDone(0), permCode), + os.MkdirAll(confM.ExecRndBeacon.DirTo(0), permCode), - // os.MkdirAll(confM.ExecRndBeacon.GL.DirFrom(), permCode), - // os.MkdirAll(confM.ExecRndBeacon.GL.DirTo(), permCode), - // os.MkdirAll(confM.ExecRndBeacon.GL.DirDone(), permCode), - // os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirFrom(), permCode), - // os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirTo(), permCode), - // 
os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirDone(), permCode), + // LPP: 1 input -> 1 output + // In practice there will be `n` files + os.MkdirAll(confM.ExecLPP.DirFrom(0), permCode), + os.MkdirAll(confM.ExecLPP.DirDone(0), permCode), + os.MkdirAll(confM.ExecLPP.DirTo(0), permCode), - // os.MkdirAll(confM.ExecLPP.DirFrom(), permCode), - // os.MkdirAll(confM.ExecLPP.DirTo(), permCode), - // os.MkdirAll(confM.ExecLPP.DirDone(), permCode), + // Conglomeration: 3 input -> 1 ouput + // In practice there will be `2n+1` inputs => 1 output file + os.MkdirAll(confM.ExecConglomeration.GL.DirFrom(0), permCode), + os.MkdirAll(confM.ExecConglomeration.GL.DirDone(0), permCode), - // os.MkdirAll(confM.ExecConglomeration.GL.DirFrom(), permCode), - // os.MkdirAll(confM.ExecConglomeration.GL.DirTo(), permCode), - // os.MkdirAll(confM.ExecConglomeration.GL.DirDone(), permCode), + os.MkdirAll(confM.ExecConglomeration.LPP.DirFrom(0), permCode), + os.MkdirAll(confM.ExecConglomeration.LPP.DirDone(0), permCode), - // os.MkdirAll(confM.ExecConglomeration.LPP.DirFrom(), permCode), - // os.MkdirAll(confM.ExecConglomeration.LPP.DirTo(), permCode), - // os.MkdirAll(confM.ExecConglomeration.LPP.DirDone(), permCode), + os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirFrom(0), permCode), + os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirDone(0), permCode), - // os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirFrom(), permCode), - // os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirTo(), permCode), - // os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirDone(), permCode), + os.MkdirAll(confM.ExecConglomeration.DirTo(0), permCode), ) if err != nil { @@ -227,7 +257,7 @@ func createLimitlessTestInputFile( m, n := len(dirFrom), len(fmtStrArr) if m != n { - utils.Panic("number of entries in dirFrom:%d should match with the length of formated input files:%d", m, n) + utils.Panic("number of entries in dirFrom:%d should match with the number of 
formated input files:%d", m, n) } fnames = make([]string, len(fmtStrArr)) @@ -249,5 +279,3 @@ func createLimitlessTestInputFile( return fnames } - -*/ From 8b4c2c8aad6c1eb2eaf5a0c93def37aee318241f Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Tue, 11 Feb 2025 15:22:24 +0000 Subject: [PATCH 27/48] init fs_watcher_limitess_test.go --- .../controller/fs_watcher_limitless_test.go | 59 ++++++++++++++++++- 1 file changed, 57 insertions(+), 2 deletions(-) diff --git a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go index 2cee9a3e7..1c13d08e8 100644 --- a/prover/cmd/controller/controller/fs_watcher_limitless_test.go +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -11,6 +11,7 @@ import ( "github.com/consensys/linea-monorepo/prover/config" "github.com/consensys/linea-monorepo/prover/utils" + "github.com/stretchr/testify/assert" ) const ( @@ -21,8 +22,62 @@ const ( Conglomeration ) -// TODO: Write this test func TestLimitlessProverFileWatcherL(t *testing.T) { + _, confL := setupLimitlessFsTest(t) + + exitCode := 0 // we are not interested in the exit code here + + // Create a list of files for each job type + execBootstrapFrom := []string{confL.ExecBootstrap.DirFrom(0)} + execGLFrom := []string{confL.ExecGL.DirFrom(0)} + execRndBeaconFrom := []string{ + confL.ExecRndBeacon.BootstrapMetadata.DirFrom(0), + confL.ExecRndBeacon.GL.DirFrom(0), + } + // execLPPFrom := []string{confL.ExecLPP.DirFrom(0)} + // execConglomerationFrom := []string{ + // confL.ExecConglomeration.GL.DirFrom(0), + // confL.ExecConglomeration.LPP.DirFrom(0), + // confL.ExecConglomeration.BootstrapMetadata.DirFrom(0), + // } + + // The jobs, declared in the order in which they are expected to be found + expectedFNames := []struct { + FName []string + Skip bool + }{ + { + FName: createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, Bootstrap, exitCode), + }, + { + FName: 
createLimitlessTestInputFiles(execGLFrom, 0, 1, GL, exitCode), + }, + { + FName: createLimitlessTestInputFiles(execRndBeaconFrom, 0, 1, RndBeacon, exitCode), + }, + // { + // FName: createLimitlessTestInputFiles(execLPPFrom, 0, 1, LPP, exitCode), + // }, + // { + // FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, Conglomeration, exitCode), + // }, + } + + fw := NewFsWatcher(confL) + for _, f := range expectedFNames { + if f.Skip { + continue + } + t.Logf("Looking for job with file: %s", f.FName) + found := fw.GetBest() + if found == nil { + t.Logf("Did not find the job for file: %s", f.FName) + } + if assert.NotNil(t, found, "did not find the job") { + assert.Equal(t, f.FName, found.OriginalFile) + } + } + assert.Nil(t, fw.GetBest(), "the queue should be empty now") } @@ -229,7 +284,7 @@ exit $CODE } // Creates test input files with specific filenames and exit codes to simulate job files for the file system watcher. -func createLimitlessTestInputFile( +func createLimitlessTestInputFiles( dirFrom []string, start, end, jobType, exitWith int, large ...bool, From 79c5ee4176b3e1d09d75b880600287d2109a13a3 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Wed, 12 Feb 2025 14:42:21 +0000 Subject: [PATCH 28/48] commit progress --- .../cmd/controller/controller/fs_watcher.go | 98 +++++++++++-- .../controller/fs_watcher_limitless_test.go | 135 +++++++++++++----- .../controller/controller/job_definition.go | 29 +++- .../controller/job_definition_limitless.go | 15 +- .../job_definition_limitless_test.go | 4 +- prover/cmd/controller/controller/jobs.go | 9 ++ prover/config/config.go | 5 +- prover/config/constants.go | 3 + 8 files changed, 240 insertions(+), 58 deletions(-) diff --git a/prover/cmd/controller/controller/fs_watcher.go b/prover/cmd/controller/controller/fs_watcher.go index 49bfefd90..598c66e5a 100644 --- a/prover/cmd/controller/controller/fs_watcher.go +++ b/prover/cmd/controller/controller/fs_watcher.go @@ -98,6 +98,7 @@ func (fs *FsWatcher) 
GetBest() (job *Job) { return nil } + numsMatched := 0 for i := range fs.JobToWatch { // Don't try to pass &jdef, where jdef is a loop variable as // `for i, jdef := range f.JobToWatch {...}` @@ -105,14 +106,25 @@ func (fs *FsWatcher) GetBest() (job *Job) { // of every jobs found so far and they will all be attributed to the // last job definition. jdef := &fs.JobToWatch[i] - for j := range jdef.RequestsRootDir { - if err := fs.appendJobFromDef(jdef, &jobs, j); err != nil { + + // For multi-input jobs + if len(jdef.RequestsRootDir) > 1 { + if err := fs.appendMultiInputJobFromDef(jdef, &jobs, &numsMatched); err != nil { + fs.Logger.Errorf( + "Got an error trying to fetch job `%v`: %v", + jdef.Name, err, + ) + } + } else { + // For single input jobs + if err := fs.appendJobFromDef(jdef, &jobs, &numsMatched); err != nil { fs.Logger.Errorf( - "error trying to fetch job `%v` from dir %v: %v", - jdef.Name, jdef.dirFrom(j), err, + "Got an error trying to fetch job `%v` from dir %v: %v", + jdef.Name, jdef.dirFrom(0), err, ) } } + } if len(jobs) == 0 { @@ -150,11 +162,67 @@ func (f *FsWatcher) lockBest(jobs []*Job) (pos int, success bool) { return 0, false } -// Try appending a list of jobs that are parsed from a given directory. An error -// is returned if the function fails to read the directory. 
-func (fs *FsWatcher) appendJobFromDef(jdef *JobDefinition, jobs *[]*Job, ipIdx int) (err error) { +func (fs *FsWatcher) appendMultiInputJobFromDef(jdef *JobDefinition, jobs *[]*Job, numsMatched *int) (err error) { + + switch jdef.Name { + case jobExecRndBeacon: + dir1From, dir2From := jdef.dirFrom(0), jdef.dirFrom(1) + dir1Ent, err := lsname(dir1From) + if err != nil { + return err + } + dir2Ent, err := lsname(dir2From) + if err != nil { + return err + } + // Create a map to group files by their common prefix + fileMap := make(map[string][]string) + + // Populate the map with files from dir1 + for _, entry := range dir1Ent { + if !entry.IsDir() { + prefix := getCommonPrefix(entry.Name()) + fileMap[prefix] = append(fileMap[prefix], entry.Name()) + } + } + + // Populate the map with files from dir2 + for _, entry := range dir2Ent { + if !entry.IsDir() { + prefix := getCommonPrefix(entry.Name()) + fileMap[prefix] = append(fileMap[prefix], entry.Name()) + } + } + + // Convert the map to the desired output format + for _, files := range fileMap { + //inputFileNames = append(inputFileNames, files) + job, err := NewJob(jdef, files) + if err != nil { + fs.Logger.Debugf("Found invalid files `%v` : %v", files, err) + continue + } + *jobs = append(*jobs, job) + *numsMatched++ + + // Pass prometheus metrics + // metrics.CollectFS(jdef.Name, len(dirents), *numsMatched) + } + return nil + case jobExecCongolomeration: + return nil + default: + return fmt.Errorf("unsupported job type:%s", jdef.Name) + } + +} + +// Try appending a list of single-input jobs that are parsed from a given directory. +// An error is returned if the function fails to read the directory. 
+func (fs *FsWatcher) appendJobFromDef(jdef *JobDefinition, jobs *[]*Job, numsMatched *int) (err error) { - dirFrom := jdef.dirFrom(ipIdx) + // ASSUMED 0 index here for jobs with only single inputs + dirFrom := jdef.dirFrom(0) fs.Logger.Tracef("Seeking jobs for %v in %v", jdef.Name, dirFrom) // This will fail if the provided directory is not a directory @@ -162,7 +230,6 @@ func (fs *FsWatcher) appendJobFromDef(jdef *JobDefinition, jobs *[]*Job, ipIdx i if err != nil { return fmt.Errorf("cannot ls `%s` : %v", dirFrom, err) } - numMatched := 0 // Search and append the valid files into the list. for _, dirent := range dirents { @@ -186,11 +253,11 @@ func (fs *FsWatcher) appendJobFromDef(jdef *JobDefinition, jobs *[]*Job, ipIdx i // If all the checks passes, we append the filename to the list of the // clean ones. *jobs = append(*jobs, job) - numMatched++ + *numsMatched++ } // Pass prometheus metrics - metrics.CollectFS(jdef.Name, len(dirents), numMatched) + metrics.CollectFS(jdef.Name, len(dirents), *numsMatched) return nil } @@ -261,3 +328,12 @@ func lsname(dirname string) (finfos []fs.DirEntry, err error) { return finfos, err } + +// getCommonPrefix extracts the common prefix from a filename +func getCommonPrefix(filename string) string { + parts := strings.Split(filename, "-getZkProof") + if len(parts) > 0 { + return parts[0] + } + return "" +} diff --git a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go index 1c13d08e8..dae90cb56 100644 --- a/prover/cmd/controller/controller/fs_watcher_limitless_test.go +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -30,11 +30,15 @@ func TestLimitlessProverFileWatcherL(t *testing.T) { // Create a list of files for each job type execBootstrapFrom := []string{confL.ExecBootstrap.DirFrom(0)} execGLFrom := []string{confL.ExecGL.DirFrom(0)} + // execRndBeaconFrom := []string{ + // confL.ExecRndBeacon.BootstrapMetadata.DirFrom(0), + // 
confL.ExecRndBeacon.GL.DirFrom(0), + // } execRndBeaconFrom := []string{ - confL.ExecRndBeacon.BootstrapMetadata.DirFrom(0), - confL.ExecRndBeacon.GL.DirFrom(0), + confL.ExecRndBeacon.DirFrom(0), + confL.ExecRndBeacon.DirFrom(1), } - // execLPPFrom := []string{confL.ExecLPP.DirFrom(0)} + execLPPFrom := []string{confL.ExecLPP.DirFrom(0)} // execConglomerationFrom := []string{ // confL.ExecConglomeration.GL.DirFrom(0), // confL.ExecConglomeration.LPP.DirFrom(0), @@ -55,9 +59,9 @@ func TestLimitlessProverFileWatcherL(t *testing.T) { { FName: createLimitlessTestInputFiles(execRndBeaconFrom, 0, 1, RndBeacon, exitCode), }, - // { - // FName: createLimitlessTestInputFiles(execLPPFrom, 0, 1, LPP, exitCode), - // }, + { + FName: createLimitlessTestInputFiles(execLPPFrom, 0, 1, LPP, exitCode), + }, // { // FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, Conglomeration, exitCode), // }, @@ -70,6 +74,7 @@ func TestLimitlessProverFileWatcherL(t *testing.T) { } t.Logf("Looking for job with file: %s", f.FName) found := fw.GetBest() + t.Logf("Found job: %+v", found) if found == nil { t.Logf("Did not find the job for file: %s", f.FName) } @@ -105,35 +110,82 @@ func setupLimitlessFsTest(t *testing.T) (confM, confL *config.Config) { execConglomeration = "execution" ) + // // Create a configuration using temporary directories + // // Defines three command templates for different types of jobs. + // // These templates will be used to create shell commands for the worker processes. + // cmd := ` + // /bin/sh {{index .InFile 0}} + // CODE=$? + // if [ $CODE -eq 0 ]; then + // touch {{index .OutFile 0}} + // fi + // exit $CODE + // ` + // cmdLarge := ` + // /bin/sh {{index .InFile 0}} + // CODE=$? + // CODE=$(($CODE - 12)) + // if [ $CODE -eq 0 ]; then + // touch {{index .OutFile 0}} + // fi + // exit $CODE + // ` + + // cmdLargeInternal := ` + // /bin/sh {{index .InFile 0}} + // CODE=$? 
+ // CODE=$(($CODE - 10)) + // if [ $CODE -eq 0 ]; then + // touch {{index .OutFile 0}} + // fi + // exit $CODE + // ` + // Create a configuration using temporary directories // Defines three command templates for different types of jobs. // These templates will be used to create shell commands for the worker processes. cmd := ` -/bin/sh {{index .InFile 0}} -CODE=$? -if [ $CODE -eq 0 ]; then - touch {{index .OutFile 0}} -fi -exit $CODE +for infile in {{range .InFile}} {{.}} {{end}}; do + /bin/sh $infile + CODE=$? + if [ $CODE -ne 0 ]; then + exit $CODE + fi +done +for outfile in {{range .OutFile}} {{.}} {{end}}; do + touch $outfile +done +exit 0 ` + cmdLarge := ` -/bin/sh {{index .InFile 0}} -CODE=$? -CODE=$(($CODE - 12)) -if [ $CODE -eq 0 ]; then - touch {{index .OutFile 0}} -fi -exit $CODE +for infile in {{range .InFile}} {{.}} {{end}}; do + /bin/sh $infile + CODE=$? + CODE=$(($CODE - 12)) + if [ $CODE -ne 0 ]; then + exit $CODE + fi +done +for outfile in {{range .OutFile}} {{.}} {{end}}; do + touch $outfile +done +exit 0 ` cmdLargeInternal := ` -/bin/sh {{index .InFile 0}} -CODE=$? -CODE=$(($CODE - 10)) -if [ $CODE -eq 0 ]; then - touch {{index .OutFile 0}} -fi -exit $CODE +for infile in {{range .InFile}} {{.}} {{end}}; do + /bin/sh $infile + CODE=$? 
+ CODE=$(($CODE - 10)) + if [ $CODE -ne 0]; then + exit $CODE + fi +done +for outfile in {{range .OutFile}} {{.}} {{end}}; do + touch $outfile +done +exit 0 ` // For a prover M confM = &config.Config{ @@ -182,11 +234,17 @@ exit $CODE }, }, ExecRndBeacon: config.RndBeacon{ - GL: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execGLRndBeacon)}, // In practice, there will be `n` files - }, - BootstrapMetadata: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata)}, + // GL: config.WithRequestDir{ + // RequestsRootDir: []string{path.Join(testDir, proverM, execGLRndBeacon)}, // In practice, there will be `n` files + // }, + // BootstrapMetadata: config.WithRequestDir{ + // RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata)}, + // }, + + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata), + path.Join(testDir, proverM, execGLRndBeacon), + }, }, WithResponseDir: config.WithResponseDir{ ResponsesRootDir: []string{path.Join(testDir, proverM, execRndbeaconLPP)}, @@ -250,10 +308,15 @@ exit $CODE // RndBeacon: 2 input -> 1 output // In practice there will be `n` files here - os.MkdirAll(confM.ExecRndBeacon.GL.DirFrom(0), permCode), - os.MkdirAll(confM.ExecRndBeacon.GL.DirDone(0), permCode), - os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirFrom(0), permCode), - os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirDone(0), permCode), + // os.MkdirAll(confM.ExecRndBeacon.GL.DirFrom(0), permCode), + // os.MkdirAll(confM.ExecRndBeacon.GL.DirDone(0), permCode), + // os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirFrom(0), permCode), + // os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirDone(0), permCode), + + os.MkdirAll(confM.ExecRndBeacon.DirFrom(0), permCode), + os.MkdirAll(confM.ExecRndBeacon.DirDone(0), permCode), + os.MkdirAll(confM.ExecRndBeacon.DirFrom(1), permCode), + 
os.MkdirAll(confM.ExecRndBeacon.DirDone(1), permCode), os.MkdirAll(confM.ExecRndBeacon.DirTo(0), permCode), // LPP: 1 input -> 1 output @@ -299,7 +362,7 @@ func createLimitlessTestInputFiles( fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json"} case RndBeacon: fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", - "%v-%v-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json"} + "%v-%v-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json"} case LPP: fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json"} case Conglomeration: diff --git a/prover/cmd/controller/controller/job_definition.go b/prover/cmd/controller/controller/job_definition.go index 09b1e0231..c6c092cb9 100644 --- a/prover/cmd/controller/controller/job_definition.go +++ b/prover/cmd/controller/controller/job_definition.go @@ -268,10 +268,37 @@ func (jd *JobDefinition) dirTo(opIdx int) string { if err := jd.isValidOutputFileIdx(opIdx); err != nil { utils.Panic("dirTo:%v", err.Error()) } - return filepath.Join(jd.ResponsesRootDir[opIdx], config.ResponsesToSubDir) } +/* + +TODO: Do it like this to avoid confusion +func (jd *JobDefinition) dirFrom() (dirs []string) { + dirs = make([]string, len(jd.RequestsRootDir)) + for ipIdx := 0; ipIdx < len(jd.RequestsRootDir); ipIdx++ { + dirs[ipIdx] = filepath.Join(jd.RequestsRootDir[ipIdx], config.RequestsFromSubDir) + } + return dirs +} + +func (jd *JobDefinition) dirDone() []string { + dirs := make([]string, len(jd.RequestsRootDir)) + for ipIdx := 0; ipIdx < len(jd.RequestsRootDir); ipIdx++ { + dirs[ipIdx] = filepath.Join(jd.RequestsRootDir[ipIdx], config.RequestsDoneSubDir) + } + return dirs +} + +func (jd *JobDefinition) dirTo() []string { + dirs := make([]string, len(jd.ResponsesRootDir)) + for opIdx := 0; opIdx < len(jd.ResponsesRootDir); opIdx++ { + dirs[opIdx] = filepath.Join(jd.ResponsesRootDir[opIdx], config.ResponsesToSubDir) + } + return dirs +} +*/ + func 
cmnExecParamsRegexp(nInputs int) []ParamsRegexp { paramsRegexp := make([]ParamsRegexp, nInputs) for i := 0; i < nInputs; i++ { diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index d39177978..20952e21c 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -9,11 +9,11 @@ import ( const ( // Job definitions - jobExecBootstrap = "execBootstrap" - jobExecGL = "execGL" - jobExecRndBeacon = "execRndbeacon" - jobExecLPP = "execLPP" - jobExecCongolomerateLPP = "execConglomeration" + jobExecBootstrap = "execBootstrap" + jobExecGL = "execGL" + jobExecRndBeacon = "execRndbeacon" + jobExecLPP = "execLPP" + jobExecCongolomeration = "execConglomeration" // Priorities priorityExecBootstrap = 0 @@ -94,7 +94,8 @@ func ExecRndBeaconDefinition(conf *config.Config) (*JobDefinition, error) { } // Input files - reqDirs := utils.CombineRequests(conf.ExecRndBeacon.BootstrapMetadata.RequestsRootDir, conf.ExecRndBeacon.GL.RequestsRootDir) + // reqDirs := utils.CombineRequests(conf.ExecRndBeacon.BootstrapMetadata.RequestsRootDir, conf.ExecRndBeacon.GL.RequestsRootDir) + reqDirs := conf.ExecRndBeacon.RequestsRootDir inputFilePatterns := []string{ fmt.Sprintf(execBootstrapRndBeaconInputPattern, inpFileExt, config.FailSuffix), fmt.Sprintf(execGLRndBeaconInputPattern, inpFileExt, config.FailSuffix), @@ -147,6 +148,6 @@ func ExecConglomerationDefinition(conf *config.Config) (*JobDefinition, error) { outputTmpls := []string{execConglomerateTemplate} outputFiles := []string{execConglomerateFile} - return commonJobDefinition(jobExecCongolomerateLPP, priorityExecCongolomeration, reqDirs, inputFilePatterns, + return commonJobDefinition(jobExecCongolomeration, priorityExecCongolomeration, reqDirs, inputFilePatterns, respDirs, outputTmpls, outputFiles, cmnExecParamsRegexp(3), config.FailSuffix) } diff --git 
a/prover/cmd/controller/controller/job_definition_limitless_test.go b/prover/cmd/controller/controller/job_definition_limitless_test.go index b6e92a168..44af94005 100644 --- a/prover/cmd/controller/controller/job_definition_limitless_test.go +++ b/prover/cmd/controller/controller/job_definition_limitless_test.go @@ -457,7 +457,9 @@ func TestExecRndBeaconInFileRegexp(t *testing.T) { conf := config.Config{} conf.Version = "0.1.2" conf.ExecRndBeacon.CanRunFullLarge = c.Ext == "large" - conf.ExecRndBeacon.GL.RequestsRootDir = []string{"", ""} + // conf.ExecRndBeacon.GL.RequestsRootDir = []string{"", ""} + + conf.ExecRndBeacon.RequestsRootDir = []string{"", ""} conf.ExecRndBeacon.ResponsesRootDir = []string{""} def, err := ExecRndBeaconDefinition(&conf) diff --git a/prover/cmd/controller/controller/jobs.go b/prover/cmd/controller/controller/jobs.go index 379c57c80..4e0140ea2 100644 --- a/prover/cmd/controller/controller/jobs.go +++ b/prover/cmd/controller/controller/jobs.go @@ -101,6 +101,15 @@ func (j *Job) InProgressPath(ipIdx int) string { return filepath.Join(j.Def.dirFrom(ipIdx), j.LockedFile[ipIdx]) } +// func (j *Job) InProgressPath() []string { +// dirs := j.Def.dirFrom() +// inProgressPaths := make([]string, len(dirs)) +// for ipIdx := 0; ipIdx < len(inProgressPaths); ipIdx++ { +// inProgressPaths[ipIdx] = filepath.Join(dirs[ipIdx], j.LockedFile[ipIdx]) +// } +// return inProgressPaths +// } + // Returns the name of the output file for the job at the specified index func (j *Job) ResponseFile(opIdx int) (s string, err error) { diff --git a/prover/config/config.go b/prover/config/config.go index f9abcf32b..2a1a97c83 100644 --- a/prover/config/config.go +++ b/prover/config/config.go @@ -147,10 +147,11 @@ type Config struct { } type RndBeacon struct { - GL WithRequestDir `mapstructure:",squash"` + // GL WithRequestDir `mapstructure:",squash"` - BootstrapMetadata WithRequestDir `mapstructure:",squash"` + // BootstrapMetadata WithRequestDir `mapstructure:",squash"` 
+ WithRequestDir `mapstructure:",squash"` WithResponseDir `mapstructure:",squash"` // ProverMode stores the kind of prover to use. diff --git a/prover/config/constants.go b/prover/config/constants.go index 6b5dfb632..4578658ba 100644 --- a/prover/config/constants.go +++ b/prover/config/constants.go @@ -17,4 +17,7 @@ const ( // Extension to add in order to defer the job to the large prover LargeSuffix = "large" + + // Limitless prover + NoOfSegments = 1 ) From 7e2b337cf8a312fb6d323a10fbe07a15eb994cfe Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Wed, 12 Feb 2025 15:00:24 +0000 Subject: [PATCH 29/48] refactor multi-input job --- .../cmd/controller/controller/fs_watcher.go | 85 +++++++++---------- .../controller/fs_watcher_limitless_test.go | 2 +- 2 files changed, 39 insertions(+), 48 deletions(-) diff --git a/prover/cmd/controller/controller/fs_watcher.go b/prover/cmd/controller/controller/fs_watcher.go index 598c66e5a..425805e4d 100644 --- a/prover/cmd/controller/controller/fs_watcher.go +++ b/prover/cmd/controller/controller/fs_watcher.go @@ -163,58 +163,14 @@ func (f *FsWatcher) lockBest(jobs []*Job) (pos int, success bool) { } func (fs *FsWatcher) appendMultiInputJobFromDef(jdef *JobDefinition, jobs *[]*Job, numsMatched *int) (err error) { - switch jdef.Name { case jobExecRndBeacon: - dir1From, dir2From := jdef.dirFrom(0), jdef.dirFrom(1) - dir1Ent, err := lsname(dir1From) - if err != nil { - return err - } - dir2Ent, err := lsname(dir2From) - if err != nil { - return err - } - // Create a map to group files by their common prefix - fileMap := make(map[string][]string) - - // Populate the map with files from dir1 - for _, entry := range dir1Ent { - if !entry.IsDir() { - prefix := getCommonPrefix(entry.Name()) - fileMap[prefix] = append(fileMap[prefix], entry.Name()) - } - } - - // Populate the map with files from dir2 - for _, entry := range dir2Ent { - if !entry.IsDir() { - prefix := getCommonPrefix(entry.Name()) - fileMap[prefix] = append(fileMap[prefix], 
entry.Name()) - } - } - - // Convert the map to the desired output format - for _, files := range fileMap { - //inputFileNames = append(inputFileNames, files) - job, err := NewJob(jdef, files) - if err != nil { - fs.Logger.Debugf("Found invalid files `%v` : %v", files, err) - continue - } - *jobs = append(*jobs, job) - *numsMatched++ - - // Pass prometheus metrics - // metrics.CollectFS(jdef.Name, len(dirents), *numsMatched) - } - return nil + return fs.processDirectories(jdef, jobs, numsMatched, 2) case jobExecCongolomeration: - return nil + return fs.processDirectories(jdef, jobs, numsMatched, 3) default: - return fmt.Errorf("unsupported job type:%s", jdef.Name) + return fmt.Errorf("unsupported multi-input job type:%s", jdef.Name) } - } // Try appending a list of single-input jobs that are parsed from a given directory. @@ -329,6 +285,41 @@ func lsname(dirname string) (finfos []fs.DirEntry, err error) { return finfos, err } +func (fs *FsWatcher) processDirectories(jdef *JobDefinition, jobs *[]*Job, numsMatched *int, numDirs int) error { + // Create a map to group files by their common prefix + fileMap := make(map[string][]string) + + // Read and populate the map with files from each directory + for i := 0; i < numDirs; i++ { + dirFrom := jdef.dirFrom(i) + dirEnt, err := lsname(dirFrom) + if err != nil { + return err + } + for _, entry := range dirEnt { + if !entry.IsDir() { + prefix := getCommonPrefix(entry.Name()) + fileMap[prefix] = append(fileMap[prefix], entry.Name()) + } + } + } + + // Convert the map to the desired output format + for _, files := range fileMap { + job, err := NewJob(jdef, files) + if err != nil { + fs.Logger.Debugf("Found invalid files `%v` : %v", files, err) + continue + } + *jobs = append(*jobs, job) + *numsMatched++ + + // Pass prometheus metrics + // metrics.CollectFS(jdef.Name, len(dirents), *numsMatched) + } + return nil +} + // getCommonPrefix extracts the common prefix from a filename func getCommonPrefix(filename string) string { 
parts := strings.Split(filename, "-getZkProof") diff --git a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go index dae90cb56..846387d54 100644 --- a/prover/cmd/controller/controller/fs_watcher_limitless_test.go +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -367,7 +367,7 @@ func createLimitlessTestInputFiles( fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json"} case Conglomeration: fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", - "%v-%v-etv0.1.2-stv1.2.3-getZkProof_GLjson", + "%v-%v-etv0.1.2-stv1.2.3-getZkProof_GL.json", "%v-%v-etv0.1.2-stv1.2.3-getZkProof_LPP.json"} default: panic("incorrect job type") From ba67fb254ab9acbbf3b24541dfe8d2fa72b6d614 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Wed, 12 Feb 2025 16:36:20 +0000 Subject: [PATCH 30/48] conglomeration file watcher to be fixed --- .../controller/fs_watcher_limitless_test.go | 104 +++++++++--------- .../controller/job_definition_limitless.go | 4 +- .../job_definition_limitless_test.go | 7 +- prover/config/config.go | 25 +---- 4 files changed, 63 insertions(+), 77 deletions(-) diff --git a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go index 846387d54..d6f5e1bc7 100644 --- a/prover/cmd/controller/controller/fs_watcher_limitless_test.go +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -28,21 +28,22 @@ func TestLimitlessProverFileWatcherL(t *testing.T) { exitCode := 0 // we are not interested in the exit code here // Create a list of files for each job type - execBootstrapFrom := []string{confL.ExecBootstrap.DirFrom(0)} - execGLFrom := []string{confL.ExecGL.DirFrom(0)} + // execBootstrapFrom := []string{confL.ExecBootstrap.DirFrom(0)} + // execGLFrom := []string{confL.ExecGL.DirFrom(0)} // execRndBeaconFrom := []string{ - // 
confL.ExecRndBeacon.BootstrapMetadata.DirFrom(0), - // confL.ExecRndBeacon.GL.DirFrom(0), + // confL.ExecRndBeacon.DirFrom(0), + // confL.ExecRndBeacon.DirFrom(1), // } - execRndBeaconFrom := []string{ - confL.ExecRndBeacon.DirFrom(0), - confL.ExecRndBeacon.DirFrom(1), + // execLPPFrom := []string{confL.ExecLPP.DirFrom(0)} + execConglomerationFrom := []string{ + confL.ExecConglomeration.DirFrom(0), + confL.ExecConglomeration.DirFrom(0), + confL.ExecConglomeration.DirFrom(0), } - execLPPFrom := []string{confL.ExecLPP.DirFrom(0)} - // execConglomerationFrom := []string{ - // confL.ExecConglomeration.GL.DirFrom(0), - // confL.ExecConglomeration.LPP.DirFrom(0), - // confL.ExecConglomeration.BootstrapMetadata.DirFrom(0), + + // execRndBeaconFrom := []string{ + // confL.ExecRndBeacon.BootstrapMetadata.DirFrom(0), + // confL.ExecRndBeacon.GL.DirFrom(0), // } // The jobs, declared in the order in which they are expected to be found @@ -50,21 +51,21 @@ func TestLimitlessProverFileWatcherL(t *testing.T) { FName []string Skip bool }{ - { - FName: createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, Bootstrap, exitCode), - }, - { - FName: createLimitlessTestInputFiles(execGLFrom, 0, 1, GL, exitCode), - }, - { - FName: createLimitlessTestInputFiles(execRndBeaconFrom, 0, 1, RndBeacon, exitCode), - }, - { - FName: createLimitlessTestInputFiles(execLPPFrom, 0, 1, LPP, exitCode), - }, // { - // FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, Conglomeration, exitCode), + // FName: createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, Bootstrap, exitCode), + // }, + // { + // FName: createLimitlessTestInputFiles(execGLFrom, 0, 1, GL, exitCode), // }, + // { + // FName: createLimitlessTestInputFiles(execRndBeaconFrom, 0, 1, RndBeacon, exitCode), + // }, + // { + // FName: createLimitlessTestInputFiles(execLPPFrom, 0, 1, LPP, exitCode), + // }, + { + FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, Conglomeration, exitCode), + }, } fw := 
NewFsWatcher(confL) @@ -198,10 +199,10 @@ exit 0 EnableAggregation: false, // Limitless prover components - EnableExecBootstrap: true, - EnableExecGL: true, - EnableExecRndBeacon: true, - EnableExecLPP: true, + EnableExecBootstrap: false, + EnableExecGL: false, + EnableExecRndBeacon: false, + EnableExecLPP: false, EnableExecConglomeration: true, LocalID: proverM, Prometheus: config.Prometheus{Enabled: false}, @@ -233,14 +234,7 @@ exit 0 path.Join(testDir, proverM, execGLConglomeration)}, }, }, - ExecRndBeacon: config.RndBeacon{ - // GL: config.WithRequestDir{ - // RequestsRootDir: []string{path.Join(testDir, proverM, execGLRndBeacon)}, // In practice, there will be `n` files - // }, - // BootstrapMetadata: config.WithRequestDir{ - // RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata)}, - // }, - + ExecRndBeacon: config.Execution{ WithRequestDir: config.WithRequestDir{ RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata), path.Join(testDir, proverM, execGLRndBeacon), @@ -258,15 +252,12 @@ exit 0 ResponsesRootDir: []string{path.Join(testDir, proverM, execLPPConglomeration)}, }, }, - ExecConglomeration: config.Conglomeration{ - GL: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execGLConglomeration)}, - }, - LPP: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execLPPConglomeration)}, - }, - BootstrapMetadata: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata)}, + ExecConglomeration: config.Execution{ + WithRequestDir: config.WithRequestDir{ + RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata), + path.Join(testDir, proverM, execGLConglomeration), + path.Join(testDir, proverM, execLPPConglomeration), + }, }, WithResponseDir: config.WithResponseDir{ ResponsesRootDir: []string{path.Join(testDir, proverM, execConglomeration)}, @@ -327,14 +318,23 @@ exit 0 // Conglomeration: 3 input -> 
1 ouput // In practice there will be `2n+1` inputs => 1 output file - os.MkdirAll(confM.ExecConglomeration.GL.DirFrom(0), permCode), - os.MkdirAll(confM.ExecConglomeration.GL.DirDone(0), permCode), + // os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirFrom(0), permCode), + // os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirDone(0), permCode), + + // os.MkdirAll(confM.ExecConglomeration.GL.DirFrom(0), permCode), + // os.MkdirAll(confM.ExecConglomeration.GL.DirDone(0), permCode), + + // os.MkdirAll(confM.ExecConglomeration.LPP.DirFrom(0), permCode), + // os.MkdirAll(confM.ExecConglomeration.LPP.DirDone(0), permCode), + + os.MkdirAll(confM.ExecConglomeration.DirFrom(0), permCode), + os.MkdirAll(confM.ExecConglomeration.DirDone(0), permCode), - os.MkdirAll(confM.ExecConglomeration.LPP.DirFrom(0), permCode), - os.MkdirAll(confM.ExecConglomeration.LPP.DirDone(0), permCode), + os.MkdirAll(confM.ExecConglomeration.DirFrom(1), permCode), + os.MkdirAll(confM.ExecConglomeration.DirDone(1), permCode), - os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirFrom(0), permCode), - os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirDone(0), permCode), + os.MkdirAll(confM.ExecConglomeration.DirFrom(2), permCode), + os.MkdirAll(confM.ExecConglomeration.DirDone(2), permCode), os.MkdirAll(confM.ExecConglomeration.DirTo(0), permCode), ) diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index 20952e21c..9036d614c 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -4,7 +4,6 @@ import ( "fmt" "github.com/consensys/linea-monorepo/prover/config" - "github.com/consensys/linea-monorepo/prover/utils" ) const ( @@ -136,7 +135,8 @@ func ExecConglomerationDefinition(conf *config.Config) (*JobDefinition, error) { } // Input files - reqDirs := 
utils.CombineRequests(conf.ExecConglomeration.BootstrapMetadata.RequestsRootDir, conf.ExecConglomeration.GL.RequestsRootDir, conf.ExecConglomeration.LPP.RequestsRootDir) + //reqDirs := utils.CombineRequests(conf.ExecConglomeration.BootstrapMetadata.RequestsRootDir, conf.ExecConglomeration.GL.RequestsRootDir, conf.ExecConglomeration.LPP.RequestsRootDir) + reqDirs := conf.ExecConglomeration.RequestsRootDir inputFilePatterns := []string{ fmt.Sprintf(execConglomerateBootstrapDistMetadataPattern, inpFileExt, config.FailSuffix), fmt.Sprintf(execConglomerateGLInputPattern, inpFileExt, config.FailSuffix), diff --git a/prover/cmd/controller/controller/job_definition_limitless_test.go b/prover/cmd/controller/controller/job_definition_limitless_test.go index 44af94005..6d470dea9 100644 --- a/prover/cmd/controller/controller/job_definition_limitless_test.go +++ b/prover/cmd/controller/controller/job_definition_limitless_test.go @@ -821,9 +821,10 @@ func TestExecConglomerationInFileRegexp(t *testing.T) { conf := config.Config{} conf.Version = "0.1.2" conf.ExecConglomeration.CanRunFullLarge = c.Ext == "large" - conf.ExecConglomeration.GL.RequestsRootDir = []string{""} - conf.ExecConglomeration.LPP.RequestsRootDir = []string{""} - conf.ExecConglomeration.BootstrapMetadata.RequestsRootDir = []string{""} + // conf.ExecConglomeration.GL.RequestsRootDir = []string{""} + // conf.ExecConglomeration.LPP.RequestsRootDir = []string{""} + // conf.ExecConglomeration.BootstrapMetadata.RequestsRootDir = []string{""} + conf.ExecConglomeration.RequestsRootDir = []string{"", "", ""} conf.ExecConglomeration.ResponsesRootDir = []string{""} def, err := ExecConglomerationDefinition(&conf) diff --git a/prover/config/config.go b/prover/config/config.go index 2a1a97c83..0e047ba12 100644 --- a/prover/config/config.go +++ b/prover/config/config.go @@ -112,11 +112,11 @@ type Config struct { PublicInputInterconnection PublicInput `mapstructure:"public_input_interconnection"` // TODO add wizard compilation 
params // LIMITLESS PROVER Components - ExecBootstrap Execution `mapstructure:"execution_bootstrap"` - ExecGL Execution `mapstructure:"execution_gl"` - ExecLPP Execution `mapstructure:"execution_lpp"` - ExecRndBeacon RndBeacon `mapstructure:"execution_rndbeacon"` - ExecConglomeration Conglomeration `mapstructure:"execution_conglomeration"` + ExecBootstrap Execution `mapstructure:"execution_bootstrap"` + ExecGL Execution `mapstructure:"execution_gl"` + ExecLPP Execution `mapstructure:"execution_lpp"` + ExecRndBeacon Execution `mapstructure:"execution_rndbeacon"` + ExecConglomeration Execution `mapstructure:"execution_conglomeration"` Debug struct { // Profiling indicates whether we want to generate profiles using the [runtime/pprof] pkg. @@ -146,21 +146,6 @@ type Config struct { TracesLimitsLarge TracesLimits `mapstructure:"traces_limits_large" validate:"required"` } -type RndBeacon struct { - // GL WithRequestDir `mapstructure:",squash"` - - // BootstrapMetadata WithRequestDir `mapstructure:",squash"` - - WithRequestDir `mapstructure:",squash"` - WithResponseDir `mapstructure:",squash"` - - // ProverMode stores the kind of prover to use. - ProverMode ProverMode `mapstructure:"prover_mode" validate:"required,oneof=dev partial full proofless bench check-only encode-only"` - - // CanRunFullLarge indicates whether the prover is running on a large machine (and can run full large traces). 
- CanRunFullLarge bool `mapstructure:"can_run_full_large"` -} - type Conglomeration struct { GL WithRequestDir `mapstructure:",squash"` From 761430c41e84f3335a666f4d4d526884df7f6b76 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Wed, 12 Feb 2025 19:58:47 +0000 Subject: [PATCH 31/48] conglomeration individual pass --- .../controller/fs_watcher_limitless_test.go | 43 +++++++------------ 1 file changed, 15 insertions(+), 28 deletions(-) diff --git a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go index d6f5e1bc7..427c5bc5a 100644 --- a/prover/cmd/controller/controller/fs_watcher_limitless_test.go +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -30,22 +30,17 @@ func TestLimitlessProverFileWatcherL(t *testing.T) { // Create a list of files for each job type // execBootstrapFrom := []string{confL.ExecBootstrap.DirFrom(0)} // execGLFrom := []string{confL.ExecGL.DirFrom(0)} - // execRndBeaconFrom := []string{ - // confL.ExecRndBeacon.DirFrom(0), - // confL.ExecRndBeacon.DirFrom(1), - // } - // execLPPFrom := []string{confL.ExecLPP.DirFrom(0)} + execRndBeaconFrom := []string{ + confL.ExecRndBeacon.DirFrom(0), + confL.ExecRndBeacon.DirFrom(1), + } + //execLPPFrom := []string{confL.ExecLPP.DirFrom(0)} execConglomerationFrom := []string{ confL.ExecConglomeration.DirFrom(0), - confL.ExecConglomeration.DirFrom(0), - confL.ExecConglomeration.DirFrom(0), + confL.ExecConglomeration.DirFrom(1), + confL.ExecConglomeration.DirFrom(2), } - // execRndBeaconFrom := []string{ - // confL.ExecRndBeacon.BootstrapMetadata.DirFrom(0), - // confL.ExecRndBeacon.GL.DirFrom(0), - // } - // The jobs, declared in the order in which they are expected to be found expectedFNames := []struct { FName []string @@ -57,9 +52,9 @@ func TestLimitlessProverFileWatcherL(t *testing.T) { // { // FName: createLimitlessTestInputFiles(execGLFrom, 0, 1, GL, exitCode), // }, - // { - // FName: 
createLimitlessTestInputFiles(execRndBeaconFrom, 0, 1, RndBeacon, exitCode), - // }, + { + FName: createLimitlessTestInputFiles(execRndBeaconFrom, 0, 1, RndBeacon, exitCode), + }, // { // FName: createLimitlessTestInputFiles(execLPPFrom, 0, 1, LPP, exitCode), // }, @@ -108,7 +103,7 @@ func setupLimitlessFsTest(t *testing.T) (confM, confL *config.Config) { execGLConglomeration = "gl" execRndbeaconLPP = "rndbeacon" execLPPConglomeration = "lpp" - execConglomeration = "execution" + execConglomeration = "executionOutput" ) // // Create a configuration using temporary directories @@ -199,10 +194,10 @@ exit 0 EnableAggregation: false, // Limitless prover components - EnableExecBootstrap: false, - EnableExecGL: false, - EnableExecRndBeacon: false, - EnableExecLPP: false, + EnableExecBootstrap: true, + EnableExecGL: true, + EnableExecRndBeacon: true, + EnableExecLPP: true, EnableExecConglomeration: true, LocalID: proverM, Prometheus: config.Prometheus{Enabled: false}, @@ -299,11 +294,6 @@ exit 0 // RndBeacon: 2 input -> 1 output // In practice there will be `n` files here - // os.MkdirAll(confM.ExecRndBeacon.GL.DirFrom(0), permCode), - // os.MkdirAll(confM.ExecRndBeacon.GL.DirDone(0), permCode), - // os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirFrom(0), permCode), - // os.MkdirAll(confM.ExecRndBeacon.BootstrapMetadata.DirDone(0), permCode), - os.MkdirAll(confM.ExecRndBeacon.DirFrom(0), permCode), os.MkdirAll(confM.ExecRndBeacon.DirDone(0), permCode), os.MkdirAll(confM.ExecRndBeacon.DirFrom(1), permCode), @@ -329,13 +319,10 @@ exit 0 os.MkdirAll(confM.ExecConglomeration.DirFrom(0), permCode), os.MkdirAll(confM.ExecConglomeration.DirDone(0), permCode), - os.MkdirAll(confM.ExecConglomeration.DirFrom(1), permCode), os.MkdirAll(confM.ExecConglomeration.DirDone(1), permCode), - os.MkdirAll(confM.ExecConglomeration.DirFrom(2), permCode), os.MkdirAll(confM.ExecConglomeration.DirDone(2), permCode), - os.MkdirAll(confM.ExecConglomeration.DirTo(0), permCode), ) From 
d1419fcec79e1848b250745ea644876c683edb85 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Wed, 12 Feb 2025 20:19:56 +0000 Subject: [PATCH 32/48] limitless file system watcher all tests pass --- .../controller/fs_watcher_limitless_test.go | 185 +++++++++--------- 1 file changed, 95 insertions(+), 90 deletions(-) diff --git a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go index 427c5bc5a..e3979777e 100644 --- a/prover/cmd/controller/controller/fs_watcher_limitless_test.go +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -28,13 +28,13 @@ func TestLimitlessProverFileWatcherL(t *testing.T) { exitCode := 0 // we are not interested in the exit code here // Create a list of files for each job type - // execBootstrapFrom := []string{confL.ExecBootstrap.DirFrom(0)} - // execGLFrom := []string{confL.ExecGL.DirFrom(0)} + execBootstrapFrom := []string{confL.ExecBootstrap.DirFrom(0)} + execGLFrom := []string{confL.ExecGL.DirFrom(0)} execRndBeaconFrom := []string{ confL.ExecRndBeacon.DirFrom(0), confL.ExecRndBeacon.DirFrom(1), } - //execLPPFrom := []string{confL.ExecLPP.DirFrom(0)} + execLPPFrom := []string{confL.ExecLPP.DirFrom(0)} execConglomerationFrom := []string{ confL.ExecConglomeration.DirFrom(0), confL.ExecConglomeration.DirFrom(1), @@ -46,18 +46,18 @@ func TestLimitlessProverFileWatcherL(t *testing.T) { FName []string Skip bool }{ - // { - // FName: createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, Bootstrap, exitCode), - // }, - // { - // FName: createLimitlessTestInputFiles(execGLFrom, 0, 1, GL, exitCode), - // }, + { + FName: createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, Bootstrap, exitCode), + }, + { + FName: createLimitlessTestInputFiles(execGLFrom, 0, 1, GL, exitCode), + }, { FName: createLimitlessTestInputFiles(execRndBeaconFrom, 0, 1, RndBeacon, exitCode), }, - // { - // FName: createLimitlessTestInputFiles(execLPPFrom, 0, 1, LPP, exitCode), - // }, + 
{ + FName: createLimitlessTestInputFiles(execLPPFrom, 0, 1, LPP, exitCode), + }, { FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, Conglomeration, exitCode), }, @@ -96,93 +96,98 @@ func setupLimitlessFsTest(t *testing.T) (confM, confL *config.Config) { proverL = "prover-full-L" // Add conf. for Limitless prover: Naming convention: exec - execBootstrap = "execution" - execBootstrapGL = "bootstrapGl" - execBootstrapMetadata = "bootstrapMetadata" - execGLRndBeacon = "gl-rndbeacon" - execGLConglomeration = "gl" - execRndbeaconLPP = "rndbeacon" - execLPPConglomeration = "lpp" - execConglomeration = "executionOutput" + execBootstrap = "execution" + execBootstrapGL = "bootstrap-gl" + execBootstrapMetadata = "bootstrap-metadata" + execBootstrapMetadataRndBeacon = "bootstrap-metadata-rndbeacon" + execGLRndBeacon = "gl-rndbeacon" + execGLConglomeration = "gl" + execRndbeaconLPP = "rndbeacon" + execLPPConglomeration = "lpp" + execBootstrapMetadataConglomeration = "bootstrap-metadata-conglomeration" + execConglomeration = "execution" ) - // // Create a configuration using temporary directories - // // Defines three command templates for different types of jobs. - // // These templates will be used to create shell commands for the worker processes. - // cmd := ` - // /bin/sh {{index .InFile 0}} - // CODE=$? - // if [ $CODE -eq 0 ]; then - // touch {{index .OutFile 0}} - // fi - // exit $CODE - // ` - // cmdLarge := ` - // /bin/sh {{index .InFile 0}} - // CODE=$? - // CODE=$(($CODE - 12)) - // if [ $CODE -eq 0 ]; then - // touch {{index .OutFile 0}} - // fi - // exit $CODE - // ` - - // cmdLargeInternal := ` - // /bin/sh {{index .InFile 0}} - // CODE=$? - // CODE=$(($CODE - 10)) - // if [ $CODE -eq 0 ]; then - // touch {{index .OutFile 0}} - // fi - // exit $CODE - // ` + /* + // Create a configuration using temporary directories + // Defines three command templates for different types of jobs. 
+ // These templates will be used to create shell commands for the worker processes. + cmd := ` + /bin/sh {{index .InFile 0}} + CODE=$? + if [ $CODE -eq 0 ]; then + touch {{index .OutFile 0}} + fi + exit $CODE + ` + cmdLarge := ` + /bin/sh {{index .InFile 0}} + CODE=$? + CODE=$(($CODE - 12)) + if [ $CODE -eq 0 ]; then + touch {{index .OutFile 0}} + fi + exit $CODE + ` + + cmdLargeInternal := ` + /bin/sh {{index .InFile 0}} + CODE=$? + CODE=$(($CODE - 10)) + if [ $CODE -eq 0 ]; then + touch {{index .OutFile 0}} + fi + exit $CODE + ` + */ // Create a configuration using temporary directories // Defines three command templates for different types of jobs. // These templates will be used to create shell commands for the worker processes. cmd := ` -for infile in {{range .InFile}} {{.}} {{end}}; do - /bin/sh $infile - CODE=$? - if [ $CODE -ne 0 ]; then - exit $CODE - fi -done -for outfile in {{range .OutFile}} {{.}} {{end}}; do - touch $outfile -done -exit 0 -` + for infile in {{range .InFile}} {{.}} {{end}}; do + /bin/sh $infile + CODE=$? + if [ $CODE -ne 0 ]; then + exit $CODE + fi + done + for outfile in {{range .OutFile}} {{.}} {{end}}; do + touch $outfile + done + exit 0 + ` cmdLarge := ` -for infile in {{range .InFile}} {{.}} {{end}}; do - /bin/sh $infile - CODE=$? - CODE=$(($CODE - 12)) - if [ $CODE -ne 0 ]; then - exit $CODE - fi -done -for outfile in {{range .OutFile}} {{.}} {{end}}; do - touch $outfile -done -exit 0 -` + for infile in {{range .InFile}} {{.}} {{end}}; do + /bin/sh $infile + CODE=$? + CODE=$(($CODE - 12)) + if [ $CODE -ne 0 ]; then + exit $CODE + fi + done + for outfile in {{range .OutFile}} {{.}} {{end}}; do + touch $outfile + done + exit 0 + ` cmdLargeInternal := ` -for infile in {{range .InFile}} {{.}} {{end}}; do - /bin/sh $infile - CODE=$? 
- CODE=$(($CODE - 10)) - if [ $CODE -ne 0]; then - exit $CODE - fi -done -for outfile in {{range .OutFile}} {{.}} {{end}}; do - touch $outfile -done -exit 0 -` + for infile in {{range .InFile}} {{.}} {{end}}; do + /bin/sh $infile + CODE=$? + CODE=$(($CODE - 10)) + if [ $CODE -ne 0]; then + exit $CODE + fi + done + for outfile in {{range .OutFile}} {{.}} {{end}}; do + touch $outfile + done + exit 0 + ` + // For a prover M confM = &config.Config{ Version: "0.2.4", @@ -231,7 +236,7 @@ exit 0 }, ExecRndBeacon: config.Execution{ WithRequestDir: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata), + RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadataRndBeacon), path.Join(testDir, proverM, execGLRndBeacon), }, }, @@ -249,7 +254,7 @@ exit 0 }, ExecConglomeration: config.Execution{ WithRequestDir: config.WithRequestDir{ - RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadata), + RequestsRootDir: []string{path.Join(testDir, proverM, execBootstrapMetadataConglomeration), path.Join(testDir, proverM, execGLConglomeration), path.Join(testDir, proverM, execLPPConglomeration), }, From 576e2da22885f6be2a36a3f8dec6121553193eee Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Fri, 14 Feb 2025 09:46:28 +0000 Subject: [PATCH 33/48] make executor generic --- prover/cmd/controller/controller/executor.go | 114 ++++++++++++------ .../cmd/controller/controller/fs_watcher.go | 8 +- .../controller/fs_watcher_limitless_test.go | 9 -- prover/config/config_default.go | 1 - 4 files changed, 79 insertions(+), 53 deletions(-) diff --git a/prover/cmd/controller/controller/executor.go b/prover/cmd/controller/controller/executor.go index 20f1f31d5..8e44e1c26 100644 --- a/prover/cmd/controller/controller/executor.go +++ b/prover/cmd/controller/controller/executor.go @@ -63,22 +63,19 @@ func NewExecutor(cfg *config.Config) *Executor { // matter what happens we want to be able to gracefully shutdown. 
func (e *Executor) Run(job *Job) (status Status) { - // The job should be locked - // ASSUMED 0 index here - - if len(job.LockedFile[0]) == 0 { - return Status{ - ExitCode: CodeFatal, - What: "the job is not locked", + for ipIdx := 0; ipIdx < len(job.LockedFile); ipIdx++ { + // The job should be locked + if len(job.LockedFile[ipIdx]) == 0 { + return Status{ + ExitCode: CodeFatal, + What: "the job is not locked", + } } } - // if we are on a large instance and the job is execution with large suffix, - // we directly run with large. - // note: checking that locked job contains "large" is not super typesafe... - - // ASSUMED 0 index here - largeRun := job.Def.Name == jobNameExecution && e.Config.Execution.CanRunFullLarge && strings.Contains(job.LockedFile[0], config.LargeSuffix) + // Note: if we are on a large instance and the job is execution with large suffix, we directly run with large. + // Checking that locked job contains "large" is not super typesafe... + largeRun := e.canRunFullLarge(*job, config.LargeSuffix) // First, run the job normally cmd, err := e.buildCmd(job, largeRun) @@ -92,7 +89,7 @@ func (e *Executor) Run(job *Job) (status Status) { status = runCmd(cmd, job, false) - // if it's a blob decompression or aggregation, we never retry with a large + // If it's a blob decompression or aggregation, we never retry with a large // command. We can return the status as is. if largeRun || job.Def.Name == jobNameBlobDecompression || job.Def.Name == jobNameAggregation { return status @@ -130,32 +127,39 @@ func (e *Executor) Run(job *Job) (status Status) { // Builds a command from a template to run, returns a status if it failed func (e *Executor) buildCmd(job *Job, large bool) (cmd string, err error) { - // Generate names for the output files. Also attempts to generate the - // names of the final response files so that we can be sure they will not - // fail being generated after having run the command. 
- - // ASSUMED 0 index here - if _, err := job.ResponseFile(0); err != nil { - logrus.Errorf( - "could not generate the tmp response filename for %s: %v", - job.OriginalFile, err, - ) - return "", err - } - // ASSUMED 0 index here - outFile := job.TmpResponseFile(e.Config, 0) - tmpl := e.Config.Controller.WorkerCmdTmpl if large { tmpl = e.Config.Controller.WorkerCmdLargeTmpl } - // use the template to generate the command - // ASSUMED 0 index + // m, n => #input, #output files + m, n := len(job.Def.RequestsRootDir), len(job.Def.ResponsesRootDir) + inFiles, outFiles := make([]string, m), make([]string, n) + + // Input Files + for ipIdx := 0; ipIdx < m; ipIdx++ { + inFiles[ipIdx] = job.InProgressPath(ipIdx) + } + + // Generate names for the output files. Also attempts to generate the + // names of the final response files so that we can be sure they will not + // fail being generated after having run the command. + for opIdx := 0; opIdx < n; opIdx++ { + if _, err := job.ResponseFile(opIdx); err != nil { + logrus.Errorf( + "could not generate the tmp response filename for %s: %v", + job.OriginalFile, err, + ) + return "", err + } + outFiles[opIdx] = job.TmpResponseFile(e.Config, opIdx) + } + + // Use the template to generate the command resource := Resource{ ConfFile: fConfig, - InFile: []string{job.InProgressPath(0)}, - OutFile: []string{outFile}, + InFile: inFiles, + OutFile: outFiles, } // Build the command and args from the job @@ -165,7 +169,6 @@ func (e *Executor) buildCmd(job *Job, large bool) (cmd string, err error) { "tried to generate the command for job %s but got %v", job.OriginalFile, err, ) - // Returns a status indicating that the command templating failed return "", err } @@ -178,8 +181,7 @@ func (e *Executor) buildCmd(job *Job, large bool) (cmd string, err error) { // this is a local retry or not. func runCmd(cmd string, job *Job, retry bool) Status { - // Split the command into a list of argvs that can be passed to the os - // package. 
+ // Split the command into a list of argvs that can be passed to the os package. logrus.Infof("The executor is about to run the command: %s", cmd) // The command is run through shell, that way it sparses us the requirement @@ -199,9 +201,9 @@ func runCmd(cmd string, job *Job, retry bool) Status { } pname := processName(job, cmd) - - // ASSUMED 0 index - metrics.CollectPreProcess(job.Def.Name, job.Start[0], job.End[0], false) + for ipIdx := 0; ipIdx < len(job.Def.RequestsRootDir); ipIdx++ { + metrics.CollectPreProcess(job.Def.Name, job.Start[ipIdx], job.End[ipIdx], false) + } // Starts a new process from our command startTime := time.Now() @@ -273,7 +275,6 @@ func runCmd(cmd string, job *Job, retry bool) Status { } metrics.CollectPostProcess(job.Def.Name, status.ExitCode, processingTime, retry) - return status } @@ -327,3 +328,36 @@ func unixExitCode(proc *os.ProcessState) (int, error) { return -1, fmt.Errorf("getting the unix exit code : the process has an unexpected status : %v, it should be terminated", proc.String()) } + +// canRunFullLarge checks if a job can run in full large mode +func (e *Executor) canRunFullLarge(job Job, largeSuffix string) bool { + for ipIdx := 0; ipIdx < len(job.LockedFile); ipIdx++ { + switch job.Def.Name { + case jobNameExecution: + if e.Config.Execution.CanRunFullLarge && strings.Contains(job.LockedFile[ipIdx], largeSuffix) { + return true + } + case jobExecBootstrap: + if e.Config.ExecBootstrap.CanRunFullLarge && strings.Contains(job.LockedFile[ipIdx], largeSuffix) { + return true + } + case jobExecGL: + if e.Config.ExecGL.CanRunFullLarge && strings.Contains(job.LockedFile[ipIdx], largeSuffix) { + return true + } + case jobExecRndBeacon: + if e.Config.ExecRndBeacon.CanRunFullLarge && strings.Contains(job.LockedFile[ipIdx], largeSuffix) { + return true + } + case jobExecLPP: + if e.Config.ExecLPP.CanRunFullLarge && strings.Contains(job.LockedFile[ipIdx], largeSuffix) { + return true + } + case jobExecCongolomeration: + if 
e.Config.ExecConglomeration.CanRunFullLarge && strings.Contains(job.LockedFile[ipIdx], largeSuffix) { + return true + } + } + } + return false +} diff --git a/prover/cmd/controller/controller/fs_watcher.go b/prover/cmd/controller/controller/fs_watcher.go index 425805e4d..965f1759c 100644 --- a/prover/cmd/controller/controller/fs_watcher.go +++ b/prover/cmd/controller/controller/fs_watcher.go @@ -313,10 +313,12 @@ func (fs *FsWatcher) processDirectories(jdef *JobDefinition, jobs *[]*Job, numsM } *jobs = append(*jobs, job) *numsMatched++ - - // Pass prometheus metrics - // metrics.CollectFS(jdef.Name, len(dirents), *numsMatched) } + + // Pass prometheus metrics + // TODO: Define a new function to collect the metrics here: + // metrics.CollectFS(jdef.Name, len(dirents), *numsMatched) + return nil } diff --git a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go index e3979777e..b3f72397d 100644 --- a/prover/cmd/controller/controller/fs_watcher_limitless_test.go +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -313,15 +313,6 @@ func setupLimitlessFsTest(t *testing.T) (confM, confL *config.Config) { // Conglomeration: 3 input -> 1 ouput // In practice there will be `2n+1` inputs => 1 output file - // os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirFrom(0), permCode), - // os.MkdirAll(confM.ExecConglomeration.BootstrapMetadata.DirDone(0), permCode), - - // os.MkdirAll(confM.ExecConglomeration.GL.DirFrom(0), permCode), - // os.MkdirAll(confM.ExecConglomeration.GL.DirDone(0), permCode), - - // os.MkdirAll(confM.ExecConglomeration.LPP.DirFrom(0), permCode), - // os.MkdirAll(confM.ExecConglomeration.LPP.DirDone(0), permCode), - os.MkdirAll(confM.ExecConglomeration.DirFrom(0), permCode), os.MkdirAll(confM.ExecConglomeration.DirDone(0), permCode), os.MkdirAll(confM.ExecConglomeration.DirFrom(1), permCode), diff --git a/prover/config/config_default.go 
b/prover/config/config_default.go index 5c444012f..dbbe17719 100644 --- a/prover/config/config_default.go +++ b/prover/config/config_default.go @@ -33,7 +33,6 @@ func setDefaultValues() { // TODO @gbotrel binary to run prover is hardcoded here. viper.SetDefault("controller.worker_cmd_tmpl", "prover prove --config {{.ConfFile}} --in {{.InFile}} --out {{.OutFile}}") viper.SetDefault("controller.worker_cmd_large_tmpl", "prover prove --config {{.ConfFile}} --in {{.InFile}} --out {{.OutFile}} --large") - } func setDefaultPaths() { From e9a4b008117dfa3e05f544c94be93afb6c4fd16b Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Fri, 14 Feb 2025 09:59:05 +0000 Subject: [PATCH 34/48] init limitless executor testfiles --- .../controller/executor_limitless_test.go | 96 ++++++++ .../controller/controller/executor_test.go | 2 +- prover/cmd/controller/controller/tmp_test.go | 211 ------------------ 3 files changed, 97 insertions(+), 212 deletions(-) create mode 100644 prover/cmd/controller/controller/executor_limitless_test.go delete mode 100644 prover/cmd/controller/controller/tmp_test.go diff --git a/prover/cmd/controller/controller/executor_limitless_test.go b/prover/cmd/controller/controller/executor_limitless_test.go new file mode 100644 index 000000000..7783a883c --- /dev/null +++ b/prover/cmd/controller/controller/executor_limitless_test.go @@ -0,0 +1,96 @@ +package controller + +import ( + "testing" + "text/template" + + "github.com/consensys/linea-monorepo/prover/config" + "github.com/stretchr/testify/assert" +) + +func TestExecBootstrapRetryWithLarge(t *testing.T) { + // A test command useful for testing the command generation + var testDefinition = JobDefinition{ + + // Give a name to the command + Name: jobExecBootstrap, + + // The template of the output file (returns a constant template with no + // parameters) + OutputFileTmpl: []*template.Template{ + template.Must(template.New("output-file").Parse("output-fill-constant")), + 
template.Must(template.New("output-file").Parse("output-fill-constant")), + }, + + RequestsRootDir: []string{"./testdata"}, + ResponsesRootDir: []string{"./responses", "./responses"}, + } + + jobs := []struct { + Job + ExpCode int + }{ + { + Job: Job{ + Def: &testDefinition, + LockedFile: []string{"exit-0.sh"}, + // Not directly needed but helpful to track the process name + Start: []int{0}, + End: []int{0}, + }, + ExpCode: 0, + }, + { + Job: Job{ + Def: &testDefinition, + LockedFile: []string{"exit-1.sh"}, + // Not directly needed but helpful to track the process name + Start: []int{1}, + End: []int{1}, + }, + ExpCode: 1, + }, + { + Job: Job{ + Def: &testDefinition, + LockedFile: []string{"exit-77.sh"}, + // Not directly needed but helpful to track the process name + Start: []int{2}, + End: []int{2}, + }, + ExpCode: 77 + 10, + }, + { + Job: Job{ + Def: &testDefinition, + LockedFile: []string{"sigkill.sh"}, + // Not directly needed but helpful to track the process name + Start: []int{3}, + End: []int{3}, + }, + ExpCode: 137, + }, + } + + e := NewExecutor(&config.Config{ + Controller: config.Controller{ + WorkerCmdTmpl: template.Must( + template.New("test-cmd"). + Parse("/bin/sh {{index .InFile 0}}"), + ), + // And the large fields. The commands adds a +10 to the return code + // to leave an evidence that the return code was obtained through + // running the large command. + WorkerCmdLargeTmpl: template.Must( + template.New("test-cmd-large"). + Parse(`/bin/sh -c "/bin/sh {{index .InFile 0}}"; exit $(($? 
+ 10))`), + ), + RetryLocallyWithLargeCodes: config.DefaultRetryLocallyWithLargeCodes, + }, + }) + + for i := range jobs { + status := e.Run(&jobs[i].Job) + assert.Equalf(t, jobs[i].ExpCode, status.ExitCode, "got status %++v", status) + } +} diff --git a/prover/cmd/controller/controller/executor_test.go b/prover/cmd/controller/controller/executor_test.go index 3d3d45c3a..973622c0f 100644 --- a/prover/cmd/controller/controller/executor_test.go +++ b/prover/cmd/controller/controller/executor_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" ) -func TestRetryWithLarge(t *testing.T) { +func TestExecutionRetryWithLarge(t *testing.T) { // A test command useful for testing the command generation var testDefinition = JobDefinition{ diff --git a/prover/cmd/controller/controller/tmp_test.go b/prover/cmd/controller/controller/tmp_test.go deleted file mode 100644 index 8646df72f..000000000 --- a/prover/cmd/controller/controller/tmp_test.go +++ /dev/null @@ -1,211 +0,0 @@ -package controller - -// func TestExecRndBeaconInFileRegexp(t *testing.T) { -// var ( -// correctBootstrapMetadataM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" -// // correctBootstrapMetadataL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large" -// // correctBootstrapMetadataWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77" -// // correctBootstrapMetadataWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77" -// // correctBootstrapMetadataWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_77.failure.code_77" -// // correctBootstrapMetadataWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_77.failure.code_77" -// // missingBootstrapMetadataEtv = "102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json" -// // missingBootstrapMetadataStv = 
"102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json" - -// correctGLM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json" -// // correctGLL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large" -// // correctGLWithFailM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_77" -// // correctGLWithFailL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_77" -// // correctGLWith2FailsM = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_77.failure.code_77" -// // correctGLWith2FailsL = "102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_77.failure.code_77" -// // missingGLEtv = "102-103-stv1.2.3-getZkProof_GL_RndBeacon.json" -// // missingGLStv = "102-103-etv0.2.3-getZkProof_GL_RndBeacon.json" -// // notAPoint = "102-103-etv0.2.3-getZkProofAjson" -// // badName = "102-103-etv0.2.3-stv1.2.3-getAggregatedProof.json" -// ) - -// // The responses in case of success -// var ( -// respRndBeaconM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// // respRndBeaconL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - -// // respRndBeaconWithFailM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// // respRndBeaconWithFailL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// // respRndBeaconWith2FailsM = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" -// // respRndBeaconWith2FailsL = "responses/102-103-etv0.2.3-stv1.2.3-getZkProof_RndBeacon.json" - -// // respRndBeaconWoEtv = "responses/102-103-etv-stv1.2.3-getZkProof_RndBeacon.json" -// // respRndBeaconWoStv = "responses/102-103-etv0.2.3-stv-getZkProof_RndBeacon.json" -// ) - -// // The rename in case it is deferred to the large prover -// var ( -// toLargeBootstrapMetadataM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" -// // toLargeBootstrapMetadataWithFailM = 
"requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" -// // toLargeBootstrapMetadataWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" -// // toLargeBootstrapMetadataWoEtv = "requests/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" -// // toLargeBootstrapMetadataWoStv = "requests/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_137" - -// toLargeGLM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" -// // toLargeGLWithFailM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" -// // toLargeGLWith2FailsM = "requests/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" -// // toLargeGLWoEtv = "requests/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" -// // toLargeGLWoStv = "requests/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_137" -// ) - -// // The rename in case it is a success -// var ( -// successBootstrapMetadataM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// // successBootstrapMetadataMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// // successBootstrapMetadatastWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// // successBootstrapMetadataL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" -// // successBootstrapMetadataWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// // successBootstrapMetadataWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" -// // successBootstrapMetadataWith2FailsM = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success" -// // successBootstrapMetadataWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success" - -// successGLM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" -// // successGLMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.success" -// // successGLstWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.success" -// // successGLL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" -// // successGLWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" -// // successGLWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" -// // successGLWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.success" -// // successGLWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success" -// ) - -// // The rename in case it is a panic (code = 2) -// var ( -// failBootstrapMetadataM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// // failBootstrapMetadataMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// // failBootstrapMetadatastWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// // failBootstrapMetadataL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" -// // failBootstrapMetadataWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// // failBootstrapMetadataWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" -// // failBootstrapMetadataWith2FailsM = 
"requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2" -// // failBootstrapMetadataWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_2" - -// failGLM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" -// // failGLMWoStv = "requests-done/102-103-etv0.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" -// // failGLstWoEtv = "requests-done/102-103-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" -// // failGLL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" -// // failGLWithFailM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" -// // failGLWithFailL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" -// // failGLWith2FailsM = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2" -// // failGLWith2FailsL = "requests-done/102-103-etv0.2.3-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_2" -// ) - -// testcase := []inpFileNamesCases{ -// { -// Ext: "", Fail: "code", ShouldMatch: true, -// Fnames: [][]string{ -// {correctBootstrapMetadataM, correctGLM}, -// // {correctBootstrapMetadataWithFailM, correctGLWithFailM}, -// // {correctBootstrapMetadataWith2FailsM, correctGLWith2FailsM}, -// // {missingBootstrapMetadataEtv, missingGLEtv}, -// // {missingBootstrapMetadataStv, missingGLStv}, -// }, -// Explainer: "happy path, case M", -// ExpectedOutput: [][]string{ -// {respRndBeaconM}, -// // {respRndBeaconWithFailM}, -// // {respRndBeaconWith2FailsM}, -// // {respRndBeaconWoEtv}, -// // {respRndBeaconWoStv}, -// }, -// ExpToLarge: [][]string{ -// {toLargeBootstrapMetadataM, toLargeGLM}, -// // {toLargeBootstrapMetadataWithFailM, toLargeGLWithFailM}, -// // {toLargeBootstrapMetadataWith2FailsM, toLargeGLWith2FailsM}, -// // {toLargeBootstrapMetadataWoEtv, 
toLargeGLWoEtv}, -// // {toLargeBootstrapMetadataWoStv, toLargeGLWoStv}, -// }, -// ExpSuccess: [][]string{ -// {successBootstrapMetadataM, successGLM}, -// // {successBootstrapMetadataWithFailM, successGLWithFailM}, -// // {successBootstrapMetadataWith2FailsM, successGLWith2FailsM}, -// // {successBootstrapMetadatastWoEtv, successGLstWoEtv}, -// // {successBootstrapMetadataMWoStv, successGLMWoStv}, -// }, -// ExpFailW2: [][]string{ -// {failBootstrapMetadataM, failGLM}, -// // {failBootstrapMetadataWithFailM, failGLWithFailM}, -// // {failBootstrapMetadataWith2FailsM, failGLWith2FailsM}, -// // {failBootstrapMetadatastWoEtv, failGLstWoEtv}, -// // {failBootstrapMetadataMWoStv, failGLMWoStv}, -// }, -// }, -// // { -// // Ext: "large", Fail: "code", ShouldMatch: true, -// // Fnames: [][]string{ -// // {correctBootstrapMetadataL, correctGLL}, -// // {correctBootstrapMetadataWithFailL, correctGLWithFailL}, -// // {correctBootstrapMetadataWith2FailsL, correctGLWith2FailsL}, -// // }, -// // Explainer: "happy path, case L", -// // ExpectedOutput: [][]string{ -// // {respRndBeaconL}, -// // {respRndBeaconWithFailL}, -// // {respRndBeaconWith2FailsL}, -// // }, -// // ExpSuccess: [][]string{ -// // {successBootstrapMetadataL, successGLL}, -// // {successBootstrapMetadataWithFailL, successGLWithFailL}, -// // {successBootstrapMetadataWith2FailsL, successGLWith2FailsL}, -// // }, -// // ExpFailW2: [][]string{ -// // {failBootstrapMetadataL, failGLL}, -// // {failBootstrapMetadataWithFailL, failGLWithFailL}, -// // {failBootstrapMetadataWith2FailsL, failGLWith2FailsL}, -// // }, -// // }, -// // { -// // Ext: "", Fail: "code", ShouldMatch: false, -// // Fnames: [][]string{ -// // {correctBootstrapMetadataL, correctGLL}, -// // {correctBootstrapMetadataWithFailL, correctGLWithFailL}, -// // {correctBootstrapMetadataWith2FailsL, correctGLWith2FailsL}, -// // }, -// // Explainer: "M does not pick the files reserved for L", -// // }, -// // { -// // Ext: "large", Fail: "code", 
ShouldMatch: false, -// // Fnames: [][]string{ -// // {correctBootstrapMetadataM, correctGLM}, -// // {correctBootstrapMetadataWithFailM, correctGLWithFailM}, -// // {correctBootstrapMetadataWith2FailsM, correctGLWith2FailsM}, -// // }, -// // Explainer: "L does not pick the files reserved for M", -// // }, -// // { -// // Ext: "", Fail: "code", ShouldMatch: false, -// // Fnames: [][]string{ -// // {notAPoint}, -// // {badName}, -// // }, -// // Explainer: "M does not pick obviously invalid files", -// // }, -// // { -// // Ext: "large", Fail: "code", ShouldMatch: false, -// // Fnames: [][]string{ -// // {missingBootstrapMetadataEtv, missingGLEtv}, -// // {missingBootstrapMetadataStv, missingGLStv}, -// // {notAPoint}, -// // {badName}, -// // }, -// // Explainer: "L does not pick obviously invalid files", -// // }, -// } - -// for _, c := range testcase { -// conf := config.Config{} -// conf.Version = "0.1.2" -// conf.RndBeacon.CanRunFullLarge = c.Ext == "large" - -// def, err := ExecRndBeaconDefinition(&conf) -// assert.NoError(t, err) - -// t.Run(c.Explainer, func(t *testing.T) { -// runInpFileTestCase(t, def, c) -// }) -// } -// } From a26094205bb4d30ecf5fb5be553037715b90ce1d Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Fri, 14 Feb 2025 14:46:35 +0000 Subject: [PATCH 35/48] refactor controller --- .../cmd/controller/controller/controller.go | 177 +++++------------- .../controller/executor_limitless_test.go | 114 +++++++++++ 2 files changed, 166 insertions(+), 125 deletions(-) diff --git a/prover/cmd/controller/controller/controller.go b/prover/cmd/controller/controller/controller.go index 999bfcaaa..47463786c 100644 --- a/prover/cmd/controller/controller/controller.go +++ b/prover/cmd/controller/controller/controller.go @@ -13,7 +13,7 @@ import ( "github.com/sirupsen/logrus" ) -// function to run the controller +// runController: Runs the controller with the given config func runController(ctx context.Context, cfg *config.Config) { var ( cLog = 
cfg.Logger().WithField("component", "main-loop") @@ -35,13 +35,6 @@ func runController(ctx context.Context, cfg *config.Config) { defer stop() go func() { - // This goroutine's raison d'etre is to log a message immediately when a - // cancellation request (e.g., ctx expiration/cancellation, SIGTERM, etc.) - // is received. It ensures timely logging of the request's reception, - // which may be important for diagnostics. Without this - // goroutine, if the prover is busy with a proof when, for example, a - // SIGTERM is received, there would be no log entry about the signal - // until the proof completes. <-ctx.Done() cLog.Infoln("Received cancellation request, will exit as soon as possible or once current proof task is complete.") }() @@ -49,23 +42,12 @@ func runController(ctx context.Context, cfg *config.Config) { for { select { case <-ctx.Done(): - // Graceful shutdown. - // This case captures both cancellations initiated by the caller - // through ctx and SIGTERM signals. Even if the cancellation - // request is first intercepted by the goroutine at line 34, Go - // allows the ctx.Done channel to be read multiple times, which, in - // our scenario, ensures cancellation requests are effectively - // detected and handled. cLog.Infoln("Context canceled by caller or SIGTERM. 
Exiting") metrics.ShutdownServer(ctx) return - // Processing a new job case <-retryDelay(cfg.Controller.RetryDelays, numRetrySoFar): - // Fetch the best block we can fetch job := fsWatcher.GetBest() - - // No jobs, waiting a little before we retry if job == nil { numRetrySoFar++ noJobFoundMsg := "found no jobs in the queue" @@ -77,123 +59,68 @@ func runController(ctx context.Context, cfg *config.Config) { continue } - // Else, reset the retry counter numRetrySoFar = 0 - - // Run the command (potentially retrying in large mode) status := executor.Run(job) - - // createColumns the job according to the status we got switch { - - // Success case status.ExitCode == CodeSuccess: - for idx := range job.OriginalFile { - // NB: we already check that the response filename can be - // generated prior to running the command. So this actually - // will not panic. - respFile, err := job.ResponseFile(idx) - tmpRespFile := job.TmpResponseFile(cfg, idx) - if err != nil { - formatStr := "Could not generate the response file: %v (original request file: %v)" - utils.Panic(formatStr, err, job.OriginalFile[idx]) - } - - logrus.Infof( - "Moving the response file from the tmp response file `%v`, to the final response file: `%v`", - tmpRespFile, respFile, - ) - - if err := os.Rename(tmpRespFile, respFile); err != nil { - // @Alex: it is unclear how the rename operation could fail - // here. If this happens, we prefer removing the tmp file. 
- // Note that the operation is an `mv -f` - os.Remove(tmpRespFile) - - cLog.Errorf( - "Error renaming %v to %v: %v, removed the tmp file", - tmpRespFile, respFile, err, - ) - } - - // Move the inprogress to the done directory - cLog.Infof( - "Moving %v to %v with the success prefix", - job.OriginalFile[idx], job.Def.dirDone(idx), - ) - - jobDone := job.DoneFile(status, idx) - if err := os.Rename(job.InProgressPath(idx), jobDone); err != nil { - // When that happens, the only thing left to do is to log - // the error and let the inprogress file where it is. It - // will likely require a human intervention. - cLog.Errorf( - "Error renaming %v to %v: %v", - job.InProgressPath(idx), jobDone, err, - ) - } - } - - // Defer to the large prover + handleSuccess(job, cfg, status, cLog) case job.Def.Name == jobNameExecution && isIn(status.ExitCode, cfg.Controller.DeferToOtherLargeCodes): - for idx := range job.OriginalFile { - cLog.Infof("Renaming %v for the large prover", job.OriginalFile[idx]) - // Move the inprogress file back in the from directory with - // the new suffix - toLargePath, err := job.DeferToLargeFile(status, idx) - if err != nil { - // There are two possibilities of errors. (1), the status - // we success but the above cases prevents that. The other - // case is that the suffix was not provided. But, during - // the config validation, we check already that the suffix - // must be provided if the size of the list of - // deferToOtherLargeCodes is non-zero. If the size of the - // list was zero, then there would be no way to reach this - // portion of the code given that the current exit code - // cannot be part of the empty list. Thus, this section is - // unreachable. - cLog.Errorf( - "error deriving the to-large-name of %v: %v", - job.InProgressPath(idx), err, - ) - } - - if err := os.Rename(job.InProgressPath(idx), toLargePath); err != nil { - // When that happens, the only thing left to do is to log - // the error and let the inprogress file where it is. 
It - // will likely require a human intervention. - cLog.Errorf( - "error renaming %v to %v: %v", - job.InProgressPath(idx), toLargePath, err, - ) - } - } - - // Failure case + handleDeferToLarge(job, status, cLog) default: - for idx := range job.OriginalFile { - // Move the inprogress to the done directory - cLog.Infof( - "Moving %v with in %v with a failure suffix for code %v", - job.OriginalFile[idx], job.Def.dirDone(idx), status.ExitCode, - ) - - jobFailed := job.DoneFile(status, idx) - if err := os.Rename(job.InProgressPath(idx), jobFailed); err != nil { - // When that happens, the only thing left to do is to log - // the error and let the inprogress file where it is. It - // will likely require a human intervention. - cLog.Errorf( - "Error renaming %v to %v: %v", - job.InProgressPath(idx), jobFailed, err, - ) - } - } + handleFailure(job, status, cLog) } } } } +func handleSuccess(job *Job, cfg *config.Config, status Status, cLog *logrus.Entry) { + for opIdx := range job.Def.ResponsesRootDir { + respFile, err := job.ResponseFile(opIdx) + tmpRespFile := job.TmpResponseFile(cfg, opIdx) + if err != nil { + utils.Panic("Could not generate the response file: %v (original request file: %v)", err, job.OriginalFile[opIdx]) + } + + logrus.Infof("Moving the response file from the tmp response file `%v`, to the final response file: `%v`", tmpRespFile, respFile) + + if err := os.Rename(tmpRespFile, respFile); err != nil { + os.Remove(tmpRespFile) + cLog.Errorf("Error renaming %v to %v: %v, removed the tmp file", tmpRespFile, respFile, err) + } + } + for ipIdx := range job.OriginalFile { + cLog.Infof("Moving %v to %v with the success prefix", job.OriginalFile[ipIdx], job.Def.dirDone(ipIdx)) + jobDone := job.DoneFile(status, ipIdx) + if err := os.Rename(job.InProgressPath(ipIdx), jobDone); err != nil { + cLog.Errorf("Error renaming %v to %v: %v", job.InProgressPath(ipIdx), jobDone, err) + } + } +} + +func handleDeferToLarge(job *Job, status Status, cLog *logrus.Entry) { + 
for ipIdx := range job.OriginalFile { + cLog.Infof("Renaming %v for the large prover", job.OriginalFile[ipIdx]) + toLargePath, err := job.DeferToLargeFile(status, ipIdx) + if err != nil { + cLog.Errorf("error deriving the to-large-name of %v: %v", job.InProgressPath(ipIdx), err) + } + + if err := os.Rename(job.InProgressPath(ipIdx), toLargePath); err != nil { + cLog.Errorf("error renaming %v to %v: %v", job.InProgressPath(ipIdx), toLargePath, err) + } + } +} + +func handleFailure(job *Job, status Status, cLog *logrus.Entry) { + for ipIdx := range job.OriginalFile { + cLog.Infof("Moving %v with in %v with a failure suffix for code %v", job.OriginalFile[ipIdx], job.Def.dirDone(ipIdx), status.ExitCode) + jobFailed := job.DoneFile(status, ipIdx) + if err := os.Rename(job.InProgressPath(ipIdx), jobFailed); err != nil { + cLog.Errorf("Error renaming %v to %v: %v", job.InProgressPath(ipIdx), jobFailed, err) + } + } +} + // Returns the duration to wait before retrying to find a job in the queue. This // is to avoid spamming the FS with LS queries. 
func retryDelay(retryDelaysSec []int, numRetrySoFar int) <-chan time.Time { diff --git a/prover/cmd/controller/controller/executor_limitless_test.go b/prover/cmd/controller/controller/executor_limitless_test.go index 7783a883c..8a9655218 100644 --- a/prover/cmd/controller/controller/executor_limitless_test.go +++ b/prover/cmd/controller/controller/executor_limitless_test.go @@ -94,3 +94,117 @@ func TestExecBootstrapRetryWithLarge(t *testing.T) { assert.Equalf(t, jobs[i].ExpCode, status.ExitCode, "got status %++v", status) } } + +func TestExecConglomerationRetryWithLarge(t *testing.T) { + // A test command useful for testing the command generation + var testDefinition = JobDefinition{ + // Give a name to the command + Name: jobExecCongolomeration, + + // The template of the output file (returns a constant template with no + // parameters) + OutputFileTmpl: []*template.Template{ + template.Must(template.New("output-file").Parse("output-fill-constant")), + }, + + RequestsRootDir: []string{"./testdata", "./testdata", "./testdata"}, + ResponsesRootDir: []string{"./responses"}, + } + + jobs := []struct { + Job + ExpCode int + }{ + { + Job: Job{ + Def: &testDefinition, + LockedFile: []string{"exit-0.sh", "exit-0.sh", "exit-0.sh"}, + // Not directly needed but helpful to track the process name + Start: []int{0, 0, 0}, + End: []int{0, 0, 0}, + }, + ExpCode: 0, + }, + { + Job: Job{ + Def: &testDefinition, + LockedFile: []string{"exit-1.sh", "exit-1.sh", "exit-1.sh"}, + // Not directly needed but helpful to track the process name + Start: []int{1, 1, 1}, + End: []int{1, 1, 1}, + }, + ExpCode: 1, + }, + { + Job: Job{ + Def: &testDefinition, + LockedFile: []string{"exit-77.sh", "exit-77.sh", "exit-77.sh"}, + // Not directly needed but helpful to track the process name + Start: []int{2, 2, 2}, + End: []int{2, 2, 2}, + }, + ExpCode: 77 + 10, + }, + { + Job: Job{ + Def: &testDefinition, + LockedFile: []string{"sigkill.sh", "sigkill.sh", "sigkill.sh"}, + // Not directly needed but 
helpful to track the process name + Start: []int{3, 3, 3}, + End: []int{3, 3, 3}, + }, + ExpCode: 137, + }, + { + Job: Job{ + Def: &testDefinition, + LockedFile: []string{"sigkill.sh", "exit0.sh", "exit1.sh"}, + // Not directly needed but helpful to track the process name + Start: []int{3, 3, 3}, + End: []int{3, 3, 3}, + }, + ExpCode: 137, + }, + } + + e := NewExecutor(&config.Config{ + Controller: config.Controller{ + WorkerCmdTmpl: template.Must( + template.New("test-cmd"). + Parse(` + for infile in {{range .InFile}} {{.}} {{end}}; do + /bin/sh $infile + CODE=$? + if [ $CODE -ne 0 ]; then + exit $CODE + fi + done + exit 0 + `), + ), + // And the large fields. The commands adds a +10 to the return code + // to leave an evidence that the return code was obtained through + // running the large command. + WorkerCmdLargeTmpl: template.Must( + template.New("test-cmd-large"). + Parse(` + for infile in {{range .InFile}} {{.}} {{end}}; do + /bin/sh $infile + CODE=$? + if [ $CODE -ne 0 ]; then + exit $(($CODE + 10)) + fi + done + exit 0 + `), + ), + + RetryLocallyWithLargeCodes: config.DefaultRetryLocallyWithLargeCodes, + }, + }) + + for i := range jobs { + status := e.Run(&jobs[i].Job) + assert.Equalf(t, jobs[i].ExpCode, status.ExitCode, "got status %++v", status) + } +} From 21eac814240017acdbc58d5b24c9b36c8ae495de Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Fri, 14 Feb 2025 14:54:16 +0000 Subject: [PATCH 36/48] refactor controller comments --- .../cmd/controller/controller/controller.go | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/prover/cmd/controller/controller/controller.go b/prover/cmd/controller/controller/controller.go index 47463786c..68cd3de78 100644 --- a/prover/cmd/controller/controller/controller.go +++ b/prover/cmd/controller/controller/controller.go @@ -34,6 +34,13 @@ func runController(ctx context.Context, cfg *config.Config) { ctx, stop := signal.NotifyContext(ctx, syscall.SIGTERM) defer stop() + // This goroutine's raison 
d'etre is to log a message immediately when a + // cancellation request (e.g., ctx expiration/cancellation, SIGTERM, etc.) + // is received. It ensures timely logging of the request's reception, + // which may be important for diagnostics. Without this + // goroutine, if the prover is busy with a proof when, for example, a + // SIGTERM is received, there would be no log entry about the signal + // until the proof completes. go func() { <-ctx.Done() cLog.Infoln("Received cancellation request, will exit as soon as possible or once current proof task is complete.") @@ -41,11 +48,19 @@ func runController(ctx context.Context, cfg *config.Config) { for { select { + // Graceful shutdown. + // This case captures both cancellations initiated by the caller + // through ctx and SIGTERM signals. Even if the cancellation + // request is first intercepted by the goroutine at line 34, Go + // allows the ctx.Done channel to be read multiple times, which, in + // our scenario, ensures cancellation requests are effectively + // detected and handled. case <-ctx.Done(): cLog.Infoln("Context canceled by caller or SIGTERM. 
Exiting") metrics.ShutdownServer(ctx) return + // Processing a new job case <-retryDelay(cfg.Controller.RetryDelays, numRetrySoFar): job := fsWatcher.GetBest() if job == nil { @@ -62,10 +77,13 @@ func runController(ctx context.Context, cfg *config.Config) { numRetrySoFar = 0 status := executor.Run(job) switch { + // Success case status.ExitCode == CodeSuccess: handleSuccess(job, cfg, status, cLog) + // Defer to the large prover case job.Def.Name == jobNameExecution && isIn(status.ExitCode, cfg.Controller.DeferToOtherLargeCodes): handleDeferToLarge(job, status, cLog) + // Failure case default: handleFailure(job, status, cLog) } @@ -74,6 +92,9 @@ func runController(ctx context.Context, cfg *config.Config) { } func handleSuccess(job *Job, cfg *config.Config, status Status, cLog *logrus.Entry) { + // NB: we already check that the response filename can be + // generated prior to running the command. So this actually + // will not panic. for opIdx := range job.Def.ResponsesRootDir { respFile, err := job.ResponseFile(opIdx) tmpRespFile := job.TmpResponseFile(cfg, opIdx) @@ -83,11 +104,16 @@ func handleSuccess(job *Job, cfg *config.Config, status Status, cLog *logrus.Ent logrus.Infof("Moving the response file from the tmp response file `%v`, to the final response file: `%v`", tmpRespFile, respFile) + // @Alex: it is unclear how the rename operation could fail + // here. If this happens, we prefer removing the tmp file. 
+ // Note that the operation is an `mv -f` if err := os.Rename(tmpRespFile, respFile); err != nil { os.Remove(tmpRespFile) cLog.Errorf("Error renaming %v to %v: %v, removed the tmp file", tmpRespFile, respFile, err) } } + + // Move the inprogress to the done directory for ipIdx := range job.OriginalFile { cLog.Infof("Moving %v to %v with the success prefix", job.OriginalFile[ipIdx], job.Def.dirDone(ipIdx)) jobDone := job.DoneFile(status, ipIdx) @@ -100,12 +126,26 @@ func handleSuccess(job *Job, cfg *config.Config, status Status, cLog *logrus.Ent func handleDeferToLarge(job *Job, status Status, cLog *logrus.Entry) { for ipIdx := range job.OriginalFile { cLog.Infof("Renaming %v for the large prover", job.OriginalFile[ipIdx]) + // Move the inprogress file back in the from directory with the new suffix toLargePath, err := job.DeferToLargeFile(status, ipIdx) if err != nil { + // There are two possibilities of errors. (1), the status + // we success but the above cases prevents that. The other + // case is that the suffix was not provided. But, during + // the config validation, we check already that the suffix + // must be provided if the size of the list of + // deferToOtherLargeCodes is non-zero. If the size of the + // list was zero, then there would be no way to reach this + // portion of the code given that the current exit code + // cannot be part of the empty list. Thus, this section is + // unreachable. cLog.Errorf("error deriving the to-large-name of %v: %v", job.InProgressPath(ipIdx), err) } if err := os.Rename(job.InProgressPath(ipIdx), toLargePath); err != nil { + // When that happens, the only thing left to do is to log + // the error and let the inprogress file where it is. It + // will likely require a human intervention. 
cLog.Errorf("error renaming %v to %v: %v", job.InProgressPath(ipIdx), toLargePath, err) } } @@ -113,9 +153,13 @@ func handleDeferToLarge(job *Job, status Status, cLog *logrus.Entry) { func handleFailure(job *Job, status Status, cLog *logrus.Entry) { for ipIdx := range job.OriginalFile { + // Move the inprogress to the done directory cLog.Infof("Moving %v with in %v with a failure suffix for code %v", job.OriginalFile[ipIdx], job.Def.dirDone(ipIdx), status.ExitCode) jobFailed := job.DoneFile(status, ipIdx) if err := os.Rename(job.InProgressPath(ipIdx), jobFailed); err != nil { + // When that happens, the only thing left to do is to log + // the error and let the inprogress file where it is. It + // will likely require a human intervention. cLog.Errorf("Error renaming %v to %v: %v", job.InProgressPath(ipIdx), jobFailed, err) } } From fff8fb67d4ba26682f6feac34d381eda0b9ae420 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Fri, 14 Feb 2025 16:18:32 +0000 Subject: [PATCH 37/48] add more limitless fw tests --- .../controller/controller_limitless_test.go | 1 + .../cmd/controller/controller/controller_test.go | 6 +++--- prover/cmd/controller/controller/fs_watcher.go | 1 - .../controller/fs_watcher_limitless_test.go | 14 ++++++++++++-- .../cmd/controller/controller/fs_watcher_test.go | 1 - .../controller/job_definition_limitless.go | 1 - 6 files changed, 16 insertions(+), 8 deletions(-) create mode 100644 prover/cmd/controller/controller/controller_limitless_test.go diff --git a/prover/cmd/controller/controller/controller_limitless_test.go b/prover/cmd/controller/controller/controller_limitless_test.go new file mode 100644 index 000000000..b0b429f89 --- /dev/null +++ b/prover/cmd/controller/controller/controller_limitless_test.go @@ -0,0 +1 @@ +package controller diff --git a/prover/cmd/controller/controller/controller_test.go b/prover/cmd/controller/controller/controller_test.go index 216b2a10a..63b6b8150 100644 --- a/prover/cmd/controller/controller/controller_test.go +++ 
b/prover/cmd/controller/controller/controller_test.go @@ -30,7 +30,7 @@ func TestRunCommand(t *testing.T) { // Populate the filesystem with job files - // execution + // Execution createTestInputFile(eFrom, 0, 1, execJob, exit0) createTestInputFile(eFrom, 1, 2, execJob, exit12, forLarge) createTestInputFile(eFrom, 2, 3, execJob, exit77) @@ -42,13 +42,13 @@ func TestRunCommand(t *testing.T) { createTestInputFile(eFrom, 8, 9, execJob, exit10) createTestInputFile(eFrom, 9, 10, execJob, exit12) - // compression + // Compression createTestInputFile(cFrom, 0, 2, compressionJob, exit0) createTestInputFile(cFrom, 2, 4, compressionJob, exit2) createTestInputFile(cFrom, 4, 6, compressionJob, exit77) createTestInputFile(cFrom, 6, 8, compressionJob, exit137) - // aggregation + // Aggregation createTestInputFile(aFrom, 0, 2, aggregationJob, exit0) createTestInputFile(aFrom, 2, 4, aggregationJob, exit2) createTestInputFile(aFrom, 4, 6, aggregationJob, exit77) diff --git a/prover/cmd/controller/controller/fs_watcher.go b/prover/cmd/controller/controller/fs_watcher.go index 965f1759c..9ddb8aa57 100644 --- a/prover/cmd/controller/controller/fs_watcher.go +++ b/prover/cmd/controller/controller/fs_watcher.go @@ -176,7 +176,6 @@ func (fs *FsWatcher) appendMultiInputJobFromDef(jdef *JobDefinition, jobs *[]*Jo // Try appending a list of single-input jobs that are parsed from a given directory. // An error is returned if the function fails to read the directory. 
func (fs *FsWatcher) appendJobFromDef(jdef *JobDefinition, jobs *[]*Job, numsMatched *int) (err error) { - // ASSUMED 0 index here for jobs with only single inputs dirFrom := jdef.dirFrom(0) fs.Logger.Tracef("Seeking jobs for %v in %v", jdef.Name, dirFrom) diff --git a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go index b3f72397d..c12e41364 100644 --- a/prover/cmd/controller/controller/fs_watcher_limitless_test.go +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -10,7 +10,7 @@ import ( "text/template" "github.com/consensys/linea-monorepo/prover/config" - "github.com/consensys/linea-monorepo/prover/utils" + "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" ) @@ -42,6 +42,7 @@ func TestLimitlessProverFileWatcherL(t *testing.T) { } // The jobs, declared in the order in which they are expected to be found + // NOTE: It is important to test for the same starting and ending block ranges expectedFNames := []struct { FName []string Skip bool @@ -61,6 +62,14 @@ func TestLimitlessProverFileWatcherL(t *testing.T) { { FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, Conglomeration, exitCode), }, + { + Skip: true, // not large + FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, Conglomeration, exitCode), + }, + { + Skip: true, // wrong dir + FName: createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, Conglomeration, exitCode), + }, } fw := NewFsWatcher(confL) @@ -358,7 +367,8 @@ func createLimitlessTestInputFiles( m, n := len(dirFrom), len(fmtStrArr) if m != n { - utils.Panic("number of entries in dirFrom:%d should match with the number of formated input files:%d", m, n) + logrus.Debugf("number of entries in dirFrom:%d should match with the number of formated input files:%d", m, n) + return nil } fnames = make([]string, len(fmtStrArr)) diff --git a/prover/cmd/controller/controller/fs_watcher_test.go 
b/prover/cmd/controller/controller/fs_watcher_test.go index 3de737601..1e5303e6a 100644 --- a/prover/cmd/controller/controller/fs_watcher_test.go +++ b/prover/cmd/controller/controller/fs_watcher_test.go @@ -147,7 +147,6 @@ func TestFileWatcherL(t *testing.T) { } fw := NewFsWatcher(confL) - for _, f := range expectedFNames { if f.Skip { continue diff --git a/prover/cmd/controller/controller/job_definition_limitless.go b/prover/cmd/controller/controller/job_definition_limitless.go index 9036d614c..c413b6691 100644 --- a/prover/cmd/controller/controller/job_definition_limitless.go +++ b/prover/cmd/controller/controller/job_definition_limitless.go @@ -135,7 +135,6 @@ func ExecConglomerationDefinition(conf *config.Config) (*JobDefinition, error) { } // Input files - //reqDirs := utils.CombineRequests(conf.ExecConglomeration.BootstrapMetadata.RequestsRootDir, conf.ExecConglomeration.GL.RequestsRootDir, conf.ExecConglomeration.LPP.RequestsRootDir) reqDirs := conf.ExecConglomeration.RequestsRootDir inputFilePatterns := []string{ fmt.Sprintf(execConglomerateBootstrapDistMetadataPattern, inpFileExt, config.FailSuffix), From 05e158ba3cc9406185971af89e24342d5d8b3dad Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Sun, 16 Feb 2025 07:15:03 +0000 Subject: [PATCH 38/48] init limitless controller test --- .../controller/controller_limitless_test.go | 61 +++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/prover/cmd/controller/controller/controller_limitless_test.go b/prover/cmd/controller/controller/controller_limitless_test.go index b0b429f89..4463f58b0 100644 --- a/prover/cmd/controller/controller/controller_limitless_test.go +++ b/prover/cmd/controller/controller/controller_limitless_test.go @@ -1 +1,62 @@ package controller + +// func TestLimitlessRun(t *testing.T) { +// var ( +// exit0 int = 0 +// // exit2 int = 2 +// // exit10 int = 10 +// // exit12 int = 12 +// // exit77 int = 77 +// // exit137 int = 137 +// ) + +// _, confL := setupLimitlessFsTest(t) + +// 
// Dirs +// execBootstrapFrom := []string{confL.ExecBootstrap.DirFrom(0)} + +// // Populate the filesystem with job files + +// // Bootstrap +// createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, Bootstrap, exit0) +// // createLimitlessTestInputFiles(execBootstrapFrom, 1, 2, execBootstrapPriority, exit12, forLarge) +// // createLimitlessTestInputFiles(execBootstrapFrom, 2, 3, execBootstrapPriority, exit77) +// // createLimitlessTestInputFiles(execBootstrapFrom, 3, 4, execBootstrapPriority, exit77, forLarge) +// // createLimitlessTestInputFiles(execBootstrapFrom, 4, 5, execBootstrapPriority, exit137) +// // createLimitlessTestInputFiles(execBootstrapFrom, 5, 6, execBootstrapPriority, exit137, forLarge) +// // createLimitlessTestInputFiles(execBootstrapFrom, 6, 7, execBootstrapPriority, exit2) +// // createLimitlessTestInputFiles(execBootstrapFrom, 7, 8, execBootstrapPriority, exit2) +// // createLimitlessTestInputFiles(execBootstrapFrom, 8, 9, execBootstrapPriority, exit10) +// // createLimitlessTestInputFiles(execBootstrapFrom, 9, 10, execBootstrapPriority, exit12) + +// ctxL, stopL := context.WithCancel(context.Background()) + +// go runController(ctxL, confL) + +// // Give one sec, for the test to complete +// <-time.After(4 * time.Second) + +// // Shutdown the controller +// stopL() + +// expectedStructure := []struct { +// Path []string +// Entries [][]string +// }{ +// { +// Path: []string{confL.ExecBootstrap.DirFrom(0)}, +// Entries: [][]string{}, // all files should be processed +// }, +// } + +// for _, dirVal := range expectedStructure { +// for i, dirPath := range dirVal.Path { +// dir, err := os.Open(dirPath) +// require.NoErrorf(t, err, "dir %v", dirPath) +// filesFound, err := dir.Readdirnames(-1) +// require.NoErrorf(t, err, "dir %v", dirPath) +// slices.Sort(filesFound) +// assert.Equalf(t, dirVal.Entries[i], filesFound, "dir %v", dirVal.Path) +// } +// } +// } From ee60b9d10b48a7f2f763c12efb1203a7667c1481 Mon Sep 17 00:00:00 2001 From: 
srinathln7 Date: Sun, 16 Feb 2025 08:45:44 +0000 Subject: [PATCH 39/48] commit progress in limitless controller testing --- .../cmd/controller/controller/controller.go | 7 +- .../controller/controller_limitless_test.go | 157 +++++++++++------- .../controller/fs_watcher_limitless_test.go | 147 ++++++++-------- prover/cmd/controller/controller/jobs.go | 3 +- prover/cmd/controller/controller/tmp_test.go | 98 +++++++++++ 5 files changed, 279 insertions(+), 133 deletions(-) create mode 100644 prover/cmd/controller/controller/tmp_test.go diff --git a/prover/cmd/controller/controller/controller.go b/prover/cmd/controller/controller/controller.go index 68cd3de78..0299fd1df 100644 --- a/prover/cmd/controller/controller/controller.go +++ b/prover/cmd/controller/controller/controller.go @@ -81,7 +81,12 @@ func runController(ctx context.Context, cfg *config.Config) { case status.ExitCode == CodeSuccess: handleSuccess(job, cfg, status, cLog) // Defer to the large prover - case job.Def.Name == jobNameExecution && isIn(status.ExitCode, cfg.Controller.DeferToOtherLargeCodes): + case job.Def.Name == jobNameExecution && isIn(status.ExitCode, cfg.Controller.DeferToOtherLargeCodes) || + job.Def.Name == jobExecBootstrap && isIn(status.ExitCode, cfg.Controller.DeferToOtherLargeCodes) || + job.Def.Name == jobExecGL && isIn(status.ExitCode, cfg.Controller.DeferToOtherLargeCodes) || + job.Def.Name == jobExecRndBeacon && isIn(status.ExitCode, cfg.Controller.DeferToOtherLargeCodes) || + job.Def.Name == jobExecLPP && isIn(status.ExitCode, cfg.Controller.DeferToOtherLargeCodes) || + job.Def.Name == jobExecCongolomeration && isIn(status.ExitCode, cfg.Controller.DeferToOtherLargeCodes): handleDeferToLarge(job, status, cLog) // Failure case default: diff --git a/prover/cmd/controller/controller/controller_limitless_test.go b/prover/cmd/controller/controller/controller_limitless_test.go index 4463f58b0..490a3abd2 100644 --- a/prover/cmd/controller/controller/controller_limitless_test.go +++ 
b/prover/cmd/controller/controller/controller_limitless_test.go @@ -1,62 +1,99 @@ package controller -// func TestLimitlessRun(t *testing.T) { -// var ( -// exit0 int = 0 -// // exit2 int = 2 -// // exit10 int = 10 -// // exit12 int = 12 -// // exit77 int = 77 -// // exit137 int = 137 -// ) - -// _, confL := setupLimitlessFsTest(t) - -// // Dirs -// execBootstrapFrom := []string{confL.ExecBootstrap.DirFrom(0)} - -// // Populate the filesystem with job files - -// // Bootstrap -// createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, Bootstrap, exit0) -// // createLimitlessTestInputFiles(execBootstrapFrom, 1, 2, execBootstrapPriority, exit12, forLarge) -// // createLimitlessTestInputFiles(execBootstrapFrom, 2, 3, execBootstrapPriority, exit77) -// // createLimitlessTestInputFiles(execBootstrapFrom, 3, 4, execBootstrapPriority, exit77, forLarge) -// // createLimitlessTestInputFiles(execBootstrapFrom, 4, 5, execBootstrapPriority, exit137) -// // createLimitlessTestInputFiles(execBootstrapFrom, 5, 6, execBootstrapPriority, exit137, forLarge) -// // createLimitlessTestInputFiles(execBootstrapFrom, 6, 7, execBootstrapPriority, exit2) -// // createLimitlessTestInputFiles(execBootstrapFrom, 7, 8, execBootstrapPriority, exit2) -// // createLimitlessTestInputFiles(execBootstrapFrom, 8, 9, execBootstrapPriority, exit10) -// // createLimitlessTestInputFiles(execBootstrapFrom, 9, 10, execBootstrapPriority, exit12) - -// ctxL, stopL := context.WithCancel(context.Background()) - -// go runController(ctxL, confL) - -// // Give one sec, for the test to complete -// <-time.After(4 * time.Second) - -// // Shutdown the controller -// stopL() - -// expectedStructure := []struct { -// Path []string -// Entries [][]string -// }{ -// { -// Path: []string{confL.ExecBootstrap.DirFrom(0)}, -// Entries: [][]string{}, // all files should be processed -// }, -// } - -// for _, dirVal := range expectedStructure { -// for i, dirPath := range dirVal.Path { -// dir, err := os.Open(dirPath) -// 
require.NoErrorf(t, err, "dir %v", dirPath) -// filesFound, err := dir.Readdirnames(-1) -// require.NoErrorf(t, err, "dir %v", dirPath) -// slices.Sort(filesFound) -// assert.Equalf(t, dirVal.Entries[i], filesFound, "dir %v", dirVal.Path) -// } -// } -// } +import ( + "context" + "os" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/exp/slices" +) + +func TestLimitlessRun(t *testing.T) { + var ( + exit0 int = 0 + // exit2 int = 2 + // exit10 int = 10 + exit12 int = 12 + exit77 int = 77 + // exit137 int = 137 + ) + + confM, confL := setupLimitlessFsTest(t) + + // Dirs + execBootstrapFrom := []string{confL.ExecBootstrap.DirFrom(0)} + + // Populate the filesystem with job files + + // Bootstrap + createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, execBootstrapPriority, exit0) + createLimitlessTestInputFiles(execBootstrapFrom, 1, 2, execBootstrapPriority, exit12, forLarge) + createLimitlessTestInputFiles(execBootstrapFrom, 2, 3, execBootstrapPriority, exit77) + // createLimitlessTestInputFiles(execBootstrapFrom, 3, 4, execBootstrapPriority, exit77, forLarge) + // createLimitlessTestInputFiles(execBootstrapFrom, 4, 5, execBootstrapPriority, exit137) + // createLimitlessTestInputFiles(execBootstrapFrom, 5, 6, execBootstrapPriority, exit137, forLarge) + // createLimitlessTestInputFiles(execBootstrapFrom, 6, 7, execBootstrapPriority, exit2) + // createLimitlessTestInputFiles(execBootstrapFrom, 7, 8, execBootstrapPriority, exit2) + // createLimitlessTestInputFiles(execBootstrapFrom, 8, 9, execBootstrapPriority, exit10) + // createLimitlessTestInputFiles(execBootstrapFrom, 9, 10, execBootstrapPriority, exit12) + + ctxM, stopM := context.WithCancel(context.Background()) + ctxL, stopL := context.WithCancel(context.Background()) + + go runController(ctxM, confM) + go runController(ctxL, confL) + + // For Debug mode only + // runController(ctxL, confL) + + // Wait for a few secs, for the test to complete + 
<-time.After(2 * time.Second) + + // Shutdown the controller + stopM() + stopL() + + expectedStructure := []struct { + Path string + Entries []string + }{ + { + Path: confL.ExecBootstrap.DirFrom(0), + Entries: []string{}, // all files should be processed + }, + { + Path: confL.ExecBootstrap.DirDone(0), + Entries: []string{ + "0-1-etv0.1.2-stv1.2.3-getZkProof.json.success", + "1-2-etv0.1.2-stv1.2.3-getZkProof.json.large.success", + "2-3-etv0.1.2-stv1.2.3-getZkProof.json.failure.code_67", + }, + }, + { + Path: confL.ExecBootstrap.DirTo(0), + Entries: []string{ + "0-1-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json", + "1-2-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json", + }, + }, + { + Path: confL.ExecBootstrap.DirTo(1), + Entries: []string{ + "0-1-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", + "1-2-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", + }, + }, + } + + for _, dirVal := range expectedStructure { + dir, err := os.Open(dirVal.Path) + require.NoErrorf(t, err, "dir %v", dirVal.Path) + filesFound, err := dir.Readdirnames(-1) + require.NoErrorf(t, err, "dir %v", dirVal.Path) + slices.Sort(filesFound) + assert.Equalf(t, dirVal.Entries, filesFound, "dir %v", dirVal.Path) + } +} diff --git a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go index c12e41364..5005bf422 100644 --- a/prover/cmd/controller/controller/fs_watcher_limitless_test.go +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -15,11 +15,11 @@ import ( ) const ( - Bootstrap int = iota - GL - RndBeacon - LPP - Conglomeration + execBootstrapPriority int = iota + execGLPriority + execRndBeaconPriority + execLPPPriority + execConglomerationPriority ) func TestLimitlessProverFileWatcherL(t *testing.T) { @@ -48,27 +48,27 @@ func TestLimitlessProverFileWatcherL(t *testing.T) { Skip bool }{ { - FName: createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, Bootstrap, 
exitCode), + FName: createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, execBootstrapPriority, exitCode), }, { - FName: createLimitlessTestInputFiles(execGLFrom, 0, 1, GL, exitCode), + FName: createLimitlessTestInputFiles(execGLFrom, 0, 1, execGLPriority, exitCode), }, { - FName: createLimitlessTestInputFiles(execRndBeaconFrom, 0, 1, RndBeacon, exitCode), + FName: createLimitlessTestInputFiles(execRndBeaconFrom, 0, 1, execRndBeaconPriority, exitCode), }, { - FName: createLimitlessTestInputFiles(execLPPFrom, 0, 1, LPP, exitCode), + FName: createLimitlessTestInputFiles(execLPPFrom, 0, 1, execLPPPriority, exitCode), }, { - FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, Conglomeration, exitCode), + FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, execConglomerationPriority, exitCode), }, { Skip: true, // not large - FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, Conglomeration, exitCode), + FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, execConglomerationPriority, exitCode), }, { Skip: true, // wrong dir - FName: createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, Conglomeration, exitCode), + FName: createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, execConglomerationPriority, exitCode), }, } @@ -117,11 +117,10 @@ func setupLimitlessFsTest(t *testing.T) (confM, confL *config.Config) { execConglomeration = "execution" ) - /* - // Create a configuration using temporary directories - // Defines three command templates for different types of jobs. - // These templates will be used to create shell commands for the worker processes. - cmd := ` + // Create a configuration using temporary directories + // Defines three command templates for different types of jobs. + // These templates will be used to create shell commands for the worker processes. + cmd := ` /bin/sh {{index .InFile 0}} CODE=$? 
if [ $CODE -eq 0 ]; then @@ -129,7 +128,7 @@ func setupLimitlessFsTest(t *testing.T) (confM, confL *config.Config) { fi exit $CODE ` - cmdLarge := ` + cmdLarge := ` /bin/sh {{index .InFile 0}} CODE=$? CODE=$(($CODE - 12)) @@ -139,7 +138,7 @@ func setupLimitlessFsTest(t *testing.T) (confM, confL *config.Config) { exit $CODE ` - cmdLargeInternal := ` + cmdLargeInternal := ` /bin/sh {{index .InFile 0}} CODE=$? CODE=$(($CODE - 10)) @@ -148,55 +147,55 @@ func setupLimitlessFsTest(t *testing.T) (confM, confL *config.Config) { fi exit $CODE ` - */ - // Create a configuration using temporary directories - // Defines three command templates for different types of jobs. - // These templates will be used to create shell commands for the worker processes. - cmd := ` - for infile in {{range .InFile}} {{.}} {{end}}; do - /bin/sh $infile - CODE=$? - if [ $CODE -ne 0 ]; then - exit $CODE - fi - done - for outfile in {{range .OutFile}} {{.}} {{end}}; do - touch $outfile - done - exit 0 - ` - - cmdLarge := ` - for infile in {{range .InFile}} {{.}} {{end}}; do - /bin/sh $infile - CODE=$? - CODE=$(($CODE - 12)) - if [ $CODE -ne 0 ]; then - exit $CODE - fi - done - for outfile in {{range .OutFile}} {{.}} {{end}}; do - touch $outfile - done - exit 0 - ` + /* + // Create a configuration using temporary directories + // Defines three command templates for different types of jobs. + // These templates will be used to create shell commands for the worker processes. + cmd := ` + for infile in {{range .InFile}} {{.}} {{end}}; do + /bin/sh $infile + CODE=$? + if [ $CODE -ne 0 ]; then + exit $CODE + fi + done + for outfile in {{range .OutFile}} {{.}} {{end}}; do + touch $outfile + done + exit 0 + ` - cmdLargeInternal := ` - for infile in {{range .InFile}} {{.}} {{end}}; do - /bin/sh $infile - CODE=$? 
- CODE=$(($CODE - 10)) - if [ $CODE -ne 0]; then - exit $CODE - fi - done - for outfile in {{range .OutFile}} {{.}} {{end}}; do - touch $outfile - done - exit 0 - ` + cmdLarge := ` + for infile in {{range .InFile}} {{.}} {{end}}; do + /bin/sh $infile + CODE=$? + CODE=$(($CODE - 12)) + if [ $CODE -ne 0 ]; then + exit $CODE + fi + done + for outfile in {{range .OutFile}} {{.}} {{end}}; do + touch $outfile + done + exit 0 + ` + cmdLargeInternal := ` + for infile in {{range .InFile}} {{.}} {{end}}; do + /bin/sh $infile + CODE=$? + CODE=$(($CODE - 10)) + if [ $CODE -ne 0]; then + exit $CODE + fi + done + for outfile in {{range .OutFile}} {{.}} {{end}}; do + touch $outfile + done + exit 0 + ` + */ // For a prover M confM = &config.Config{ Version: "0.2.4", @@ -278,7 +277,13 @@ func setupLimitlessFsTest(t *testing.T) (confM, confL *config.Config) { confL = &_confL confL.Controller.LocalID = proverL confL.Controller.WorkerCmdLarge = cmdLarge - confL.Execution.CanRunFullLarge = true + + // Allow Limitless job to run in large mode + confL.ExecBootstrap.CanRunFullLarge = true + confL.ExecGL.CanRunFullLarge = true + confL.ExecRndBeacon.CanRunFullLarge = true + confL.ExecLPP.CanRunFullLarge = true + confL.ExecConglomeration.CanRunFullLarge = true // ensure the template are parsed confM.Controller.WorkerCmdTmpl = template.Must(template.New("worker").Parse(confM.Controller.WorkerCmd)) @@ -348,16 +353,16 @@ func createLimitlessTestInputFiles( // the job definition. 
var fmtStrArr []string switch jobType { - case Bootstrap: + case execBootstrapPriority: fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof.json"} - case GL: + case execGLPriority: fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json"} - case RndBeacon: + case execRndBeaconPriority: fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", "%v-%v-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json"} - case LPP: + case execLPPPriority: fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json"} - case Conglomeration: + case execConglomerationPriority: fmtStrArr = []string{"%v-%v-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", "%v-%v-etv0.1.2-stv1.2.3-getZkProof_GL.json", "%v-%v-etv0.1.2-stv1.2.3-getZkProof_LPP.json"} diff --git a/prover/cmd/controller/controller/jobs.go b/prover/cmd/controller/controller/jobs.go index 4e0140ea2..b072b7feb 100644 --- a/prover/cmd/controller/controller/jobs.go +++ b/prover/cmd/controller/controller/jobs.go @@ -45,6 +45,8 @@ type Job struct { // file. type OutputFileResource struct { Job Job + + // TODO: Remove this attribute. 
Not required and change the regex patterns defined in the job definition Idx int } @@ -123,7 +125,6 @@ func (j *Job) ResponseFile(opIdx int) (s string, err error) { w := &strings.Builder{} err = j.Def.OutputFileTmpl[opIdx].Execute(w, OutputFileResource{ Job: *j, - Idx: opIdx, }) if err != nil { return "", err diff --git a/prover/cmd/controller/controller/tmp_test.go b/prover/cmd/controller/controller/tmp_test.go new file mode 100644 index 000000000..f033a8814 --- /dev/null +++ b/prover/cmd/controller/controller/tmp_test.go @@ -0,0 +1,98 @@ +package controller + +import ( + "context" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "golang.org/x/exp/slices" +) + +func TestTmpLimitlessRun(t *testing.T) { + var ( + // exit0 int = 0 + // exit2 int = 2 + // exit10 int = 10 + // exit12 int = 12 + exit77 int = 77 + // exit137 int = 137 + ) + + _, confL := setupLimitlessFsTest(t) + + // Dirs + execBootstrapFrom := []string{confL.ExecBootstrap.DirFrom(0)} + + // Populate the filesystem with job files + + // Bootstrap + // createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, execBootstrapPriority, exit0) + // createLimitlessTestInputFiles(execBootstrapFrom, 1, 2, execBootstrapPriority, exit12, forLarge) + createLimitlessTestInputFiles(execBootstrapFrom, 2, 3, execBootstrapPriority, exit77) + // createLimitlessTestInputFiles(execBootstrapFrom, 3, 4, execBootstrapPriority, exit77, forLarge) + // createLimitlessTestInputFiles(execBootstrapFrom, 4, 5, execBootstrapPriority, exit137) + // createLimitlessTestInputFiles(execBootstrapFrom, 5, 6, execBootstrapPriority, exit137, forLarge) + // createLimitlessTestInputFiles(execBootstrapFrom, 6, 7, execBootstrapPriority, exit2) + // createLimitlessTestInputFiles(execBootstrapFrom, 7, 8, execBootstrapPriority, exit2) + // createLimitlessTestInputFiles(execBootstrapFrom, 8, 9, execBootstrapPriority, exit10) + // createLimitlessTestInputFiles(execBootstrapFrom, 9, 10, 
execBootstrapPriority, exit12) + + // ctxM, stopM := context.WithCancel(context.Background()) + ctxL, stopL := context.WithCancel(context.Background()) + + // go runController(ctxM, confM) + // go runController(ctxL, confL) + + // For Debug mode only + runController(ctxL, confL) + + // Wait for a few secs, for the test to complete + // <-time.After(2 * time.Second) + + // Shutdown the controller + // stopM() + stopL() + + expectedStructure := []struct { + Path string + Entries []string + }{ + { + Path: confL.ExecBootstrap.DirFrom(0), + Entries: []string{}, // all files should be processed + }, + { + Path: confL.ExecBootstrap.DirDone(0), + Entries: []string{ + // "0-1-etv0.1.2-stv1.2.3-getZkProof.json.success", + // "1-2-etv0.1.2-stv1.2.3-getZkProof.json.large.success", + "2-3-etv0.1.2-stv1.2.3-getZkProof.json.failure.code_67", + }, + }, + // { + // Path: confL.ExecBootstrap.DirTo(0), + // Entries: []string{ + // "0-1-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json", + // "1-2-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json", + // }, + // }, + // { + // Path: confL.ExecBootstrap.DirTo(1), + // Entries: []string{ + // "0-1-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", + // "1-2-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", + // }, + // }, + } + + for _, dirVal := range expectedStructure { + dir, err := os.Open(dirVal.Path) + require.NoErrorf(t, err, "dir %v", dirVal.Path) + filesFound, err := dir.Readdirnames(-1) + require.NoErrorf(t, err, "dir %v", dirVal.Path) + slices.Sort(filesFound) + assert.Equalf(t, dirVal.Entries, filesFound, "dir %v", dirVal.Path) + } +} From 11cdcd48bed497e8cf58a7040198d0c8d779dc50 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Mon, 17 Feb 2025 06:56:12 +0000 Subject: [PATCH 40/48] controller unit tests for bootstrap successful --- .../controller/controller_limitless_test.go | 39 +++--- .../controller/fs_watcher_limitless_test.go | 111 +++++++++--------- 2 files changed, 79 insertions(+), 71 
deletions(-) diff --git a/prover/cmd/controller/controller/controller_limitless_test.go b/prover/cmd/controller/controller/controller_limitless_test.go index 490a3abd2..ace145227 100644 --- a/prover/cmd/controller/controller/controller_limitless_test.go +++ b/prover/cmd/controller/controller/controller_limitless_test.go @@ -13,12 +13,12 @@ import ( func TestLimitlessRun(t *testing.T) { var ( - exit0 int = 0 - // exit2 int = 2 - // exit10 int = 10 - exit12 int = 12 - exit77 int = 77 - // exit137 int = 137 + exit0 int = 0 + exit2 int = 2 + exit10 int = 10 + exit12 int = 12 + exit77 int = 77 + exit137 int = 137 ) confM, confL := setupLimitlessFsTest(t) @@ -32,13 +32,13 @@ func TestLimitlessRun(t *testing.T) { createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, execBootstrapPriority, exit0) createLimitlessTestInputFiles(execBootstrapFrom, 1, 2, execBootstrapPriority, exit12, forLarge) createLimitlessTestInputFiles(execBootstrapFrom, 2, 3, execBootstrapPriority, exit77) - // createLimitlessTestInputFiles(execBootstrapFrom, 3, 4, execBootstrapPriority, exit77, forLarge) - // createLimitlessTestInputFiles(execBootstrapFrom, 4, 5, execBootstrapPriority, exit137) - // createLimitlessTestInputFiles(execBootstrapFrom, 5, 6, execBootstrapPriority, exit137, forLarge) - // createLimitlessTestInputFiles(execBootstrapFrom, 6, 7, execBootstrapPriority, exit2) - // createLimitlessTestInputFiles(execBootstrapFrom, 7, 8, execBootstrapPriority, exit2) - // createLimitlessTestInputFiles(execBootstrapFrom, 8, 9, execBootstrapPriority, exit10) - // createLimitlessTestInputFiles(execBootstrapFrom, 9, 10, execBootstrapPriority, exit12) + createLimitlessTestInputFiles(execBootstrapFrom, 3, 4, execBootstrapPriority, exit77, forLarge) + createLimitlessTestInputFiles(execBootstrapFrom, 4, 5, execBootstrapPriority, exit137) + createLimitlessTestInputFiles(execBootstrapFrom, 5, 6, execBootstrapPriority, exit137, forLarge) + createLimitlessTestInputFiles(execBootstrapFrom, 6, 7, 
execBootstrapPriority, exit2) + createLimitlessTestInputFiles(execBootstrapFrom, 7, 8, execBootstrapPriority, exit2) + createLimitlessTestInputFiles(execBootstrapFrom, 8, 9, execBootstrapPriority, exit10) + createLimitlessTestInputFiles(execBootstrapFrom, 9, 10, execBootstrapPriority, exit12) ctxM, stopM := context.WithCancel(context.Background()) ctxL, stopL := context.WithCancel(context.Background()) @@ -50,7 +50,7 @@ func TestLimitlessRun(t *testing.T) { // runController(ctxL, confL) // Wait for a few secs, for the test to complete - <-time.After(2 * time.Second) + <-time.After(4 * time.Second) // Shutdown the controller stopM() @@ -70,6 +70,13 @@ func TestLimitlessRun(t *testing.T) { "0-1-etv0.1.2-stv1.2.3-getZkProof.json.success", "1-2-etv0.1.2-stv1.2.3-getZkProof.json.large.success", "2-3-etv0.1.2-stv1.2.3-getZkProof.json.failure.code_67", + "3-4-etv0.1.2-stv1.2.3-getZkProof.json.large.failure.code_65", + "4-5-etv0.1.2-stv1.2.3-getZkProof.json.large.failure.code_125", + "5-6-etv0.1.2-stv1.2.3-getZkProof.json.large.failure.code_125", + "6-7-etv0.1.2-stv1.2.3-getZkProof.json.failure.code_2", + "7-8-etv0.1.2-stv1.2.3-getZkProof.json.failure.code_2", + "8-9-etv0.1.2-stv1.2.3-getZkProof.json.success", + "9-10-etv0.1.2-stv1.2.3-getZkProof.json.large.success", }, }, { @@ -77,6 +84,8 @@ func TestLimitlessRun(t *testing.T) { Entries: []string{ "0-1-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json", "1-2-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json", + "8-9-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json", + "9-10-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json", }, }, { @@ -84,6 +93,8 @@ func TestLimitlessRun(t *testing.T) { Entries: []string{ "0-1-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", "1-2-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", + "8-9-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", + "9-10-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", }, }, } diff --git 
a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go index 5005bf422..a725223d2 100644 --- a/prover/cmd/controller/controller/fs_watcher_limitless_test.go +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -117,85 +117,82 @@ func setupLimitlessFsTest(t *testing.T) (confM, confL *config.Config) { execConglomeration = "execution" ) - // Create a configuration using temporary directories - // Defines three command templates for different types of jobs. - // These templates will be used to create shell commands for the worker processes. + /* + // Create a configuration using temporary directories + // Defines three command templates for different types of jobs. + // These templates will be used to create shell commands for the worker processes. + cmd := ` + /bin/sh {{index .InFile 0}} + CODE=$? + if [ $CODE -eq 0 ]; then + touch {{index .OutFile 0}} + touch {{index .OutFile 1}} + fi + exit $CODE + ` + cmdLarge := ` + /bin/sh {{index .InFile 0}} + CODE=$? + CODE=$(($CODE - 12)) + if [ $CODE -eq 0 ]; then + touch {{index .OutFile 0}} + touch {{index .OutFile 1}} + fi + exit $CODE + ` + + cmdLargeInternal := ` + /bin/sh {{index .InFile 0}} + CODE=$? + CODE=$(($CODE - 10)) + if [ $CODE -eq 0 ]; then + touch {{index .OutFile 0}} + touch {{index .OutFile 1}} + fi + exit $CODE + ` + */ + cmd := ` - /bin/sh {{index .InFile 0}} + {{- range .InFile }} + /bin/sh {{ . }} + {{- end }} CODE=$? if [ $CODE -eq 0 ]; then - touch {{index .OutFile 0}} + {{- range .OutFile }} + touch {{ . }} + {{- end }} fi exit $CODE ` cmdLarge := ` - /bin/sh {{index .InFile 0}} + {{- range .InFile }} + /bin/sh {{ . }} + {{- end }} CODE=$? CODE=$(($CODE - 12)) if [ $CODE -eq 0 ]; then - touch {{index .OutFile 0}} + {{- range .OutFile }} + touch {{ . }} + {{- end }} fi exit $CODE ` cmdLargeInternal := ` - /bin/sh {{index .InFile 0}} + {{- range .InFile }} + /bin/sh {{ . }} + {{- end }} CODE=$? 
CODE=$(($CODE - 10)) if [ $CODE -eq 0 ]; then - touch {{index .OutFile 0}} + {{- range .OutFile }} + touch {{ . }} + {{- end }} fi exit $CODE ` - /* - // Create a configuration using temporary directories - // Defines three command templates for different types of jobs. - // These templates will be used to create shell commands for the worker processes. - cmd := ` - for infile in {{range .InFile}} {{.}} {{end}}; do - /bin/sh $infile - CODE=$? - if [ $CODE -ne 0 ]; then - exit $CODE - fi - done - for outfile in {{range .OutFile}} {{.}} {{end}}; do - touch $outfile - done - exit 0 - ` - - cmdLarge := ` - for infile in {{range .InFile}} {{.}} {{end}}; do - /bin/sh $infile - CODE=$? - CODE=$(($CODE - 12)) - if [ $CODE -ne 0 ]; then - exit $CODE - fi - done - for outfile in {{range .OutFile}} {{.}} {{end}}; do - touch $outfile - done - exit 0 - ` - - cmdLargeInternal := ` - for infile in {{range .InFile}} {{.}} {{end}}; do - /bin/sh $infile - CODE=$? - CODE=$(($CODE - 10)) - if [ $CODE -ne 0]; then - exit $CODE - fi - done - for outfile in {{range .OutFile}} {{.}} {{end}}; do - touch $outfile - done - exit 0 - ` - */ // For a prover M confM = &config.Config{ Version: "0.2.4", From 6c02a5c79ecf4ebba3dca55a60a74da3f821a1e0 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Mon, 17 Feb 2025 10:03:13 +0000 Subject: [PATCH 41/48] (tests) ctrler all unit test pass --- .../controller/controller_limitless_test.go | 2 +- .../controller/fs_watcher_limitless_test.go | 85 ++++++++++++++-- prover/cmd/controller/controller/jobs.go | 19 ++-- prover/cmd/controller/controller/tmp_test.go | 98 ------------------- 4 files changed, 88 insertions(+), 116 deletions(-) delete mode 100644 prover/cmd/controller/controller/tmp_test.go diff --git a/prover/cmd/controller/controller/controller_limitless_test.go b/prover/cmd/controller/controller/controller_limitless_test.go index ace145227..f9c32a522 100644 --- a/prover/cmd/controller/controller/controller_limitless_test.go +++ 
b/prover/cmd/controller/controller/controller_limitless_test.go @@ -46,7 +46,7 @@ func TestLimitlessRun(t *testing.T) { go runController(ctxM, confM) go runController(ctxL, confL) - // For Debug mode only + // For DEBUG TEST mode only // runController(ctxL, confL) // Wait for a few secs, for the test to complete diff --git a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go index a725223d2..49306d858 100644 --- a/prover/cmd/controller/controller/fs_watcher_limitless_test.go +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -22,10 +22,81 @@ const ( execConglomerationPriority ) +func TestLimitlessProverFileWatcherM(t *testing.T) { + confM, _ := setupLimitlessFsTest(t) + + // We are not interested in the exit code here + exitCode := 0 + + // Create a list of files for each job type + execBootstrapFrom := []string{confM.ExecBootstrap.DirFrom(0)} + execGLFrom := []string{confM.ExecGL.DirFrom(0)} + execRndBeaconFrom := []string{ + confM.ExecRndBeacon.DirFrom(0), + confM.ExecRndBeacon.DirFrom(1), + } + execLPPFrom := []string{confM.ExecLPP.DirFrom(0)} + execConglomerationFrom := []string{ + confM.ExecConglomeration.DirFrom(0), + confM.ExecConglomeration.DirFrom(1), + confM.ExecConglomeration.DirFrom(2), + } + + // The jobs, declared in the order in which they are expected to be found + // NOTE: It is important to test for the same starting and ending block ranges + expectedFNames := []struct { + FName []string + Skip bool + }{ + { + FName: createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, execBootstrapPriority, exitCode), + }, + { + FName: createLimitlessTestInputFiles(execGLFrom, 0, 1, execGLPriority, exitCode), + }, + { + FName: createLimitlessTestInputFiles(execRndBeaconFrom, 0, 1, execRndBeaconPriority, exitCode), + }, + { + FName: createLimitlessTestInputFiles(execLPPFrom, 0, 1, execLPPPriority, exitCode), + }, + { + FName: 
createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, execConglomerationPriority, exitCode), + }, + { + Skip: true, // not large + FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, execConglomerationPriority, exitCode), + }, + { + Skip: true, // wrong dir + FName: createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, execConglomerationPriority, exitCode), + }, + } + + fw := NewFsWatcher(confM) + for _, f := range expectedFNames { + if f.Skip { + continue + } + t.Logf("Looking for job with file: %s", f.FName) + found := fw.GetBest() + t.Logf("Found job: %+v", found) + if found == nil { + t.Logf("Did not find the job for file: %s", f.FName) + } + if assert.NotNil(t, found, "did not find the job") { + assert.Equal(t, f.FName, found.OriginalFile) + } + } + assert.Nil(t, fw.GetBest(), "the queue should be empty now") + +} + func TestLimitlessProverFileWatcherL(t *testing.T) { _, confL := setupLimitlessFsTest(t) - exitCode := 0 // we are not interested in the exit code here + // We are not interested in the exit code here + exitCode := 0 // Create a list of files for each job type execBootstrapFrom := []string{confL.ExecBootstrap.DirFrom(0)} @@ -48,23 +119,23 @@ func TestLimitlessProverFileWatcherL(t *testing.T) { Skip bool }{ { - FName: createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, execBootstrapPriority, exitCode), + FName: createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, execBootstrapPriority, exitCode, forLarge), }, { - FName: createLimitlessTestInputFiles(execGLFrom, 0, 1, execGLPriority, exitCode), + FName: createLimitlessTestInputFiles(execGLFrom, 0, 1, execGLPriority, exitCode, forLarge), }, { - FName: createLimitlessTestInputFiles(execRndBeaconFrom, 0, 1, execRndBeaconPriority, exitCode), + FName: createLimitlessTestInputFiles(execRndBeaconFrom, 0, 1, execRndBeaconPriority, exitCode, forLarge), }, { - FName: createLimitlessTestInputFiles(execLPPFrom, 0, 1, execLPPPriority, exitCode), + FName: 
createLimitlessTestInputFiles(execLPPFrom, 0, 1, execLPPPriority, exitCode, forLarge), }, { - FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, execConglomerationPriority, exitCode), + FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, execConglomerationPriority, exitCode, forLarge), }, { Skip: true, // not large - FName: createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, execConglomerationPriority, exitCode), + FName: createLimitlessTestInputFiles(execConglomerationFrom, 4, 5, execConglomerationPriority, exitCode), }, { Skip: true, // wrong dir diff --git a/prover/cmd/controller/controller/jobs.go b/prover/cmd/controller/controller/jobs.go index b072b7feb..bb7a10693 100644 --- a/prover/cmd/controller/controller/jobs.go +++ b/prover/cmd/controller/controller/jobs.go @@ -103,18 +103,8 @@ func (j *Job) InProgressPath(ipIdx int) string { return filepath.Join(j.Def.dirFrom(ipIdx), j.LockedFile[ipIdx]) } -// func (j *Job) InProgressPath() []string { -// dirs := j.Def.dirFrom() -// inProgressPaths := make([]string, len(dirs)) -// for ipIdx := 0; ipIdx < len(inProgressPaths); ipIdx++ { -// inProgressPaths[ipIdx] = filepath.Join(dirs[ipIdx], j.LockedFile[ipIdx]) -// } -// return inProgressPaths -// } - // Returns the name of the output file for the job at the specified index func (j *Job) ResponseFile(opIdx int) (s string, err error) { - // Sanity check if err := j.Def.isValidOutputFileIdx(opIdx); err != nil { return "", err @@ -274,3 +264,12 @@ func intIfRegexpNotNil(r *regexp2.Regexp, s string) int { } return res } + +// func (j *Job) InProgressPath() []string { +// dirs := j.Def.dirFrom() +// inProgressPaths := make([]string, len(dirs)) +// for ipIdx := 0; ipIdx < len(inProgressPaths); ipIdx++ { +// inProgressPaths[ipIdx] = filepath.Join(dirs[ipIdx], j.LockedFile[ipIdx]) +// } +// return inProgressPaths +// } diff --git a/prover/cmd/controller/controller/tmp_test.go b/prover/cmd/controller/controller/tmp_test.go deleted file 
mode 100644 index f033a8814..000000000 --- a/prover/cmd/controller/controller/tmp_test.go +++ /dev/null @@ -1,98 +0,0 @@ -package controller - -import ( - "context" - "os" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "golang.org/x/exp/slices" -) - -func TestTmpLimitlessRun(t *testing.T) { - var ( - // exit0 int = 0 - // exit2 int = 2 - // exit10 int = 10 - // exit12 int = 12 - exit77 int = 77 - // exit137 int = 137 - ) - - _, confL := setupLimitlessFsTest(t) - - // Dirs - execBootstrapFrom := []string{confL.ExecBootstrap.DirFrom(0)} - - // Populate the filesystem with job files - - // Bootstrap - // createLimitlessTestInputFiles(execBootstrapFrom, 0, 1, execBootstrapPriority, exit0) - // createLimitlessTestInputFiles(execBootstrapFrom, 1, 2, execBootstrapPriority, exit12, forLarge) - createLimitlessTestInputFiles(execBootstrapFrom, 2, 3, execBootstrapPriority, exit77) - // createLimitlessTestInputFiles(execBootstrapFrom, 3, 4, execBootstrapPriority, exit77, forLarge) - // createLimitlessTestInputFiles(execBootstrapFrom, 4, 5, execBootstrapPriority, exit137) - // createLimitlessTestInputFiles(execBootstrapFrom, 5, 6, execBootstrapPriority, exit137, forLarge) - // createLimitlessTestInputFiles(execBootstrapFrom, 6, 7, execBootstrapPriority, exit2) - // createLimitlessTestInputFiles(execBootstrapFrom, 7, 8, execBootstrapPriority, exit2) - // createLimitlessTestInputFiles(execBootstrapFrom, 8, 9, execBootstrapPriority, exit10) - // createLimitlessTestInputFiles(execBootstrapFrom, 9, 10, execBootstrapPriority, exit12) - - // ctxM, stopM := context.WithCancel(context.Background()) - ctxL, stopL := context.WithCancel(context.Background()) - - // go runController(ctxM, confM) - // go runController(ctxL, confL) - - // For Debug mode only - runController(ctxL, confL) - - // Wait for a few secs, for the test to complete - // <-time.After(2 * time.Second) - - // Shutdown the controller - // stopM() - stopL() - - 
expectedStructure := []struct { - Path string - Entries []string - }{ - { - Path: confL.ExecBootstrap.DirFrom(0), - Entries: []string{}, // all files should be processed - }, - { - Path: confL.ExecBootstrap.DirDone(0), - Entries: []string{ - // "0-1-etv0.1.2-stv1.2.3-getZkProof.json.success", - // "1-2-etv0.1.2-stv1.2.3-getZkProof.json.large.success", - "2-3-etv0.1.2-stv1.2.3-getZkProof.json.failure.code_67", - }, - }, - // { - // Path: confL.ExecBootstrap.DirTo(0), - // Entries: []string{ - // "0-1-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json", - // "1-2-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json", - // }, - // }, - // { - // Path: confL.ExecBootstrap.DirTo(1), - // Entries: []string{ - // "0-1-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", - // "1-2-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", - // }, - // }, - } - - for _, dirVal := range expectedStructure { - dir, err := os.Open(dirVal.Path) - require.NoErrorf(t, err, "dir %v", dirVal.Path) - filesFound, err := dir.Readdirnames(-1) - require.NoErrorf(t, err, "dir %v", dirVal.Path) - slices.Sort(filesFound) - assert.Equalf(t, dirVal.Entries, filesFound, "dir %v", dirVal.Path) - } -} From b15ca48c05b477f349ace92838da2078bf885619 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Mon, 17 Feb 2025 13:30:19 +0000 Subject: [PATCH 42/48] (feat): limitless ctrler all unit tests pass --- .../controller/controller_limitless_test.go | 247 ++++++++++++++++++ 1 file changed, 247 insertions(+) diff --git a/prover/cmd/controller/controller/controller_limitless_test.go b/prover/cmd/controller/controller/controller_limitless_test.go index f9c32a522..8eb824bc5 100644 --- a/prover/cmd/controller/controller/controller_limitless_test.go +++ b/prover/cmd/controller/controller/controller_limitless_test.go @@ -25,6 +25,17 @@ func TestLimitlessRun(t *testing.T) { // Dirs execBootstrapFrom := []string{confL.ExecBootstrap.DirFrom(0)} + execGLFrom := []string{confL.ExecGL.DirFrom(0)} + 
execRndBeaconFrom := []string{ + confL.ExecRndBeacon.DirFrom(0), + confL.ExecRndBeacon.DirFrom(1), + } + execLPPFrom := []string{confL.ExecLPP.DirFrom(0)} + execConglomerationFrom := []string{ + confL.ExecConglomeration.DirFrom(0), + confL.ExecConglomeration.DirFrom(1), + confL.ExecConglomeration.DirFrom(2), + } // Populate the filesystem with job files @@ -40,6 +51,54 @@ func TestLimitlessRun(t *testing.T) { createLimitlessTestInputFiles(execBootstrapFrom, 8, 9, execBootstrapPriority, exit10) createLimitlessTestInputFiles(execBootstrapFrom, 9, 10, execBootstrapPriority, exit12) + // GL + createLimitlessTestInputFiles(execGLFrom, 0, 1, execGLPriority, exit0) + createLimitlessTestInputFiles(execGLFrom, 1, 2, execGLPriority, exit12, forLarge) + createLimitlessTestInputFiles(execGLFrom, 2, 3, execGLPriority, exit77) + createLimitlessTestInputFiles(execGLFrom, 3, 4, execGLPriority, exit77, forLarge) + createLimitlessTestInputFiles(execGLFrom, 4, 5, execGLPriority, exit137) + createLimitlessTestInputFiles(execGLFrom, 5, 6, execGLPriority, exit137, forLarge) + createLimitlessTestInputFiles(execGLFrom, 6, 7, execGLPriority, exit2) + createLimitlessTestInputFiles(execGLFrom, 7, 8, execGLPriority, exit2) + createLimitlessTestInputFiles(execGLFrom, 8, 9, execGLPriority, exit10) + createLimitlessTestInputFiles(execGLFrom, 9, 10, execGLPriority, exit12) + + // RndBeacon + createLimitlessTestInputFiles(execRndBeaconFrom, 0, 1, execRndBeaconPriority, exit0) + createLimitlessTestInputFiles(execRndBeaconFrom, 1, 2, execRndBeaconPriority, exit12, forLarge) + createLimitlessTestInputFiles(execRndBeaconFrom, 2, 3, execRndBeaconPriority, exit77) + createLimitlessTestInputFiles(execRndBeaconFrom, 3, 4, execRndBeaconPriority, exit77, forLarge) + createLimitlessTestInputFiles(execRndBeaconFrom, 4, 5, execRndBeaconPriority, exit137) + createLimitlessTestInputFiles(execRndBeaconFrom, 5, 6, execRndBeaconPriority, exit137, forLarge) + createLimitlessTestInputFiles(execRndBeaconFrom, 6, 7, 
execRndBeaconPriority, exit2) + createLimitlessTestInputFiles(execRndBeaconFrom, 7, 8, execRndBeaconPriority, exit2) + createLimitlessTestInputFiles(execRndBeaconFrom, 8, 9, execRndBeaconPriority, exit10) + createLimitlessTestInputFiles(execRndBeaconFrom, 9, 10, execRndBeaconPriority, exit12) + + // LPP + createLimitlessTestInputFiles(execLPPFrom, 0, 1, execLPPPriority, exit0) + createLimitlessTestInputFiles(execLPPFrom, 1, 2, execLPPPriority, exit12, forLarge) + createLimitlessTestInputFiles(execLPPFrom, 2, 3, execLPPPriority, exit77) + createLimitlessTestInputFiles(execLPPFrom, 3, 4, execLPPPriority, exit77, forLarge) + createLimitlessTestInputFiles(execLPPFrom, 4, 5, execLPPPriority, exit137) + createLimitlessTestInputFiles(execLPPFrom, 5, 6, execLPPPriority, exit137, forLarge) + createLimitlessTestInputFiles(execLPPFrom, 6, 7, execLPPPriority, exit2) + createLimitlessTestInputFiles(execLPPFrom, 7, 8, execLPPPriority, exit2) + createLimitlessTestInputFiles(execLPPFrom, 8, 9, execLPPPriority, exit10) + createLimitlessTestInputFiles(execLPPFrom, 9, 10, execLPPPriority, exit12) + + // Conglomeration + createLimitlessTestInputFiles(execConglomerationFrom, 0, 1, execConglomerationPriority, exit0) + createLimitlessTestInputFiles(execConglomerationFrom, 1, 2, execConglomerationPriority, exit12, forLarge) + createLimitlessTestInputFiles(execConglomerationFrom, 2, 3, execConglomerationPriority, exit77) + createLimitlessTestInputFiles(execConglomerationFrom, 3, 4, execConglomerationPriority, exit77, forLarge) + createLimitlessTestInputFiles(execConglomerationFrom, 4, 5, execConglomerationPriority, exit137) + createLimitlessTestInputFiles(execConglomerationFrom, 5, 6, execConglomerationPriority, exit137, forLarge) + createLimitlessTestInputFiles(execConglomerationFrom, 6, 7, execConglomerationPriority, exit2) + createLimitlessTestInputFiles(execConglomerationFrom, 7, 8, execConglomerationPriority, exit2) + createLimitlessTestInputFiles(execConglomerationFrom, 8, 9, 
execConglomerationPriority, exit10) + createLimitlessTestInputFiles(execConglomerationFrom, 9, 10, execConglomerationPriority, exit12) + ctxM, stopM := context.WithCancel(context.Background()) ctxL, stopL := context.WithCancel(context.Background()) @@ -60,6 +119,7 @@ func TestLimitlessRun(t *testing.T) { Path string Entries []string }{ + // Bootstrap { Path: confL.ExecBootstrap.DirFrom(0), Entries: []string{}, // all files should be processed @@ -97,6 +157,193 @@ func TestLimitlessRun(t *testing.T) { "9-10-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json", }, }, + + // GL + { + Path: confL.ExecGL.DirFrom(0), + Entries: []string{}, // all files should be processed + }, + { + Path: confL.ExecGL.DirDone(0), + Entries: []string{ + "0-1-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success", + "1-2-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success", + "2-3-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_67", + "3-4-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_65", + "4-5-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_125", + "5-6-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.failure.code_125", + "6-7-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2", + "7-8-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.failure.code_2", + "8-9-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.success", + "9-10-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_GLSubmodule.json.large.success", + }, + }, + { + Path: confL.ExecGL.DirTo(0), + Entries: []string{ + "0-1-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json", + "1-2-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json", + "8-9-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json", + "9-10-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json", + }, + }, + { + Path: confL.ExecGL.DirTo(1), + Entries: []string{ + "0-1-etv0.1.2-stv1.2.3-getZkProof_GL.json", + 
"1-2-etv0.1.2-stv1.2.3-getZkProof_GL.json", + "8-9-etv0.1.2-stv1.2.3-getZkProof_GL.json", + "9-10-etv0.1.2-stv1.2.3-getZkProof_GL.json", + }, + }, + + // RndBeacon + { + Path: confL.ExecRndBeacon.DirFrom(0), + Entries: []string{}, // all files should be processed + }, + { + Path: confL.ExecRndBeacon.DirFrom(1), + Entries: []string{}, // all files should be processed + }, + { + Path: confL.ExecRndBeacon.DirDone(0), + Entries: []string{ + "0-1-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success", + "1-2-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success", + "2-3-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_67", + "3-4-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_65", + "4-5-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_125", + "5-6-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_125", + "6-7-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2", + "7-8-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2", + "8-9-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success", + "9-10-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success", + }, + }, + { + Path: confL.ExecRndBeacon.DirDone(1), + Entries: []string{ + "0-1-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json.success", + "1-2-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success", + "2-3-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_67", + "3-4-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_65", + "4-5-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_125", + "5-6-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json.large.failure.code_125", + "6-7-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2", + "7-8-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json.failure.code_2", + "8-9-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json.success", + 
"9-10-etv0.1.2-stv1.2.3-getZkProof_GL_RndBeacon.json.large.success", + }, + }, + { + Path: confL.ExecRndBeacon.DirTo(0), + Entries: []string{ + "0-1-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json", + "1-2-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json", + "8-9-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json", + "9-10-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json", + }, + }, + + // LPP + { + Path: confL.ExecLPP.DirFrom(0), + Entries: []string{}, // all files should be processed + }, + { + Path: confL.ExecLPP.DirDone(0), + Entries: []string{ + "0-1-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json.success", + "1-2-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json.large.success", + "2-3-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json.failure.code_67", + "3-4-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_65", + "4-5-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_125", + "5-6-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json.large.failure.code_125", + "6-7-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2", + "7-8-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json.failure.code_2", + "8-9-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json.success", + "9-10-etv0.1.2-stv1.2.3-getZkProof_RndBeacon.json.large.success", + }, + }, + { + Path: confL.ExecLPP.DirTo(0), + Entries: []string{ + "0-1-etv0.1.2-stv1.2.3-getZkProof_LPP.json", + "1-2-etv0.1.2-stv1.2.3-getZkProof_LPP.json", + "8-9-etv0.1.2-stv1.2.3-getZkProof_LPP.json", + "9-10-etv0.1.2-stv1.2.3-getZkProof_LPP.json", + }, + }, + + // Conglomeration + // (Assumed no of segments=1) + { + Path: confL.ExecConglomeration.DirFrom(0), + Entries: []string{}, // all files should be processed + }, + { + Path: confL.ExecConglomeration.DirFrom(1), + Entries: []string{}, // all files should be processed + }, + { + Path: confL.ExecConglomeration.DirFrom(2), + Entries: []string{}, // all files should be processed + }, + { + Path: confL.ExecConglomeration.DirDone(0), + Entries: []string{ + 
"0-1-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success", + "1-2-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success", + "2-3-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_67", + "3-4-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_65", + "4-5-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_125", + "5-6-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.failure.code_125", + "6-7-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2", + "7-8-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.failure.code_2", + "8-9-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.success", + "9-10-etv0.1.2-stv1.2.3-getZkProof_Bootstrap_DistMetadata.json.large.success", + }, + }, + { + Path: confL.ExecConglomeration.DirDone(1), + Entries: []string{ + "0-1-etv0.1.2-stv1.2.3-getZkProof_GL.json.success", + "1-2-etv0.1.2-stv1.2.3-getZkProof_GL.json.large.success", + "2-3-etv0.1.2-stv1.2.3-getZkProof_GL.json.failure.code_67", + "3-4-etv0.1.2-stv1.2.3-getZkProof_GL.json.large.failure.code_65", + "4-5-etv0.1.2-stv1.2.3-getZkProof_GL.json.large.failure.code_125", + "5-6-etv0.1.2-stv1.2.3-getZkProof_GL.json.large.failure.code_125", + "6-7-etv0.1.2-stv1.2.3-getZkProof_GL.json.failure.code_2", + "7-8-etv0.1.2-stv1.2.3-getZkProof_GL.json.failure.code_2", + "8-9-etv0.1.2-stv1.2.3-getZkProof_GL.json.success", + "9-10-etv0.1.2-stv1.2.3-getZkProof_GL.json.large.success", + }, + }, + { + Path: confL.ExecConglomeration.DirDone(2), + Entries: []string{ + "0-1-etv0.1.2-stv1.2.3-getZkProof_LPP.json.success", + "1-2-etv0.1.2-stv1.2.3-getZkProof_LPP.json.large.success", + "2-3-etv0.1.2-stv1.2.3-getZkProof_LPP.json.failure.code_67", + "3-4-etv0.1.2-stv1.2.3-getZkProof_LPP.json.large.failure.code_65", + "4-5-etv0.1.2-stv1.2.3-getZkProof_LPP.json.large.failure.code_125", + "5-6-etv0.1.2-stv1.2.3-getZkProof_LPP.json.large.failure.code_125", + 
"6-7-etv0.1.2-stv1.2.3-getZkProof_LPP.json.failure.code_2", + "7-8-etv0.1.2-stv1.2.3-getZkProof_LPP.json.failure.code_2", + "8-9-etv0.1.2-stv1.2.3-getZkProof_LPP.json.success", + "9-10-etv0.1.2-stv1.2.3-getZkProof_LPP.json.large.success", + }, + }, + { + Path: confM.ExecConglomeration.DirTo(0), + Entries: []string{ + "0-1-getZkProof.json", + "1-2-getZkProof.json", + "8-9-getZkProof.json", + "9-10-getZkProof.json", + }, + }, } for _, dirVal := range expectedStructure { From 71c6529a82bb2ef8a1b942a4b2d73c44a2a9c188 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Mon, 17 Feb 2025 13:54:21 +0000 Subject: [PATCH 43/48] (feat): create limitless prover dirs in cmd and unit tests pass for controller --- prover/cmd/controller/controller/command.go | 29 +++++++++++++++++++++ prover/config/constants.go | 3 --- 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/prover/cmd/controller/controller/command.go b/prover/cmd/controller/controller/command.go index e7a0ce779..9a677d6c2 100644 --- a/prover/cmd/controller/controller/command.go +++ b/prover/cmd/controller/controller/command.go @@ -56,6 +56,35 @@ func cobraControllerRunCmd(c *cobra.Command, args []string) { cfg.Aggregation.DirDone(0), cfg.Aggregation.DirFrom(0), cfg.Aggregation.DirTo(0), + + // Dirs. 
for Limitless controller + cfg.ExecBootstrap.DirFrom(0), + cfg.ExecBootstrap.DirDone(0), + cfg.ExecBootstrap.DirTo(0), + cfg.ExecBootstrap.DirTo(1), + + cfg.ExecGL.DirFrom(0), + cfg.ExecGL.DirDone(0), + cfg.ExecGL.DirTo(0), + cfg.ExecGL.DirTo(1), + + cfg.ExecRndBeacon.DirFrom(0), + cfg.ExecRndBeacon.DirFrom(1), + cfg.ExecRndBeacon.DirDone(0), + cfg.ExecRndBeacon.DirDone(1), + cfg.ExecRndBeacon.DirTo(0), + + cfg.ExecLPP.DirFrom(0), + cfg.ExecLPP.DirDone(0), + cfg.ExecLPP.DirTo(0), + + cfg.ExecConglomeration.DirFrom(0), + cfg.ExecConglomeration.DirFrom(1), + cfg.ExecConglomeration.DirFrom(2), + cfg.ExecConglomeration.DirDone(0), + cfg.ExecConglomeration.DirDone(1), + cfg.ExecConglomeration.DirDone(2), + cfg.ExecConglomeration.DirTo(0), } for _, dir := range dirs { diff --git a/prover/config/constants.go b/prover/config/constants.go index 4578658ba..6b5dfb632 100644 --- a/prover/config/constants.go +++ b/prover/config/constants.go @@ -17,7 +17,4 @@ const ( // Extension to add in order to defer the job to the large prover LargeSuffix = "large" - - // Limitless prover - NoOfSegments = 1 ) From a74f6b0d142007e73c44d979841ae3949c7d45ff Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Tue, 18 Feb 2025 09:41:07 +0000 Subject: [PATCH 44/48] add placeholders for default dirs --- prover/config/config.go | 16 ---------------- prover/config/config_default.go | 21 ++++++++++++++------- 2 files changed, 14 insertions(+), 23 deletions(-) diff --git a/prover/config/config.go b/prover/config/config.go index 0e047ba12..6d7d4ffa4 100644 --- a/prover/config/config.go +++ b/prover/config/config.go @@ -146,22 +146,6 @@ type Config struct { TracesLimitsLarge TracesLimits `mapstructure:"traces_limits_large" validate:"required"` } -type Conglomeration struct { - GL WithRequestDir `mapstructure:",squash"` - - LPP WithRequestDir `mapstructure:",squash"` - - BootstrapMetadata WithRequestDir `mapstructure:",squash"` - - WithResponseDir `mapstructure:",squash"` - - // ProverMode stores the kind of 
prover to use. - ProverMode ProverMode `mapstructure:"prover_mode" validate:"required,oneof=dev partial full proofless bench check-only encode-only"` - - // CanRunFullLarge indicates whether the prover is running on a large machine (and can run full large traces). - CanRunFullLarge bool `mapstructure:"can_run_full_large"` -} - func (cfg *Config) Logger() *logrus.Logger { // TODO @gbotrel revisit. return logrus.StandardLogger() diff --git a/prover/config/config_default.go b/prover/config/config_default.go index dbbe17719..03ff56649 100644 --- a/prover/config/config_default.go +++ b/prover/config/config_default.go @@ -30,19 +30,26 @@ func setDefaultValues() { viper.SetDefault("controller.retry_locally_with_large_codes", DefaultRetryLocallyWithLargeCodes) // Set default for cmdTmpl and cmdLargeTmpl - // TODO @gbotrel binary to run prover is hardcoded here. viper.SetDefault("controller.worker_cmd_tmpl", "prover prove --config {{.ConfFile}} --in {{.InFile}} --out {{.OutFile}}") viper.SetDefault("controller.worker_cmd_large_tmpl", "prover prove --config {{.ConfFile}} --in {{.InFile}} --out {{.OutFile}} --large") } func setDefaultPaths() { viper.SetDefault("execution.conflated_traces_dir", "/shared/traces/conflated") - viper.SetDefault("execution.requests_root_dir", "/shared/prover-execution") - viper.SetDefault("blob_decompression.requests_root_dir", "/shared/prover-compression") - viper.SetDefault("aggregation.requests_root_dir", "/shared/prover-aggregation") - - // TODO: @srinathLN7 Add Limitless prover default values - viper.SetDefault("execution_bootstrap.requests_root_dir", "/shared/prover-execution/limitless/bootstrap") + viper.SetDefault("execution.requests_root_dir", []string{"/shared/prover-execution"}) + viper.SetDefault("blob_decompression.requests_root_dir", []string{"/shared/prover-compression"}) + viper.SetDefault("aggregation.requests_root_dir", []string{"/shared/prover-aggregation"}) + viper.SetDefault("execution.responses_root_dir", 
[]string{"/shared/prover-execution"}) + viper.SetDefault("blob_decompression.responses_root_dir", []string{"/shared/prover-compression"}) + viper.SetDefault("aggregation.responses_root_dir", []string{"/shared/prover-aggregation"}) + + // PLACEHOLDER for default request and response dirs - Subjected to change + // TODO @srinathln7: Implement a simple mechanism to introduce the chaining effect here i.e. move the response files + // in response dir from prev. job to req dir of the next job. FIND the optimal way to implement this + viper.SetDefault("execution_bootstrap.requests_root_dir", []string{"/shared/prover-execution/limitless/bootstrap"}) + viper.SetDefault("execution_bootstrap.responses_root_dir", []string{"/shared/prover-execution/limitless/bootstrap/gl", + "/shared/prover-execution/limitless/bootstrap/metadata", + }) } From 4d467b61c039cf25ac08bd8beeeaa3d9f27fdc70 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Tue, 18 Feb 2025 16:34:12 +0000 Subject: [PATCH 45/48] init run controller --- prover/cmd/controller/controller/command.go | 11 ++ .../controller/fs_watcher_limitless_test.go | 36 ---- prover/config/config-test.toml | 173 ++++++++++++++++++ prover/config/config.go | 2 +- 4 files changed, 185 insertions(+), 37 deletions(-) create mode 100644 prover/config/config-test.toml diff --git a/prover/cmd/controller/controller/command.go b/prover/cmd/controller/controller/command.go index 9a677d6c2..009edfabf 100644 --- a/prover/cmd/controller/controller/command.go +++ b/prover/cmd/controller/controller/command.go @@ -44,6 +44,17 @@ func cobraControllerRunCmd(c *cobra.Command, args []string) { } cfg.Controller.LocalID = fLocalID + // Disable Legacy for testing + cfg.Controller.EnableExecution = true + cfg.Controller.EnableBlobDecompression = false + cfg.Controller.EnableAggregation = false + + cfg.Controller.EnableExecBootstrap = false + cfg.Controller.EnableExecGL = false + cfg.Controller.EnableExecRndBeacon = false + cfg.Controller.EnableExecLPP = false + 
cfg.Controller.EnableExecConglomeration = false + // TODO @gbotrel @AlexandreBelling check who is responsible for creating the directories // create the sub directories if they do not exist dirs := []string{ diff --git a/prover/cmd/controller/controller/fs_watcher_limitless_test.go b/prover/cmd/controller/controller/fs_watcher_limitless_test.go index 49306d858..8ca7a5627 100644 --- a/prover/cmd/controller/controller/fs_watcher_limitless_test.go +++ b/prover/cmd/controller/controller/fs_watcher_limitless_test.go @@ -188,42 +188,6 @@ func setupLimitlessFsTest(t *testing.T) (confM, confL *config.Config) { execConglomeration = "execution" ) - /* - // Create a configuration using temporary directories - // Defines three command templates for different types of jobs. - // These templates will be used to create shell commands for the worker processes. - cmd := ` - /bin/sh {{index .InFile 0}} - CODE=$? - if [ $CODE -eq 0 ]; then - touch {{index .OutFile 0}} - touch {{index .OutFile 1}} - fi - exit $CODE - ` - cmdLarge := ` - /bin/sh {{index .InFile 0}} - CODE=$? - CODE=$(($CODE - 12)) - if [ $CODE -eq 0 ]; then - touch {{index .OutFile 0}} - touch {{index .OutFile 1}} - fi - exit $CODE - ` - - cmdLargeInternal := ` - /bin/sh {{index .InFile 0}} - CODE=$? - CODE=$(($CODE - 10)) - if [ $CODE -eq 0 ]; then - touch {{index .OutFile 0}} - touch {{index .OutFile 1}} - fi - exit $CODE - ` - */ - cmd := ` {{- range .InFile }} /bin/sh {{ . }} diff --git a/prover/config/config-test.toml b/prover/config/config-test.toml new file mode 100644 index 000000000..b8b88677e --- /dev/null +++ b/prover/config/config-test.toml @@ -0,0 +1,173 @@ +environment = "sepolia" +version = "4.0.0" # TODO @gbotrel hunt all version definitions. +assets_dir = "./prover-assets" +log_level = 4 # TODO @gbotrel will be refactored with new logger. 
+ +[controller] +retry_delays = [0, 1] + + + +[execution] +prover_mode = "full" +conflated_traces_dir = "/home/ubuntu/dummy-test-data/traces/conflated" +requests_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution"] +responses_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution"] + +# dummy-test-data to test limitless prover controller + +[execution_bootstrap] +prover_mode = "full" +conflated_traces_dir = "/home/ubuntu/dummy-test-data/traces/conflated" +requests_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless"] +responses_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/bootstrap-gl", "/home/ubuntu/dummy-test-data/prover-execution/limitless/bootstrap-distmetadata"] + +[execution_gl] +prover_mode = "full" +conflated_traces_dir = "/home/ubuntu/dummy-test-data/traces/conflated" +requests_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/bootstrap-gl"] +responses_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/gl-rndbeacon", "/home/ubuntu/dummy-test-data/prover-execution/limitless/gl-conglomeration"] + +[execution_rndbeacon] +prover_mode = "full" +conflated_traces_dir = "/home/ubuntu/dummy-test-data/traces/conflated" +requests_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/bootstrap-rndbeacon/", "/home/ubuntu/dummy-test-data/prover-execution/limitless/gl-rndbeacon"] +responses_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/rndbeacon-lpp"] + +[execution_lpp] +prover_mode = "full" +conflated_traces_dir = "/home/ubuntu/dummy-test-data/traces/conflated" +requests_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/rndbeacon-lpp"] +responses_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/lpp-conglomeration"] + +[execution_conglomeration] +prover_mode = "full" +conflated_traces_dir = "/home/ubuntu/dummy-test-data/traces/conflated" +requests_root_dir = 
["/home/ubuntu/dummy-test-data/prover-execution/limitless/bootstrap-metadata", "/home/ubuntu/dummy-test-data/prover-execution/limitless/gl-conglomeration", "/home/ubuntu/dummy-test-data/prover-execution/limitless/lpp-conglomeration"] +responses_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless"] + +[blob_decompression] +prover_mode = "full" +requests_root_dir = ["/home/ubuntu/sepolia-testing-full/prover-compression"] +responses_root_dir = ["/home/ubuntu/sepolia-testing-full/prover-compression"] +dict_path = "lib/compressor/compressor_dict.bin" + +[aggregation] +prover_mode = "full" +requests_root_dir = ["/home/ubuntu/sepolia-testing-full/prover-aggregation"] +responses_root_dir = ["/home/ubuntu/sepolia-testing-full/prover-aggregation"] +num_proofs = [10, 20, 50, 100, 200, 400] +allowed_inputs = ["execution-dummy", "execution", "execution-large", "blob-decompression-dummy", "blob-decompression-v0", "blob-decompression-v1"] +verifier_id = 1 + +[public_input_interconnection] +max_nb_decompression = 400 +max_nb_execution = 400 +max_nb_circuits = 400 +execution_max_nb_msg = 16 +l2_msg_merkle_depth = 5 +l2_msg_max_nb_merkle = 200 + +[layer2] +chain_id = 59141 +message_service_contract = "0x971e727e956690b9957be6d51Ec16E73AcAC83A7" + +[traces_limits] +ADD = 524288 +BIN = 262144 +BLAKE_MODEXP_DATA = 16384 +BLOCK_DATA = 1024 +BLOCK_HASH = 512 +EC_DATA = 262144 +EUC = 65536 +EXP = 8192 +EXT = 1048576 +GAS = 65536 +HUB = 2097152 +LOG_DATA = 65536 +LOG_INFO = 4096 +MMIO = 4194304 +MMU = 4194304 +MOD = 131072 +MUL = 65536 +MXP = 524288 +OOB = 262144 +RLP_ADDR = 4096 +RLP_TXN = 131072 +RLP_TXN_RCPT = 65536 +ROM = 4194304 +ROM_LEX = 1024 +SHAKIRA_DATA = 32768 +SHF = 65536 +STP = 16384 +TRM = 32768 +TXN_DATA = 8192 +WCP = 262144 +PRECOMPILE_ECRECOVER_EFFECTIVE_CALLS = 128 +PRECOMPILE_SHA2_BLOCKS = 671 +PRECOMPILE_RIPEMD_BLOCKS = 671 +PRECOMPILE_MODEXP_EFFECTIVE_CALLS = 4 +PRECOMPILE_ECADD_EFFECTIVE_CALLS = 16384 +PRECOMPILE_ECMUL_EFFECTIVE_CALLS = 32 
+PRECOMPILE_ECPAIRING_FINAL_EXPONENTIATIONS = 16 +PRECOMPILE_ECPAIRING_MILLER_LOOPS = 64 +PRECOMPILE_ECPAIRING_G2_MEMBERSHIP_CALLS = 64 +PRECOMPILE_BLAKE_EFFECTIVE_CALLS = 600 +PRECOMPILE_BLAKE_ROUNDS = 600 +BLOCK_KECCAK = 8192 +BLOCK_L1_SIZE = 1000000 +BLOCK_L2_L1_LOGS = 16 +BLOCK_TRANSACTIONS = 200 +BIN_REFERENCE_TABLE = 262144 +SHF_REFERENCE_TABLE = 4096 +INSTRUCTION_DECODER = 512 + +[traces_limits_large] +ADD = 1048576 +BIN = 524288 +BLAKE_MODEXP_DATA = 32768 +BLOCK_DATA = 2048 +BLOCK_HASH = 1024 +EC_DATA = 524288 +EUC = 131072 +EXP = 16384 +EXT = 2097152 +GAS = 131072 +HUB = 4194304 +LOG_DATA = 131072 +LOG_INFO = 8192 +MMIO = 8388608 +MMU = 8388608 +MOD = 262144 +MUL = 131072 +MXP = 1048576 +OOB = 524288 +RLP_ADDR = 8192 +RLP_TXN = 262144 +RLP_TXN_RCPT = 131072 +ROM = 8388608 +ROM_LEX = 2048 +SHAKIRA_DATA = 65536 +SHF = 131072 +STP = 32768 +TRM = 65536 +TXN_DATA = 16384 +WCP = 524288 +PRECOMPILE_ECRECOVER_EFFECTIVE_CALLS = 256 +PRECOMPILE_SHA2_BLOCKS = 671 +PRECOMPILE_RIPEMD_BLOCKS = 671 +PRECOMPILE_MODEXP_EFFECTIVE_CALLS = 8 +PRECOMPILE_ECADD_EFFECTIVE_CALLS = 32768 +PRECOMPILE_ECMUL_EFFECTIVE_CALLS = 64 +PRECOMPILE_ECPAIRING_FINAL_EXPONENTIATIONS = 32 +PRECOMPILE_ECPAIRING_MILLER_LOOPS = 128 +PRECOMPILE_ECPAIRING_G2_MEMBERSHIP_CALLS = 128 +PRECOMPILE_BLAKE_EFFECTIVE_CALLS = 600 +PRECOMPILE_BLAKE_ROUNDS = 600 +BLOCK_KECCAK = 8192 +BLOCK_L1_SIZE = 1000000 +BLOCK_L2_L1_LOGS = 16 +BLOCK_TRANSACTIONS = 200 +BIN_REFERENCE_TABLE = 262144 +SHF_REFERENCE_TABLE = 4096 +INSTRUCTION_DECODER = 512 \ No newline at end of file diff --git a/prover/config/config.go b/prover/config/config.go index 6d7d4ffa4..56b9591ac 100644 --- a/prover/config/config.go +++ b/prover/config/config.go @@ -114,8 +114,8 @@ type Config struct { // LIMITLESS PROVER Components ExecBootstrap Execution `mapstructure:"execution_bootstrap"` ExecGL Execution `mapstructure:"execution_gl"` - ExecLPP Execution `mapstructure:"execution_lpp"` ExecRndBeacon Execution `mapstructure:"execution_rndbeacon"` + 
ExecLPP Execution `mapstructure:"execution_lpp"` ExecConglomeration Execution `mapstructure:"execution_conglomeration"` Debug struct { From dcb90764b3784097df88a535cea9dfbd77553725 Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Wed, 19 Feb 2025 09:45:49 +0000 Subject: [PATCH 46/48] commit progress --- prover/cmd/controller/controller/command.go | 4 ++-- prover/cmd/prover/cmd/prove.go | 24 ++++++++++----------- prover/cmd/prover/main.go | 9 ++++++-- 3 files changed, 21 insertions(+), 16 deletions(-) diff --git a/prover/cmd/controller/controller/command.go b/prover/cmd/controller/controller/command.go index 009edfabf..259785c47 100644 --- a/prover/cmd/controller/controller/command.go +++ b/prover/cmd/controller/controller/command.go @@ -44,11 +44,11 @@ func cobraControllerRunCmd(c *cobra.Command, args []string) { } cfg.Controller.LocalID = fLocalID - // Disable Legacy for testing cfg.Controller.EnableExecution = true + + // Disable for testing cfg.Controller.EnableBlobDecompression = false cfg.Controller.EnableAggregation = false - cfg.Controller.EnableExecBootstrap = false cfg.Controller.EnableExecGL = false cfg.Controller.EnableExecRndBeacon = false diff --git a/prover/cmd/prover/cmd/prove.go b/prover/cmd/prover/cmd/prove.go index bda4ba51c..ccd4e8bea 100644 --- a/prover/cmd/prover/cmd/prove.go +++ b/prover/cmd/prover/cmd/prove.go @@ -15,8 +15,8 @@ import ( ) type ProverArgs struct { - Input string - Output string + Input []string + Output []string Large bool ConfigFile string } @@ -33,32 +33,32 @@ func Prove(args ProverArgs) error { } // discover the type of the job from the input file name - jobExecution := strings.Contains(args.Input, "getZkProof") - jobBlobDecompression := strings.Contains(args.Input, "getZkBlobCompressionProof") - jobAggregation := strings.Contains(args.Input, "getZkAggregatedProof") + jobExecution := strings.Contains(args.Input[0], "getZkProof") + jobBlobDecompression := strings.Contains(args.Input[0], "getZkBlobCompressionProof") + 
jobAggregation := strings.Contains(args.Input[0], "getZkAggregatedProof") if jobExecution { req := &execution.Request{} - if err := readRequest(args.Input, req); err != nil { + if err := readRequest(args.Input[0], req); err != nil { return fmt.Errorf("could not read the input file (%v): %w", args.Input, err) } // we use the large traces in 2 cases; // 1. the user explicitly asked for it (args.Large) // 2. the job contains the large suffix and we are a large machine (cfg.Execution.CanRunLarge) - large := args.Large || (strings.Contains(args.Input, "large") && cfg.Execution.CanRunFullLarge) + large := args.Large || (strings.Contains(args.Input[0], "large") && cfg.Execution.CanRunFullLarge) resp, err := execution.Prove(cfg, req, large) if err != nil { return fmt.Errorf("could not prove the execution: %w", err) } - return writeResponse(args.Output, resp) + return writeResponse(args.Output[0], resp) } if jobBlobDecompression { req := &blobdecompression.Request{} - if err := readRequest(args.Input, req); err != nil { + if err := readRequest(args.Input[0], req); err != nil { return fmt.Errorf("could not read the input file (%v): %w", args.Input, err) } @@ -67,12 +67,12 @@ func Prove(args ProverArgs) error { return fmt.Errorf("could not prove the blob decompression: %w", err) } - return writeResponse(args.Output, resp) + return writeResponse(args.Output[0], resp) } if jobAggregation { req := &aggregation.Request{} - if err := readRequest(args.Input, req); err != nil { + if err := readRequest(args.Input[0], req); err != nil { return fmt.Errorf("could not read the input file (%v): %w", args.Input, err) } @@ -81,7 +81,7 @@ func Prove(args ProverArgs) error { return fmt.Errorf("could not prove the aggregation: %w", err) } - return writeResponse(args.Output, resp) + return writeResponse(args.Output[0], resp) } return errors.New("unknown job type") diff --git a/prover/cmd/prover/main.go b/prover/cmd/prover/main.go index 073110b08..dac8b02c6 100644 --- a/prover/cmd/prover/main.go 
+++ b/prover/cmd/prover/main.go @@ -62,8 +62,13 @@ func init() { rootCmd.AddCommand(proveCmd) - proveCmd.Flags().StringVar(&proverArgs.Input, "in", "", "input file") - proveCmd.Flags().StringVar(&proverArgs.Output, "out", "", "output file") + // ASSUMED 0 index here + // proveCmd.Flags().StringVar(&proverArgs.Input[0], "in", "", "input file") + // proveCmd.Flags().StringVar(&proverArgs.Output[0], "out", "", "output file") + + proveCmd.Flags().StringSliceVar(&proverArgs.Input, "in", make([]string, len(proverArgs.Input)), "input file") + proveCmd.Flags().StringSliceVar(&proverArgs.Output, "out", make([]string, len(proverArgs.Output)), "output file") + proveCmd.Flags().BoolVar(&proverArgs.Large, "large", false, "run the large execution circuit") } From f2ae46a0d0fcaa271d4a9d054fe9cd19ac79466d Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Wed, 19 Feb 2025 13:45:06 +0000 Subject: [PATCH 47/48] local commit test toml --- prover/config/config-test.toml | 40 ++++++++++++++++------------------ 1 file changed, 19 insertions(+), 21 deletions(-) diff --git a/prover/config/config-test.toml b/prover/config/config-test.toml index b8b88677e..0c47a860e 100644 --- a/prover/config/config-test.toml +++ b/prover/config/config-test.toml @@ -6,45 +6,43 @@ log_level = 4 # TODO @gbotrel will be refactore [controller] retry_delays = [0, 1] - - [execution] prover_mode = "full" -conflated_traces_dir = "/home/ubuntu/dummy-test-data/traces/conflated" -requests_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution"] -responses_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution"] +conflated_traces_dir = "/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/traces/conflated" +requests_root_dir = ["/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution"] +responses_root_dir = ["/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution"] -# dummy-test-data to test limitless prover controller +# dummy-test-data/sepolia-v0.8.0-rc3 to test limitless prover controller 
[execution_bootstrap] prover_mode = "full" -conflated_traces_dir = "/home/ubuntu/dummy-test-data/traces/conflated" -requests_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless"] -responses_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/bootstrap-gl", "/home/ubuntu/dummy-test-data/prover-execution/limitless/bootstrap-distmetadata"] +conflated_traces_dir = "/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/traces/conflated" +requests_root_dir = ["/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless"] +responses_root_dir = ["/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless/bootstrap-gl", "/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless/bootstrap-distmetadata"] [execution_gl] prover_mode = "full" -conflated_traces_dir = "/home/ubuntu/dummy-test-data/traces/conflated" -requests_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/bootstrap-gl"] -responses_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/gl-rndbeacon", "/home/ubuntu/dummy-test-data/prover-execution/limitless/gl-conglomeration"] +conflated_traces_dir = "/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/traces/conflated" +requests_root_dir = ["/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless/bootstrap-gl"] +responses_root_dir = ["/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless/gl-rndbeacon", "/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless/gl-conglomeration"] [execution_rndbeacon] prover_mode = "full" -conflated_traces_dir = "/home/ubuntu/dummy-test-data/traces/conflated" -requests_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/bootstrap-rndbeacon/", "/home/ubuntu/dummy-test-data/prover-execution/limitless/gl-rndbeacon"] -responses_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/rndbeacon-lpp"] +conflated_traces_dir = 
"/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/traces/conflated" +requests_root_dir = ["/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless/bootstrap-rndbeacon/", "/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless/gl-rndbeacon"] +responses_root_dir = ["/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless/rndbeacon-lpp"] [execution_lpp] prover_mode = "full" -conflated_traces_dir = "/home/ubuntu/dummy-test-data/traces/conflated" -requests_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/rndbeacon-lpp"] -responses_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/lpp-conglomeration"] +conflated_traces_dir = "/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/traces/conflated" +requests_root_dir = ["/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless/rndbeacon-lpp"] +responses_root_dir = ["/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless/lpp-conglomeration"] [execution_conglomeration] prover_mode = "full" -conflated_traces_dir = "/home/ubuntu/dummy-test-data/traces/conflated" -requests_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless/bootstrap-metadata", "/home/ubuntu/dummy-test-data/prover-execution/limitless/gl-conglomeration", "/home/ubuntu/dummy-test-data/prover-execution/limitless/lpp-conglomeration"] -responses_root_dir = ["/home/ubuntu/dummy-test-data/prover-execution/limitless"] +conflated_traces_dir = "/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/traces/conflated" +requests_root_dir = ["/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless/bootstrap-metadata", "/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless/gl-conglomeration", "/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless/lpp-conglomeration"] +responses_root_dir = ["/home/ubuntu/dummy-test-data/sepolia-v0.8.0-rc3/prover-execution/limitless"] 
[blob_decompression] prover_mode = "full" From 7f657830ec17ebdc51d73c9c0fa9e5045556fa1e Mon Sep 17 00:00:00 2001 From: srinathln7 Date: Wed, 19 Feb 2025 15:07:42 +0000 Subject: [PATCH 48/48] (feat): enable successful execution proof run using controller --- prover/backend/execution/craft.go | 17 +++++++++-------- prover/cmd/controller/controller/command.go | 17 ++++++++--------- prover/config/config_default.go | 11 ++++++++--- 3 files changed, 25 insertions(+), 20 deletions(-) diff --git a/prover/backend/execution/craft.go b/prover/backend/execution/craft.go index 9943cb395..b513f1c8f 100644 --- a/prover/backend/execution/craft.go +++ b/prover/backend/execution/craft.go @@ -35,14 +35,15 @@ func CraftProverOutput( req *Request, ) Response { - // Split the embedded file contents into a string slice - constraintsVersions := strings.Split(strings.TrimSpace(constraintsVersionsStr), "\n") - - // Check the arithmetization version used to generate the trace is contained in the prover request - // and fail fast if the constraint version is not supported - if err := checkArithmetizationVersion(req.ConflatedExecutionTracesFile, req.TracesEngineVersion, constraintsVersions); err != nil { - panic(err.Error()) - } + /* + // Split the embedded file contents into a string slice + constraintsVersions := strings.Split(strings.TrimSpace(constraintsVersionsStr), "\n") + + // Check the arithmetization version used to generate the trace is contained in the prover request + // and fail fast if the constraint version is not supported + if err := checkArithmetizationVersion(req.ConflatedExecutionTracesFile, req.TracesEngineVersion, constraintsVersions); err != nil { + panic(err.Error()) + } */ var ( l2BridgeAddress = cfg.Layer2.MsgSvcContract diff --git a/prover/cmd/controller/controller/command.go b/prover/cmd/controller/controller/command.go index 259785c47..7f3ce5dfe 100644 --- a/prover/cmd/controller/controller/command.go +++ b/prover/cmd/controller/controller/command.go @@ -44,16 
+44,15 @@ func cobraControllerRunCmd(c *cobra.Command, args []string) { } cfg.Controller.LocalID = fLocalID - cfg.Controller.EnableExecution = true - // Disable for testing - cfg.Controller.EnableBlobDecompression = false - cfg.Controller.EnableAggregation = false - cfg.Controller.EnableExecBootstrap = false - cfg.Controller.EnableExecGL = false - cfg.Controller.EnableExecRndBeacon = false - cfg.Controller.EnableExecLPP = false - cfg.Controller.EnableExecConglomeration = false + // cfg.Controller.EnableExecution = true + // cfg.Controller.EnableBlobDecompression = false + // cfg.Controller.EnableAggregation = false + // cfg.Controller.EnableExecBootstrap = false + // cfg.Controller.EnableExecGL = false + // cfg.Controller.EnableExecRndBeacon = false + // cfg.Controller.EnableExecLPP = false + // cfg.Controller.EnableExecConglomeration = false // TODO @gbotrel @AlexandreBelling check who is responsible for creating the directories // create the sub directories if they do not exist diff --git a/prover/config/config_default.go b/prover/config/config_default.go index 03ff56649..750ec577b 100644 --- a/prover/config/config_default.go +++ b/prover/config/config_default.go @@ -18,6 +18,7 @@ func setDefaultValues() { viper.SetDefault("controller.enable_blob_decompression", true) viper.SetDefault("controller.enable_aggregation", true) + // Limitless controller components viper.SetDefault("controller.enable_exec_bootstrap", true) viper.SetDefault("controller.enable_exec_gl", true) viper.SetDefault("controller.enable_exec_rndbeacon", true) @@ -29,9 +30,13 @@ func setDefaultValues() { viper.SetDefault("controller.defer_to_other_large_codes", DefaultDeferToOtherLargeCodes) viper.SetDefault("controller.retry_locally_with_large_codes", DefaultRetryLocallyWithLargeCodes) - // Set default for cmdTmpl and cmdLargeTmpl - viper.SetDefault("controller.worker_cmd_tmpl", "prover prove --config {{.ConfFile}} --in {{.InFile}} --out {{.OutFile}}") - 
viper.SetDefault("controller.worker_cmd_large_tmpl", "prover prove --config {{.ConfFile}} --in {{.InFile}} --out {{.OutFile}} --large") + // Define the default command templates using the range action + workerCmdTmpl := `prover prove --config {{.ConfFile}} {{range $index, $element := .InFile}}--in {{$element}} {{end}} {{range $index, $element := .OutFile}}--out {{$element}} {{end}}` + workerCmdLargeTmpl := `prover prove --config {{.ConfFile}} {{range $index, $element := .InFile}}--in {{$element}} {{end}} {{range $index, $element := .OutFile}}--out {{$element}} {{end}} --large` + + // Set the default command templates in viper + viper.SetDefault("controller.worker_cmd_tmpl", workerCmdTmpl) + viper.SetDefault("controller.worker_cmd_large_tmpl", workerCmdLargeTmpl) } func setDefaultPaths() {