OS-54 Nebula Parser (#658)
* WIP: Mostly working

* WIP: Implement NebulaParser

* Can't test because no internet connection
* Everything compiles

---------

Signed-off-by: Reid Spencer <reid-spencer@users.noreply.github.com>
reid-spencer authored Sep 14, 2024
1 parent 18674ba commit 4eeb698
Showing 19 changed files with 208 additions and 64 deletions.
@@ -0,0 +1,23 @@
package com.ossuminc.riddl.language.parsing

import org.scalatest.TestData

class NebulaTest extends ParsingTest {

"Module" should {
"be accepted at root scope" in { (td: TestData) =>
val input = RiddlParserInput(
"""
|nebula is {
| domain blah is { ??? }
|}
|""".stripMargin, td
)
parseNebula(input) match
case Left(messages) => fail(messages.justErrors.format)
case Right(root) => succeed
}
}

}
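
For orientation, the following is a minimal, untested sketch (not part of this commit) of how the same parseNebula helper could exercise a nebula holding several unrelated definitions. The test class name and the extra RIDDL definitions are illustrative only, based on the nebulaContent production shown later in this diff.

package com.ossuminc.riddl.language.parsing

import org.scalatest.TestData

// Hypothetical companion test; only parseNebula, RiddlParserInput, and the nebula syntax come from the commit.
class NebulaMixedContentTest extends ParsingTest {

  "Nebula" should {
    "accept several unrelated definitions" in { (td: TestData) =>
      val input = RiddlParserInput(
        """
          |nebula is {
          |  domain blah is { ??? }
          |  context Checkout is { ??? }
          |  user Operator is "a person who runs the system"
          |}
          |""".stripMargin, td
      )
      parseNebula(input) match
        case Left(messages) => fail(messages.justErrors.format)
        case Right(nebula) => succeed
    }
  }
}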

@@ -27,7 +27,9 @@ trait ParsingTest extends TestingBasisWithTestData {
protected val testingOptions: CommonOptions = CommonOptions.empty.copy(maxIncludeWait = 10.seconds)

case class StringParser(content: String, testCase: String = "unknown test case")
extends TopLevelParser(RiddlParserInput(content, testCase), testingOptions)
extends TopLevelParser(testingOptions):
val rpi = RiddlParserInput(content, testCase)
end StringParser

def parsePath(
path: Path,
@@ -84,6 +86,11 @@ trait ParsingTest extends TestingBasisWithTestData {
val tp = TestParser(input)
tp.parseTopLevelDomain[TO](extract).map(x => (x, input))
}

def parseNebula(input: RiddlParserInput): Either[Messages, Nebula] = {
val tp = TestParser(input)
tp.parseNebula(input)
}

def parseDomainDefinition[TO <: RiddlValue](
input: RiddlParserInput,
@@ -1,6 +1,6 @@
package com.ossuminc.riddl.language.parsing

import com.ossuminc.riddl.language.AST
import com.ossuminc.riddl.language.{AST, CommonOptions}
import com.ossuminc.riddl.language.AST.{Context, Definition, Domain, RiddlValue, Root}
import com.ossuminc.riddl.language.Messages.*
import fastparse.*
@@ -12,9 +12,9 @@ import scala.util.control.NonFatal
import scala.concurrent.ExecutionContext

case class TestParser(
override val input: RiddlParserInput,
input: RiddlParserInput,
throwOnError: Boolean = false
) extends TopLevelParser(input)
) extends TopLevelParser(CommonOptions())
with Matchers {

def expect[CT <: RiddlValue](
@@ -70,17 +70,17 @@ case class TestParser(
}

def parseRoot: Either[Messages, Root] = {
parseRoot(withVerboseFailures = true)
parseRoot(input, withVerboseFailures = true)
}

def parseTopLevelDomains: Either[Messages, Root] = {
parseRoot(withVerboseFailures = true)
parseRoot(input, withVerboseFailures = true)
}

def parseTopLevelDomain[TO <: RiddlValue](
extract: Root => TO
): Either[Messages, TO] = {
parseRoot(withVerboseFailures = true).map { (root: Root) => extract(root) }
parseRoot(input, withVerboseFailures = true).map { (root: Root) => extract(root) }
}

def parseDefinition[FROM <: Definition: ClassTag, TO <: RiddlValue](
@@ -50,7 +50,7 @@ class TopLevelParserTest extends ParsingTest {
"parse empty String" in { (td: TestData) =>
val expected = Root(Contents())
val parser = StringParser("")
parser.parseRoot() match {
parser.parseRoot(parser.rpi) match {
case Right(r: Root) =>
fail(s"Should have failed expecting an author or domain but got ${r.format}")
case Left(messages: Messages) =>
@@ -830,6 +830,12 @@ object AST:
/** Type of definitions that occur in a block of [[Statement]] */
type Statements = Statement | Comment

type NebulaContents = Adaptor | Application | Author | Connector | Constant | ContainedGroup | Context | Domain |
Entity | Enumerator | Epic | Field | Function | Group | Handler | Inlet | Input | Invariant | Method | Module |
OnClause | OnInitializationClause | OnTerminationClause | OnMessageClause | OnOtherClause | Outlet | Output |
Projector | Relationship | Repository | Root | Saga | SagaStep | Schema | State | Streamlet | Term | Type |
UseCase | User

////////////////////////////////////////////////////////////////////////////////////////////////////////// DEFINITIONS
//////// The Abstract classes for defining Definitions by using the foregoing traits

@@ -1009,14 +1015,43 @@

override def identifyWithLoc: String = "Root"

def format: String = ""
def format: String = "Root"
end Root

object Root:

/** The value to use for an empty [[Root]] instance */
val empty: Root = apply(mutable.Seq.empty[RootContents])
end Root
////////////////////////////////////////////////////////////////////////////////////////////////////////////// NEBULA

/** A nebula of arbitrary definitions. A nebula allows any named definition in its contents without regard to
* the intended structure of those definitions, so it can be used as a general "scratchpad".
*
* @param contents
* The unrelated single definitions that make up the nebula
*/
case class Nebula(
contents: Contents[NebulaContents] = Contents.empty
) extends BranchDefinition[NebulaContents]:
override def isRootContainer: Boolean = false
def loc: At = At.empty

override def id: Identifier = Identifier(loc, "Nebula")

override def identify: String = "Nebula"

override def identifyWithLoc: String = "Nebula"

def format: String = "Nebula"
end Nebula

object Nebula:

/** The value to use for an empty [[Nebula]] instance */
val empty: Nebula = Nebula(Contents.empty[NebulaContents])
end Nebula
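
As an aside, a minimal sketch (not from the commit) of what the NebulaContents union type buys: membership in a nebula is checked at compile time, so a Domain may be placed in a nebula's contents while a Statement, which is not an alternative of the union, may not. The method names below are illustrative only.

import com.ossuminc.riddl.language.AST.{Domain, NebulaContents, Statement}

// Compiles: Domain is one of the alternatives of the NebulaContents union type.
def admitDomain(d: Domain): NebulaContents = d

// Would not compile: Statement is not an alternative of NebulaContents.
// def admitStatement(s: Statement): NebulaContents = s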


////////////////////////////////////////////////////////////////////////////////////////////////////////////// MODULE

@@ -1129,7 +1164,7 @@ object AST:

///////////////////////////////////////////////////////////////////////////////////////////////////////// RELATIONSHIP

enum RelationshipCardinality(val proportion: String) :
enum RelationshipCardinality(val proportion: String):
case OneToOne extends RelationshipCardinality("1:1")
case OneToMany extends RelationshipCardinality("1:N")
case ManyToOne extends RelationshipCardinality("N:1")
@@ -13,7 +13,7 @@ import fastparse.MultiLineWhitespace.*
private[parsing] trait ApplicationParser {
this: ProcessorParser & StreamingParser & CommonParser =>

private def containedGroup[u: P]: P[ContainedGroup] = {
def containedGroup[u: P]: P[ContainedGroup] = {
P(
location ~ Keywords.contains ~ identifier ~ as ~ groupRef ~ withDescriptives
).map { case (loc, id, group, descriptives) =>
@@ -23,11 +23,11 @@ private[parsing] trait ApplicationParser {

private def groupDefinitions[u: P]: P[Seq[OccursInGroup]] = {
P(
group | containedGroup | shownBy | output | appInput | comment
group | containedGroup | shownBy | appOutput | appInput | comment
).asInstanceOf[P[OccursInGroup]].rep(1)
}

private def group[u: P]: P[Group] = {
def group[u: P]: P[Group] = {
P(
location ~ groupAliases ~ identifier ~/ is ~ open ~
(undefined(Seq.empty[OccursInGroup]) | groupDefinitions) ~
@@ -47,14 +47,14 @@

private def outputDefinitions[u: P]: P[Seq[OccursInOutput]] = {
P(
is ~ open ~ (undefined(Seq.empty[OccursInOutput]) | (output | typeRef).rep(1)) ~ close
is ~ open ~ (undefined(Seq.empty[OccursInOutput]) | (appOutput | typeRef).rep(1)) ~ close
).?.map {
case Some(definitions: Seq[OccursInOutput]) => definitions
case None => Seq.empty[OccursInOutput]
}
}

private def output[u: P]: P[Output] = {
def appOutput[u: P]: P[Output] = {
P(
location ~ outputAliases ~/ identifier ~ presentationAliases ~/
(literalString | constantRef | typeRef) ~/ outputDefinitions ~ withDescriptives
@@ -102,7 +102,7 @@ private[parsing] trait ApplicationParser {
).!
}

private def appInput[u: P]: P[Input] = {
def appInput[u: P]: P[Input] = {
P(
location ~ inputAliases ~/ identifier ~/ acquisitionAliases ~/ typeRef ~ inputDefinitions ~ withDescriptives
).map { case (loc, inputAlias, id, acquisitionAlias, putIn, contents, descriptives) =>
@@ -17,7 +17,7 @@ import scala.concurrent.{Await, Future}
private[parsing] trait DomainParser {
this: VitalDefinitionParser & ApplicationParser & ContextParser & EpicParser & SagaParser & StreamingParser =>

private def user[u: P]: P[User] = {
def user[u: P]: P[User] = {
P(
location ~ Keywords.user ~ identifier ~/ is ~ literalString ~/ withDescriptives
)./.map { case (loc, id, is_a, descriptives) =>
@@ -11,10 +11,10 @@ import fastparse.*
import fastparse.MultiLineWhitespace.*

/** Parsing rules for entity definitions */
private[parsing] trait EntityParser {
private[parsing] trait EntityParser {
this: ProcessorParser & StreamingParser =>

private def state[u: P]: P[State] = {
def state[u: P]: P[State] = {
P(
location ~ Keywords.state ~ identifier ~/ (of | is) ~ typeRef ~/ withDescriptives
)./.map { case (loc, id, typRef, descriptives) =>
@@ -41,7 +41,7 @@ private[parsing] trait EntityParser {
def entity[u: P]: P[Entity] = {
P(
location ~ Keywords.entity ~/ identifier ~ is ~ open ~/ entityBody ~ close ~ withDescriptives
)./map { case (loc, id, contents, descriptives) =>
)./ map { case (loc, id, contents, descriptives) =>
checkForDuplicateIncludes(contents)
Entity(loc, id, contents.toContents, descriptives.toContents)
}
@@ -125,10 +125,10 @@ private[parsing] trait EpicParser {
}

private def interactions[u: P]: P[Seq[InteractionContainerContents]] = {
P( interaction.rep(1) )
P(interaction.rep(1))
}

private def useCase[u: P]: P[UseCase] = {
def useCase[u: P]: P[UseCase] = {
P(
location ~ Keywords.case_ ~/ identifier ~ is ~ open ~ userStory ~
(undefined(Seq.empty[TwoReferenceInteraction]) | interactions) ~
@@ -152,7 +152,7 @@
}

private def epicDefinitions[u: P]: P[Seq[EpicContents]] = {
P( vitalDefinitionContents | useCase | shownBy | epicInclude ).asInstanceOf[P[EpicContents]].rep(1)
P(vitalDefinitionContents | useCase | shownBy | epicInclude).asInstanceOf[P[EpicContents]].rep(1)
}

private type EpicBody = (
@@ -18,11 +18,11 @@ private[parsing] trait FunctionParser {
include[u, FunctionContents](functionDefinitions(_))
}

def input[u: P]: P[Aggregation] = {
def funcInput[u: P]: P[Aggregation] = {
P(Keywords.requires ~ Punctuation.colon.? ~ aggregation)./
}

def output[u: P]: P[Aggregation] = {
def funcOutput[u: P]: P[Aggregation] = {
P(Keywords.returns ~ Punctuation.colon.? ~ aggregation)./
}

@@ -37,7 +37,7 @@
private type BodyType = (Option[Aggregation], Option[Aggregation], Seq[FunctionContents])

private def functionBody[u: P]: P[BodyType] =
P(input.? ~ output.? ~ functionDefinitions)
P(funcInput.? ~ funcOutput.? ~ functionDefinitions)

/** Parses function literals, i.e.
*
@@ -51,12 +51,12 @@ private[parsing] trait HandlerParser extends CommonParser with ReferenceParser w
OnMessageClause(loc, msgRef, msgOrigins, statements.toContents, descriptives.toContents)
}

private def onClauses[u: P](set: StatementsSet): P[OnClause] = {
def onClause[u: P](set: StatementsSet): P[OnClause] = {
P(onInitClause(set) | onOtherClause(set) | onTermClause(set) | onMessageClause(set) )
}

private def handlerContents[u:P](set: StatementsSet): P[Seq[HandlerContents]] = {
(onClauses(set) | comment)./.rep(0).asInstanceOf[P[Seq[HandlerContents]]]
(onClause(set) | comment)./.rep(0).asInstanceOf[P[Seq[HandlerContents]]]
}

private def handlerBody[u: P](set: StatementsSet): P[Seq[HandlerContents]] = {
@@ -191,6 +191,8 @@ object Keywords {
def morph[u: P]: P[Unit] = keyword(Keyword.morph)

def name[u: P]: P[Unit] = keyword(Keyword.name)

def nebula[u:P]: P[Unit] = keyword(Keyword.nebula)

def on[u: P]: P[Unit] = keyword(Keyword.on)

@@ -399,6 +401,7 @@ object Keyword {
final val module = "module"
final val morph = "morph"
final val name = "name"
final val nebula = "nebula"
final val on = "on"
final val one = "one"
final val organization = "organization"
@@ -0,0 +1,34 @@
package com.ossuminc.riddl.language.parsing

import com.ossuminc.riddl.language.AST.*
import fastparse.*
import fastparse.MultiLineWhitespace.*

/** Parsing production rules for a Nebula, an unstructured collection of definitions. The full set of
* accepted definitions is given by the NebulaContents union type in AST.
* {{{
* Nebula = "nebula" "is" "{" NebulaContent* "}"
* NebulaContent = Adaptor | Application | Author | ... | UseCase | User
* }}}
*/
private[parsing] trait NebulaParser {
this: ProcessorParser & DomainParser & AdaptorParser & ApplicationParser & ContextParser & EntityParser &
EpicParser & FunctionParser & HandlerParser & ModuleParser & ProjectorParser & RepositoryParser &
RootParser & SagaParser & StreamingParser & TypeParser & Readability & CommonParser =>

private def nebulaContent[u:P]: P[NebulaContents] =
P(adaptor | application | author | connector | constant | containedGroup | context | domain |
entity | enumerator | epic | field | function | group | handler(StatementsSet.AllStatements) |
inlet | appInput | invariant | method | module | onClause(StatementsSet.AllStatements) | outlet | appOutput |
projector | relationship | repository | root | saga | sagaStep | schema | state | streamlet | term | typeDef |
useCase | user).map { (r: RiddlValue) => r.asInstanceOf[NebulaContents] }

private def nebulaContents[u:P]: P[Seq[NebulaContents]] =
P(nebulaContent).rep(0)

def nebula[u: P]: P[Nebula] = {
P(Start ~ Keywords.nebula ~ is ~ open ~ nebulaContents ~ close ~ End).map {
(contents: Seq[NebulaContents]) => Nebula(contents.toContents)
}
}
}
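
Since nebulaContents uses rep(0) and the production is anchored by Start and End, the whole input must be one nebula block, but that block may be empty. A hedged, untested sketch of that case, usable only inside a ParsingTest subclass such as NebulaTest above:

// Sketch only: parseNebula and RiddlParserInput come from this commit; the expectation
// that an empty body parses follows from nebulaContents being rep(0).
val emptyNebula = RiddlParserInput(
  """nebula is { }
    |""".stripMargin,
  "empty-nebula-sketch"
)
parseNebula(emptyNebula) match
  case Left(messages) => fail(messages.justErrors.format)
  case Right(_) => succeed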
@@ -45,18 +45,18 @@ private[parsing] trait RepositoryParser {
}
)
}
private def data[u:P]: P[(Identifier,TypeRef)] = {
P( of ~ identifier ~ as ~ typeRef )./

private def data[u: P]: P[(Identifier, TypeRef)] = {
P(of ~ identifier ~ as ~ typeRef)./
}
private def link[u:P]: P[(Identifier, FieldRef, FieldRef)] =
P (Keywords.link ~ identifier ~ as ~ fieldRef ~ to ~ fieldRef )./
private def index[u:P]: P[FieldRef] =

private def link[u: P]: P[(Identifier, FieldRef, FieldRef)] =
P(Keywords.link ~ identifier ~ as ~ fieldRef ~ to ~ fieldRef)./

private def index[u: P]: P[FieldRef] =
P(Keywords.index ~ Keywords.on ~ fieldRef)./
private def schema[u: P]: P[Schema] = {

def schema[u: P]: P[Schema] = {
P(
location ~ Keywords.schema ~ identifier ~ is ~ schemaKind ~
data.rep(1) ~ link.rep(0) ~ index.rep(0) ~ withDescriptives