Skip to content

Commit

Permalink
Bump up to Scala 2.13.12; Bump up sbt; Fix errors and warnings (#22)
Browse files Browse the repository at this point in the history
* Bump up to Scala 2.13.12; Bump up sbt; Fix errors and warnings

* Update src/main/scala/Compiler.scala

harnessConnect was mistakenly renamed to ections.

* Update src/main/scala/Harness.scala

harnessConnect was mistakenly renamed to ections.
  • Loading branch information
haoozi authored Jan 16, 2024
1 parent 18dd3d5 commit 9167e79
Show file tree
Hide file tree
Showing 26 changed files with 177 additions and 177 deletions.
9 changes: 4 additions & 5 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ version := "0.8-SNAPSHOT"

name := "essent"

scalaVersion := "2.12.18"
scalaVersion := "2.13.12"

scalacOptions ++= Seq("-deprecation", "-unchecked")

Expand All @@ -16,12 +16,11 @@ libraryDependencies += "org.json4s" %% "json4s-native" % "3.6.12"

libraryDependencies += "edu.berkeley.cs" %% "firrtl" % "1.5.6"


// Assembly

assemblyJarName in assembly := "essent.jar"
assembly / assemblyJarName := "essent.jar"

assemblyOutputPath in assembly := file("./utils/bin/essent.jar")
assembly / assemblyOutputPath:= file("./utils/bin/essent.jar")


// Ignore disabled .scala files
Expand All @@ -31,7 +30,7 @@ unmanagedSources / excludeFilter := HiddenFileFilter || "*disabled*.scala"

// Publishing setup
publishMavenStyle := true
publishArtifact in Test := false
Test / publishArtifact := false
pomIncludeRepository := { x => false }

// POM info
Expand Down
2 changes: 1 addition & 1 deletion project/build.properties
Original file line number Diff line number Diff line change
@@ -1 +1 @@
sbt.version=1.6.2
sbt.version=1.9.6
2 changes: 1 addition & 1 deletion src/main/scala/ActivityTracker.scala
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ class ActivityTracker(w: Writer, opt: OptFlags) {
}

def declareSigTracking(sg: StatementGraph, topName: String): Unit = {
val allNamesAndTypes = sg.collectValidStmts(sg.nodeRange) flatMap findStmtNameAndType
val allNamesAndTypes = sg.collectValidStmts(sg.nodeRange()) flatMap findStmtNameAndType
sigNameToID = (allNamesAndTypes map {
_._1
}).zipWithIndex.toMap
Expand Down
20 changes: 10 additions & 10 deletions src/main/scala/AcyclicPart.scala
Original file line number Diff line number Diff line change
Expand Up @@ -35,14 +35,14 @@ class AcyclicPart(val mg: MergeGraph, excludeSet: Set[NodeID]) extends LazyLoggi
totalInDegree + totalOutDegree - (mergedInDegree + mergedOutDegree)
}

def coarsenWithMFFCs() {
def coarsenWithMFFCs(): Unit = {
val mffcResults = MFFC(mg, excludeSet)
mg.applyInitialAssignments(mffcResults)
logger.info(s" #mffcs found: ${mg.mergeIDToMembers.size - excludeSet.size}")
logger.info(s" largest mffc: ${(mg.mergeIDToMembers.values.map{_.size}).max}")
}

def mergeSingleInputPartsIntoParents(smallPartCutoff: Int = 20) {
def mergeSingleInputPartsIntoParents(smallPartCutoff: Int = 20): Unit = {
val smallPartIDs = findSmallParts(smallPartCutoff)
val singleInputIDs = smallPartIDs filter { id => (mg.inNeigh(id).size == 1) }
val singleInputParents = (singleInputIDs flatMap mg.inNeigh).distinct
Expand All @@ -57,7 +57,7 @@ class AcyclicPart(val mg: MergeGraph, excludeSet: Set[NodeID]) extends LazyLoggi
mergeSingleInputPartsIntoParents(smallPartCutoff)
}

def mergeSmallSiblings(smallPartCutoff: Int = 10) {
def mergeSmallSiblings(smallPartCutoff: Int = 10): Unit = {
val smallPartIDs = findSmallParts(smallPartCutoff)
val inputsAndIDPairs = smallPartIDs map { id => {
val inputsCanonicalized = mg.inNeigh(id).toSeq.sorted
Expand All @@ -75,11 +75,11 @@ class AcyclicPart(val mg: MergeGraph, excludeSet: Set[NodeID]) extends LazyLoggi
}
}

def mergeSmallParts(smallPartCutoff: Int = 20, mergeThreshold: Double = 0.5) {
def mergeSmallParts(smallPartCutoff: Int = 20, mergeThreshold: Double = 0.5): Unit = {
val smallPartIDs = findSmallParts(smallPartCutoff)
val mergesToConsider = smallPartIDs flatMap { id => {
val numInputs = mg.inNeigh(id).size.toDouble
val siblings = (mg.inNeigh(id) flatMap mg.outNeigh).distinct - id
val siblings = (mg.inNeigh(id) flatMap mg.outNeigh).distinct.filter(_ != id)
val legalSiblings = siblings filter { sibID => !excludeSet.contains(sibID) }
val orderConstrSibs = legalSiblings filter { _ < id }
val myInputSet = mg.inNeigh(id).toSet
Expand All @@ -101,7 +101,7 @@ class AcyclicPart(val mg: MergeGraph, excludeSet: Set[NodeID]) extends LazyLoggi
}
}

def mergeSmallPartsDown(smallPartCutoff: Int = 20) {
def mergeSmallPartsDown(smallPartCutoff: Int = 20): Unit = {
val smallPartIDs = findSmallParts(smallPartCutoff)
val mergesToConsider = smallPartIDs flatMap { id => {
val mergeableChildren = mg.outNeigh(id) filter {
Expand All @@ -122,7 +122,7 @@ class AcyclicPart(val mg: MergeGraph, excludeSet: Set[NodeID]) extends LazyLoggi
}
}

def partition(smallPartCutoff: Int = 20) {
def partition(smallPartCutoff: Int = 20): Unit = {
val toApply = Seq(
("mffc", {ap: AcyclicPart => ap.coarsenWithMFFCs()}),
("single", {ap: AcyclicPart => ap.mergeSingleInputPartsIntoParents()}),
Expand All @@ -141,16 +141,16 @@ class AcyclicPart(val mg: MergeGraph, excludeSet: Set[NodeID]) extends LazyLoggi
assert(checkPartioning())
}

def iterParts() = mg.iterGroups
def iterParts() = mg.iterGroups()

def checkPartioning() = {
val includedSoFar = HashSet[NodeID]()
val disjoint = mg.iterGroups forall { case (macroID, memberIDs) => {
val disjoint = mg.iterGroups() forall { case (macroID, memberIDs) => {
val overlap = includedSoFar.intersect(memberIDs.toSet).nonEmpty
includedSoFar ++= memberIDs
!overlap
}}
val complete = includedSoFar == mg.nodeRange.toSet
val complete = includedSoFar == mg.nodeRange().toSet
disjoint && complete
}
}
Expand Down
2 changes: 1 addition & 1 deletion src/main/scala/ArgsParser.scala
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ case class OptFlags(
essentLogLevel: String = "warn",
firrtlLogLevel: String = "warn") {
def inputFileDir() = firInputFile.getParent
def outputDir() = if (inputFileDir == null) "" else inputFileDir()
def outputDir() = if (inputFileDir() == null) "" else inputFileDir()
}

class ArgsParser {
Expand Down
36 changes: 18 additions & 18 deletions src/main/scala/Compiler.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,12 @@ import logger._

class EssentEmitter(initialOpt: OptFlags, w: Writer, circuit: Circuit) extends LazyLogging {
val flagVarName = "PARTflags"
implicit val rn = new Renamer
implicit val rn: Renamer = new Renamer
val actTrac = new ActivityTracker(w, initialOpt)
val vcd = if (initialOpt.withVCD) Some(new Vcd(circuit,initialOpt,w,rn)) else None
val vcd: Option[Vcd] = if (initialOpt.withVCD) Some(new Vcd(circuit,initialOpt,w,rn)) else None
// Declaring Modules
//----------------------------------------------------------------------------
def declareModule(m: Module, topName: String) {
def declareModule(m: Module, topName: String): Unit = {
val registers = findInstancesOf[DefRegister](m.body)
val memories = findInstancesOf[DefMemory](m.body)
val registerDecs = registers flatMap {d: DefRegister => {
Expand Down Expand Up @@ -57,7 +57,7 @@ class EssentEmitter(initialOpt: OptFlags, w: Writer, circuit: Circuit) extends L
}
}

def declareExtModule(m: ExtModule) {
def declareExtModule(m: ExtModule): Unit = {
val modName = m.name
w.writeLines(0, "")
w.writeLines(0, s"typedef struct $modName {")
Expand All @@ -70,8 +70,8 @@ class EssentEmitter(initialOpt: OptFlags, w: Writer, circuit: Circuit) extends L
//----------------------------------------------------------------------------
// TODO: move specialized CondMux emitter elsewhere?
def writeBodyInner(indentLevel: Int, sg: StatementGraph, opt: OptFlags,
keepAvail: Set[String] = Set()) {
sg.stmtsOrdered foreach { stmt => stmt match {
keepAvail: Set[String] = Set()): Unit = {
sg.stmtsOrdered() foreach { stmt => stmt match {
case cm: CondMux => {
if (rn.nameToMeta(cm.name).decType == MuxOut)
w.writeLines(indentLevel, s"${genCppType(cm.mux.tpe)} ${rn.emit(cm.name)};")
Expand All @@ -91,8 +91,8 @@ class EssentEmitter(initialOpt: OptFlags, w: Writer, circuit: Circuit) extends L
}}
}

def checkRegResetSafety(sg: StatementGraph) {
val updatesWithResets = sg.allRegDefs filter { r => emitExpr(r.reset) != "UInt<1>(0x0)" }
def checkRegResetSafety(sg: StatementGraph): Unit = {
val updatesWithResets = sg.allRegDefs() filter { r => emitExpr(r.reset) != "UInt<1>(0x0)" }
assert(updatesWithResets.isEmpty)
}

Expand All @@ -117,7 +117,7 @@ class EssentEmitter(initialOpt: OptFlags, w: Writer, circuit: Circuit) extends L
condPartWorker: MakeCondPart,
topName: String,
extIOtypes: Map[String, Type],
opt: OptFlags) {
opt: OptFlags): Unit = {
// predeclare part outputs
val outputPairs = condPartWorker.getPartOutputsToDeclare()
val outputConsumers = condPartWorker.getPartInputMap()
Expand All @@ -134,7 +134,7 @@ class EssentEmitter(initialOpt: OptFlags, w: Writer, circuit: Circuit) extends L
w.writeLines(1, s"bool done_reset;")
w.writeLines(1, s"bool verbose;")
w.writeLines(0, "")
sg.stmtsOrdered foreach { stmt => stmt match {
sg.stmtsOrdered() foreach { stmt => stmt match {
case cp: CondPart => {
w.writeLines(1, s"void ${genEvalFuncName(cp.id)}() {")
if (!cp.alwaysActive)
Expand Down Expand Up @@ -171,7 +171,7 @@ class EssentEmitter(initialOpt: OptFlags, w: Writer, circuit: Circuit) extends L
w.writeLines(0, "")
}

def writeZoningBody(sg: StatementGraph, condPartWorker: MakeCondPart, opt: OptFlags) {
def writeZoningBody(sg: StatementGraph, condPartWorker: MakeCondPart, opt: OptFlags): Unit = {
w.writeLines(2, "if (reset || !done_reset) {")
w.writeLines(3, "sim_cached = false;")
w.writeLines(3, "regs_set = false;")
Expand All @@ -192,7 +192,7 @@ class EssentEmitter(initialOpt: OptFlags, w: Writer, circuit: Circuit) extends L
sigName => s"${rn.emit(sigName + condPartWorker.cacheSuffix)} = ${rn.emit(sigName)};"
}
w.writeLines(2, extIOCaches.toSeq)
sg.stmtsOrdered foreach { stmt => stmt match {
sg.stmtsOrdered() foreach { stmt => stmt match {
case cp: CondPart => {
if (!cp.alwaysActive)
w.writeLines(2, s"if (UNLIKELY($flagVarName[${cp.id}])) ${genEvalFuncName(cp.id)}();")
Expand All @@ -210,7 +210,7 @@ class EssentEmitter(initialOpt: OptFlags, w: Writer, circuit: Circuit) extends L

// General Structure (and Compiler Boilerplate)
//----------------------------------------------------------------------------
def execute(circuit: Circuit) {
def execute(circuit: Circuit): Unit = {
val opt = initialOpt
val topName = circuit.main
val headerGuardName = topName.toUpperCase + "_H_"
Expand All @@ -234,7 +234,7 @@ class EssentEmitter(initialOpt: OptFlags, w: Writer, circuit: Circuit) extends L
w.writeLines(1,s"""char VCD_BUF[2000];""")
}
val sg = StatementGraph(circuit, opt.removeFlatConnects)
logger.info(sg.makeStatsString)
logger.info(sg.makeStatsString())
val containsAsserts = sg.containsStmtOfType[Stop]()
val extIOMap = findExternalPorts(circuit)
val condPartWorker = MakeCondPart(sg, rn, extIOMap)
Expand Down Expand Up @@ -332,10 +332,10 @@ class EssentCompiler(opt: OptFlags) {
Dependency(essent.passes.ReplaceRsvdKeywords)
)

def compileAndEmit(circuit: Circuit) {
def compileAndEmit(circuit: Circuit): Unit = {
val topName = circuit.main
if (opt.writeHarness) {
val harnessFilename = new File(opt.outputDir, s"$topName-harness.cc")
val harnessFilename = new File(opt.outputDir(), s"$topName-harness.cc")
val harnessWriter = new FileWriter(harnessFilename)
if (opt.withVCD) { HarnessGenerator.topFile(topName, harnessWriter," | dut.genWaveHeader();") }
else { HarnessGenerator.topFile(topName, harnessWriter, "")}
Expand All @@ -344,11 +344,11 @@ class EssentCompiler(opt: OptFlags) {
val firrtlCompiler = new transforms.Compiler(readyForEssent)
val resultState = firrtlCompiler.execute(CircuitState(circuit, Seq()))
if (opt.dumpLoFirrtl) {
val debugWriter = new FileWriter(new File(opt.outputDir, s"$topName.lo.fir"))
val debugWriter = new FileWriter(new File(opt.outputDir(), s"$topName.lo.fir"))
debugWriter.write(resultState.circuit.serialize)
debugWriter.close()
}
val dutWriter = new FileWriter(new File(opt.outputDir, s"$topName.h"))
val dutWriter = new FileWriter(new File(opt.outputDir(), s"$topName.h"))
val emitter = new EssentEmitter(opt, dutWriter,resultState.circuit)
emitter.execute(resultState.circuit)
dutWriter.close()
Expand Down
8 changes: 4 additions & 4 deletions src/main/scala/Driver.scala
Original file line number Diff line number Diff line change
Expand Up @@ -7,18 +7,18 @@ import logger._


object Driver {
def main(args: Array[String]) {
(new ArgsParser).getConfig(args) match {
def main(args: Array[String]): Unit = {
(new ArgsParser).getConfig(args.toSeq) match {
case Some(config) => generate(config)
case None =>
}
}

def generate(opt: OptFlags) {
def generate(opt: OptFlags): Unit = {
Logger.setClassLogLevels(Map("essent" -> logger.LogLevel(opt.essentLogLevel)))
Logger.setClassLogLevels(Map("firrtl" -> logger.LogLevel(opt.firrtlLogLevel)))
val sourceReader = Source.fromFile(opt.firInputFile)
val circuit = firrtl.Parser.parse(sourceReader.getLines, firrtl.Parser.IgnoreInfo)
val circuit = firrtl.Parser.parse(sourceReader.getLines(), firrtl.Parser.IgnoreInfo)
sourceReader.close()
val compiler = new EssentCompiler(opt)
compiler.compileAndEmit(circuit)
Expand Down
2 changes: 1 addition & 1 deletion src/main/scala/Emitter.scala
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,7 @@ object Emitter {
val printWidth = math.ceil(width.toDouble/4).toInt
(format, s"""%0${printWidth}" PRIx64 """")
} else {
val printWidth = math.ceil(math.log10((1l<<width.toInt).toDouble)).toInt
val printWidth = math.ceil(math.log10((1L<<width.toInt).toDouble)).toInt
(format, s"""%${printWidth}" PRIu64 """")
}
}
Expand Down
4 changes: 2 additions & 2 deletions src/main/scala/Extract.scala
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ object Extract extends LazyLogging {
case ru: RegUpdate => Seq(HyperedgeDep(emitExpr(ru.regRef)+"$final", findDependencesExpr(ru.expr), s))
case mw: MemWrite =>
val deps = Seq(mw.wrEn, mw.wrMask, mw.wrAddr, mw.wrData) flatMap findDependencesExpr
Seq(HyperedgeDep(mw.nodeName, deps.distinct, s))
Seq(HyperedgeDep(mw.nodeName(), deps.distinct, s))
case p: Print =>
val deps = (Seq(p.en) ++ p.args) flatMap findDependencesExpr
val uniqueName = "PRINTF" + emitExpr(p.clk) + deps.mkString("$") + Util.tidyString(p.string.serialize)
Expand Down Expand Up @@ -216,7 +216,7 @@ object Extract extends LazyLogging {
namesToExclude: Set[String]): Seq[Statement] = {
def isRef(e: Expression): Boolean = e.isInstanceOf[WRef] || e.isInstanceOf[WSubField]
def findChainRenames(sg: StatementGraph): Map[String, String] = {
val sourceIDs = sg.nodeRange filter { sg.inNeigh(_).isEmpty }
val sourceIDs = sg.nodeRange() filter { sg.inNeigh(_).isEmpty }
def reachableIDs(id: Int): Seq[Int] = {
Seq(id) ++ (sg.outNeigh(id) flatMap reachableIDs)
}
Expand Down
18 changes: 9 additions & 9 deletions src/main/scala/Graph.scala
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ class Graph {

// Graph building
//----------------------------------------------------------------------------
def growNeighsIfNeeded(id: NodeID) {
def growNeighsIfNeeded(id: NodeID): Unit = {
assert(id >= 0)
if (id >= outNeigh.size) {
val numElemsToGrow = id - outNeigh.size + 1
Expand All @@ -29,13 +29,13 @@ class Graph {
}
}

def addEdge(sourceID: NodeID, destID: NodeID) {
def addEdge(sourceID: NodeID, destID: NodeID): Unit = {
growNeighsIfNeeded(math.max(sourceID, destID))
outNeigh(sourceID) += destID
inNeigh(destID) += sourceID
}

def addEdgeIfNew(sourceID: NodeID, destID: NodeID) {
def addEdgeIfNew(sourceID: NodeID, destID: NodeID): Unit = {
if ((sourceID >= outNeigh.size) || !outNeigh(sourceID).contains(destID))
addEdge(sourceID, destID)
}
Expand Down Expand Up @@ -79,16 +79,16 @@ class Graph {

// Mutators
//----------------------------------------------------------------------------
def removeDuplicateEdges() {
def removeDuplicateEdges(): Unit = {
// will not remove self-loops
def uniquifyNeighs(neighs: AdjacencyList) {
(0 until neighs.size) foreach { id => neighs(id) = neighs(id).distinct }
def uniquifyNeighs(neighs: AdjacencyList): Unit = {
neighs.indices foreach { id => neighs(id) = neighs(id).distinct }
}
uniquifyNeighs(outNeigh)
uniquifyNeighs(inNeigh)
}

def mergeNodesMutably(mergeDest: NodeID, mergeSources: Seq[NodeID]) {
def mergeNodesMutably(mergeDest: NodeID, mergeSources: Seq[NodeID]): Unit = {
val mergedID = mergeDest
val idsToRemove = mergeSources
val idsToMerge = mergeSources :+ mergeDest
Expand All @@ -103,8 +103,8 @@ class Graph {
inNeigh(outNeighID) --= idsToRemove
if (!inNeigh(outNeighID).contains(mergedID)) inNeigh(outNeighID) += mergedID
}}
inNeigh(mergedID) = combinedInNeigh.to[ArrayBuffer]
outNeigh(mergedID) = combinedOutNeigh.to[ArrayBuffer]
inNeigh(mergedID) = combinedInNeigh.to(ArrayBuffer)
outNeigh(mergedID) = combinedOutNeigh.to(ArrayBuffer)
idsToRemove foreach { deleteID => {
inNeigh(deleteID).clear()
outNeigh(deleteID).clear()
Expand Down
7 changes: 4 additions & 3 deletions src/main/scala/Harness.scala
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import firrtl._
import firrtl.ir._

import java.io.Writer
import scala.collection.mutable.ArrayBuffer

object HarnessGenerator {
def harnessConnections(m: Module) = {
Expand Down Expand Up @@ -54,9 +55,9 @@ object HarnessGenerator {
val mapConnects = (internalNames.zipWithIndex) map {
case (label: String, index: Int) => s"""comm->map_signal("$modName.$label", $index);"""
}
(origOrderInputNames ++ reorderPorts(inputNames) map connectSignal("in_")) ++
(reorderPorts(outputNames) map connectSignal("out_")) ++
(reorderPorts(signalNames) map connectSignal("")) ++ mapConnects
((origOrderInputNames ++ reorderPorts(inputNames.toSeq) map {connectSignal("in_")(_)}) ++
(reorderPorts(outputNames.toSeq) map {connectSignal("out_")(_)}) ++
(reorderPorts(signalNames.toSeq) map {connectSignal("")(_)}) ++ mapConnects).toSeq
}

def topFile(circuitName: String, writer: Writer , vcdHeader: String) = {
Expand Down
Loading

0 comments on commit 9167e79

Please sign in to comment.