Commit 37331e13 authored by Jens Korinth's avatar Jens Korinth
Browse files

Fix scalastyle warnings

* simple ones fixed
* some require more refactoring, will keep for later
parent 41f05e6f
...@@ -34,6 +34,7 @@ object Tapasco { ...@@ -34,6 +34,7 @@ object Tapasco {
import ch.qos.logback.classic.spi.ILoggingEvent import ch.qos.logback.classic.spi.ILoggingEvent
private[this] val logger = de.tu_darmstadt.cs.esa.tapasco.Logging.logger(this.getClass) private[this] val logger = de.tu_darmstadt.cs.esa.tapasco.Logging.logger(this.getClass)
private[this] val logFileAppender: FileAppender[ILoggingEvent] = new FileAppender() private[this] val logFileAppender: FileAppender[ILoggingEvent] = new FileAppender()
private[this] final val UNLIMITED_THREADS = 1000
private def setupLogFileAppender(file: String, quiet: Boolean = false) = { private def setupLogFileAppender(file: String, quiet: Boolean = false) = {
val ctx = LoggerFactory.getILoggerFactory().asInstanceOf[LoggerContext] val ctx = LoggerFactory.getILoggerFactory().asInstanceOf[LoggerContext]
...@@ -64,6 +65,8 @@ object Tapasco { ...@@ -64,6 +65,8 @@ object Tapasco {
System.exit(0) System.exit(0)
} }
// scalastyle:off cyclomatic.complexity
// scalastyle:off method.length
def main(args: Array[String]) { def main(args: Array[String]) {
implicit val tasks = new Tasks implicit val tasks = new Tasks
val ok = try { val ok = try {
...@@ -85,12 +88,13 @@ object Tapasco { ...@@ -85,12 +88,13 @@ object Tapasco {
logger.trace("parallel: {}", cfg.parallel) logger.trace("parallel: {}", cfg.parallel)
cfg.logFile map { logfile: Path => setupLogFileAppender(logfile.toString) } cfg.logFile map { logfile: Path => setupLogFileAppender(logfile.toString) }
logger.info("Running with configuration: {}", cfg.toString) logger.info("Running with configuration: {}", cfg.toString)
implicit val exe = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(500)) implicit val exe = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(UNLIMITED_THREADS))
def get(f: Future[Boolean]): Boolean = { Await.ready(f, duration.Duration.Inf); f.value map (_ getOrElse false) getOrElse false } def get(f: Future[Boolean]): Boolean = { Await.ready(f, duration.Duration.Inf); f.value map (_ getOrElse false) getOrElse false }
if (cfg.parallel) if (cfg.parallel) {
runGui(args) || (cfg.jobs map { j => Future { jobs.executors.execute(j) } } map (get _) fold true) (_ && _) runGui(args) || (cfg.jobs map { j => Future { jobs.executors.execute(j) } } map (get _) fold true) (_ && _)
else } else {
runGui(args) || (cfg.jobs map { jobs.executors.execute(_) } fold true) (_ && _) runGui(args) || (cfg.jobs map { jobs.executors.execute(_) } fold true) (_ && _)
}
} else { } else {
logger.error("invalid arguments: {}", c.left.get.toString) logger.error("invalid arguments: {}", c.left.get.toString)
logger.error("run `tapasco -h` or `tapasco --help` to get more info") logger.error("run `tapasco -h` or `tapasco --help` to get more info")
...@@ -117,4 +121,6 @@ object Tapasco { ...@@ -117,4 +121,6 @@ object Tapasco {
logger.info("TPC finished successfully") logger.info("TPC finished successfully")
} }
} }
// scalastyle:on method.length
// scalastyle:on cyclomatic.complexity
} }
...@@ -51,6 +51,7 @@ class DesignSpace( ...@@ -51,6 +51,7 @@ class DesignSpace(
val designFrequency: Heuristics.Frequency val designFrequency: Heuristics.Frequency
)(implicit cfg: Configuration) { )(implicit cfg: Configuration) {
import scala.util.Properties.{lineSeparator => NL} import scala.util.Properties.{lineSeparator => NL}
private[this] final val DEFAULT_CLOCK_PERIOD_NS = 4 // default: 250 MHz
private[this] val logger = de.tu_darmstadt.cs.esa.tapasco.Logging.logger(this.getClass) private[this] val logger = de.tu_darmstadt.cs.esa.tapasco.Logging.logger(this.getClass)
logger.trace(Seq("DesignSpace(", dim, ")") mkString) logger.trace(Seq("DesignSpace(", dim, ")") mkString)
...@@ -60,7 +61,7 @@ class DesignSpace( ...@@ -60,7 +61,7 @@ class DesignSpace(
val cores = bd.composition flatMap (ce => FileAssetManager.entities.core(ce.kernel, target)) val cores = bd.composition flatMap (ce => FileAssetManager.entities.core(ce.kernel, target))
val srs = cores flatMap { c: Core => FileAssetManager.reports.synthReport(c.name, target) } val srs = cores flatMap { c: Core => FileAssetManager.reports.synthReport(c.name, target) }
val cps = srs flatMap (_.timing) map (_.clockPeriod) val cps = srs flatMap (_.timing) map (_.clockPeriod)
val fmax = 1000.0 / (if (cps.nonEmpty) cps.max else 4) // default: 250 MHz val fmax = 1000.0 / (if (cps.nonEmpty) cps.max else DEFAULT_CLOCK_PERIOD_NS)
target.pd.supportedFrequencies map (_.toDouble) filter (_ <= fmax) sortWith (_>_) target.pd.supportedFrequencies map (_.toDouble) filter (_ <= fmax) sortWith (_>_)
} else { } else {
Seq(designFrequency) Seq(designFrequency)
......
...@@ -15,7 +15,7 @@ class LogTrackingFileWatcher(_logger: Option[Logger] = None, pollInterval: Int = ...@@ -15,7 +15,7 @@ class LogTrackingFileWatcher(_logger: Option[Logger] = None, pollInterval: Int =
extends MultiFileWatcher(POLL_INTERVAL) { extends MultiFileWatcher(POLL_INTERVAL) {
private[this] final val logger = _logger getOrElse de.tu_darmstadt.cs.esa.tapasco.Logging.logger(getClass) private[this] final val logger = _logger getOrElse de.tu_darmstadt.cs.esa.tapasco.Logging.logger(getClass)
private object listener extends Listener[Event]{ private lazy val listener = new Listener[Event] {
def update(e: MultiFileWatcher.Event): Unit = e match { def update(e: MultiFileWatcher.Event): Unit = e match {
case LinesAdded(src, ls) => ls map { l => case LinesAdded(src, ls) => ls map { l =>
logger.info(l) logger.info(l)
......
...@@ -12,6 +12,7 @@ import java.nio.file._ ...@@ -12,6 +12,7 @@ import java.nio.file._
* via the [[dump]] method. * via the [[dump]] method.
*/ */
object JobExamples { object JobExamples {
// scalastyle:off magic.number
val bulkImportJob = BulkImportJob(Paths.get("some.csv")) val bulkImportJob = BulkImportJob(Paths.get("some.csv"))
val composition = Composition(Paths.get("N/A"), val composition = Composition(Paths.get("N/A"),
Some("An optional description."), Some("An optional description."),
...@@ -63,4 +64,5 @@ object JobExamples { ...@@ -63,4 +64,5 @@ object JobExamples {
fw.close() fw.close()
} }
} }
// scalastyle:on magic.number
} }
...@@ -131,14 +131,14 @@ final case class DesignSpaceExplorationJob( ...@@ -131,14 +131,14 @@ final case class DesignSpaceExplorationJob(
FileAssetManager.entities.architectures filter (a => _architectures map (_.contains(a.name)) getOrElse true) FileAssetManager.entities.architectures filter (a => _architectures map (_.contains(a.name)) getOrElse true)
/** Returns the name filter for [[base.Architecture]] instances. */ /** Returns the name filter for [[base.Architecture]] instances. */
def architectureNames = _architectures def architectureNames: Option[Seq[String]] = _architectures
/** Returns the list of [[base.Platform]] instances selected in this job. */ /** Returns the list of [[base.Platform]] instances selected in this job. */
def platforms: Set[Platform] = def platforms: Set[Platform] =
FileAssetManager.entities.platforms filter (p => _platforms map (_.contains(p.name)) getOrElse true) FileAssetManager.entities.platforms filter (p => _platforms map (_.contains(p.name)) getOrElse true)
/** Returns the name filter for [[base.Platform]] instances. */ /** Returns the name filter for [[base.Platform]] instances. */
def platformNames = _platforms def platformNames: Option[Seq[String]] = _platforms
/** Returns the first target (alphabetically Arch - Platform). */ /** Returns the first target (alphabetically Arch - Platform). */
def target: Target = targets.head def target: Target = targets.head
......
...@@ -30,10 +30,6 @@ private object ComposeParser { ...@@ -30,10 +30,6 @@ private object ComposeParser {
case ("Architectures", as: Seq[String @unchecked]) => _.copy(_architectures = Some(as)) case ("Architectures", as: Seq[String @unchecked]) => _.copy(_architectures = Some(as))
case ("Platforms", ps: Seq[String @unchecked]) => _.copy(_platforms = Some(ps)) case ("Platforms", ps: Seq[String @unchecked]) => _.copy(_platforms = Some(ps))
case ("Features", fs: Seq[Feature @unchecked]) => _.copy(features = Some(fs)) case ("Features", fs: Seq[Feature @unchecked]) => _.copy(features = Some(fs))
case ("Features", fs: Map[_, _]) => { job =>
logger.warn("new features not implemented yet!")
job
}
case ("DebugMode", m: String) => _.copy(debugMode = Some(m)) case ("DebugMode", m: String) => _.copy(debugMode = Some(m))
case o => throw new Exception(s"parsed illegal option: $o") case o => throw new Exception(s"parsed illegal option: $o")
} }
......
...@@ -54,6 +54,8 @@ final object Slurm extends Publisher { ...@@ -54,6 +54,8 @@ final object Slurm extends Publisher {
final val slurmDelay = 15000 // 15 secs final val slurmDelay = 15000 // 15 secs
/** Set of POSIX permissions for SLURM job scripts. */ /** Set of POSIX permissions for SLURM job scripts. */
final val slurmScriptPermissions = Set(OWNER_READ, OWNER_WRITE, OWNER_EXECUTE, GROUP_READ, OTHERS_READ).asJava final val slurmScriptPermissions = Set(OWNER_READ, OWNER_WRITE, OWNER_EXECUTE, GROUP_READ, OTHERS_READ).asJava
/** Wait interval between retries. */
final val slurmRetryDelay = 10000 // 10 secs
/** Returns true if SLURM is available on host running iTPC. */ /** Returns true if SLURM is available on host running iTPC. */
lazy val available: Boolean = "which sbatch".! == 0 lazy val available: Boolean = "which sbatch".! == 0
...@@ -116,11 +118,11 @@ final object Slurm extends Publisher { ...@@ -116,11 +118,11 @@ final object Slurm extends Publisher {
logger.debug("running slurm batch job: '%s'".format(cmd)) logger.debug("running slurm batch job: '%s'".format(cmd))
val res = cmd.!! val res = cmd.!!
val id = slurmSubmissionAck.findFirstMatchIn(res) map (_ group (1) toInt) val id = slurmSubmissionAck.findFirstMatchIn(res) map (_ group (1) toInt)
if (id.isEmpty ) { if (id.isEmpty) {
if (retries > 0) { if (retries > 0) {
Thread.sleep(10000) // wait 10 secs Thread.sleep(slurmRetryDelay) // wait 10 secs
apply(script, retries - 1) apply(script, retries - 1)
} else throw new SlurmException(script.toString, res) } else { throw new SlurmException(script.toString, res) }
} else { } else {
logger.debug("received SLURM id: {}", id) logger.debug("received SLURM id: {}", id)
id id
......
...@@ -70,8 +70,7 @@ class ComposeTask(composition: Composition, ...@@ -70,8 +70,7 @@ class ComposeTask(composition: Composition,
LogFileTracker.stopLogFileAppender(appender) LogFileTracker.stopLogFileAppender(appender)
val result = (_composerResult map (_.result) getOrElse false) == ComposeResult.Success val result = (_composerResult map (_.result) getOrElse false) == ComposeResult.Success
if (result) if (result) { composer.clean(composition, target, designFrequency) }
composer.clean(composition, target, designFrequency)
result result
} }
......
...@@ -53,11 +53,11 @@ object ZipUtils { ...@@ -53,11 +53,11 @@ object ZipUtils {
((exclude map (r => r.findFirstIn(zipEntry.toString()).isEmpty) fold true) (_&&_))) { ((exclude map (r => r.findFirstIn(zipEntry.toString()).isEmpty) fold true) (_&&_))) {
logger.trace(zipFile + ": extracting " + zipEntry) logger.trace(zipFile + ": extracting " + zipEntry)
val buffer = new Array[Byte](bufsz) val buffer = new Array[Byte](bufsz)
val outname = tempdir.resolve(if (flatten) val outname = tempdir.resolve(if (flatten) {
Paths.get(zipEntry.getName()).getFileName() Paths.get(zipEntry.getName()).getFileName()
else } else {
Paths.get(zipEntry.getName()) Paths.get(zipEntry.getName())
) })
logger.trace("outname = {}}", outname) logger.trace("outname = {}}", outname)
Option(outname.getParent) foreach { p => if (!p.toFile.exists()) Files.createDirectories(p) } Option(outname.getParent) foreach { p => if (!p.toFile.exists()) Files.createDirectories(p) }
val dest = new BufferedOutputStream(new FileOutputStream(outname.toString), bufsz) val dest = new BufferedOutputStream(new FileOutputStream(outname.toString), bufsz)
...@@ -84,7 +84,7 @@ object ZipUtils { ...@@ -84,7 +84,7 @@ object ZipUtils {
* @param zipFile Path to output zip file. * @param zipFile Path to output zip file.
* @param files Sequence of files to pack. * @param files Sequence of files to pack.
*/ */
def zipFile(zipFile: Path, files: Seq[Path]) = { def zipFile(zipFile: Path, files: Seq[Path]) {
import java.util.zip._ import java.util.zip._
import java.io.{BufferedOutputStream, FileOutputStream} import java.io.{BufferedOutputStream, FileOutputStream}
val zos = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(zipFile.toFile))) val zos = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(zipFile.toFile)))
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment