tapasco / tapasco · Commits · 37331e13

Commit 37331e13 authored Aug 09, 2017 by Jens Korinth
Fix scalastyle warnings

* simple ones fixed
* some require more refactoring, will keep for later

parent 41f05e6f
16 changed files
src/main/scala/tapasco/Tapasco.scala

@@ -34,6 +34,7 @@ object Tapasco {
   import ch.qos.logback.classic.spi.ILoggingEvent
   private[this] val logger = de.tu_darmstadt.cs.esa.tapasco.Logging.logger(this.getClass)
   private[this] val logFileAppender: FileAppender[ILoggingEvent] = new FileAppender()
+  private[this] final val UNLIMITED_THREADS = 1000
   private def setupLogFileAppender(file: String, quiet: Boolean = false) = {
     val ctx = LoggerFactory.getILoggerFactory().asInstanceOf[LoggerContext]

@@ -64,6 +65,8 @@ object Tapasco {
     System.exit(0)
   }
+  // scalastyle:off cyclomatic.complexity
+  // scalastyle:off method.length
   def main(args: Array[String]) {
     implicit val tasks = new Tasks
     val ok = try {

@@ -85,12 +88,13 @@ object Tapasco {
       logger.trace("parallel: {}", cfg.parallel)
       cfg.logFile map { logfile: Path => setupLogFileAppender(logfile.toString) }
       logger.info("Running with configuration: {}", cfg.toString)
-      implicit val exe = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(500))
+      implicit val exe = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(UNLIMITED_THREADS))
       def get(f: Future[Boolean]): Boolean = {
         Await.ready(f, duration.Duration.Inf); f.value map (_ getOrElse false) getOrElse false
       }
-      if (cfg.parallel)
+      if (cfg.parallel) {
         runGui(args) || (cfg.jobs map { j => Future { jobs.executors.execute(j) } } map (get _) fold true) (_ && _)
-      else
+      } else {
         runGui(args) || (cfg.jobs map { jobs.executors.execute(_) } fold true) (_ && _)
+      }
     } else {
       logger.error("invalid arguments: {}", c.left.get.toString)
       logger.error("run `tapasco -h` or `tapasco --help` to get more info")

@@ -117,4 +121,6 @@ object Tapasco {
       logger.info("TPC finished successfully")
     }
   }
+  // scalastyle:on method.length
+  // scalastyle:on cyclomatic.complexity
 }
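The parallel branch above starts one Future per job and folds the Boolean results with &&, blocking on each result via the small get helper. A minimal, self-contained sketch of that pattern follows; runJob and the pool size are illustrative stand-ins, not code from the repository:

import scala.concurrent.{ duration, Await, ExecutionContext, Future }

object ParallelJobsSketch {
  // hypothetical stand-in for jobs.executors.execute(j) in Tapasco.main
  def runJob(i: Int): Boolean = i % 2 == 0

  def main(args: Array[String]): Unit = {
    // same shape as the diff: an ExecutionContext backed by a ForkJoinPool
    implicit val exe: ExecutionContext =
      ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(4))

    // block on one Future[Boolean]; any failure counts as false
    def get(f: Future[Boolean]): Boolean = {
      Await.ready(f, duration.Duration.Inf)
      f.value map (_ getOrElse false) getOrElse false
    }

    // launch all jobs concurrently, wait for each, and fold the results with &&
    val ok = ((1 to 4) map { i => Future { runJob(i) } } map (get _) fold true) (_ && _)
    println(s"all jobs ok: $ok")
  }
}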
src/main/scala/tapasco/dse/DesignSpace.scala

@@ -51,6 +51,7 @@ class DesignSpace(
     val designFrequency: Heuristics.Frequency)(implicit cfg: Configuration) {
   import scala.util.Properties.{ lineSeparator => NL }
+  private[this] final val DEFAULT_CLOCK_PERIOD_NS = 4 // default: 250 MHz
   private[this] val logger = de.tu_darmstadt.cs.esa.tapasco.Logging.logger(this.getClass)
   logger.trace(Seq("DesignSpace(", dim, ")") mkString)

@@ -60,7 +61,7 @@ class DesignSpace(
     val cores = bd.composition flatMap (ce => FileAssetManager.entities.core(ce.kernel, target))
     val srs = cores flatMap { c: Core => FileAssetManager.reports.synthReport(c.name, target) }
     val cps = srs flatMap (_.timing) map (_.clockPeriod)
-    val fmax = 1000.0 / (if (cps.nonEmpty) cps.max else 4) // default: 250 MHz
+    val fmax = 1000.0 / (if (cps.nonEmpty) cps.max else DEFAULT_CLOCK_PERIOD_NS)
     target.pd.supportedFrequencies map (_.toDouble) filter (_ <= fmax) sortWith (_ > _)
   } else {
     Seq(designFrequency)
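The fmax computation above converts the slowest reported clock period (in nanoseconds) into a maximum frequency in MHz (1000 / period), falling back to the new DEFAULT_CLOCK_PERIOD_NS constant when no synthesis reports are available. A small sketch with made-up input values:

object FrequencySketch {
  // same constant as introduced in the diff: 4 ns clock period ~ 250 MHz
  final val DEFAULT_CLOCK_PERIOD_NS = 4

  // fmax in MHz from synthesis clock periods in ns; falls back to the default
  def fmaxMHz(clockPeriodsNs: Seq[Double]): Double =
    1000.0 / (if (clockPeriodsNs.nonEmpty) clockPeriodsNs.max else DEFAULT_CLOCK_PERIOD_NS)

  def main(args: Array[String]): Unit = {
    // hypothetical supported frequencies (MHz), filtered against fmax, sorted descending
    val supported = Seq(50.0, 100.0, 150.0, 200.0, 250.0)
    val fmax = fmaxMHz(Seq(5.2, 4.8)) // slowest core limits the design: ~192 MHz
    println(supported filter (_ <= fmax) sortWith (_ > _)) // List(150.0, 100.0, 50.0)
  }
}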
src/main/scala/tapasco/filemgmt/LogTrackingFileWatcher.scala

@@ -15,7 +15,7 @@ class LogTrackingFileWatcher(_logger: Option[Logger] = None, pollInterval: Int =
     extends MultiFileWatcher(POLL_INTERVAL) {
   private[this] final val logger = _logger getOrElse de.tu_darmstadt.cs.esa.tapasco.Logging.logger(getClass)
-  private object listener extends Listener[Event] {
+  private lazy val listener = new Listener[Event] {
     def update(e: MultiFileWatcher.Event): Unit = e match {
       case LinesAdded(src, ls) => ls map { l =>
         logger.info(l)
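The change above turns a nested listener object into a lazy val holding an anonymous Listener[Event]. A simplified, self-contained sketch of that shape; Event, LinesAdded and Listener are reduced stand-ins for the MultiFileWatcher types, not the repository's definitions:

object ListenerSketch {
  sealed trait Event
  final case class LinesAdded(src: String, lines: Seq[String]) extends Event

  trait Listener[-E] { def update(e: E): Unit }

  // anonymous listener instance held in a lazy val, as in the diff
  private lazy val listener = new Listener[Event] {
    def update(e: Event): Unit = e match {
      case LinesAdded(src, ls) => ls foreach { l => println(s"[$src] $l") }
    }
  }

  def main(args: Array[String]): Unit =
    listener.update(LinesAdded("tapasco.log", Seq("line 1", "line 2")))
}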
src/main/scala/tapasco/itapasco/controller/ExplorationGraphController.scala

@@ -40,7 +40,7 @@ import scala.collection.JavaConverters._
  *   current element
  * - '''pgup'''/'''pgdown''' jumps to elements pruned by / pruning reasons of
  *   the current element (alternative keys: '''up'''/'''down''')
- *
+ *
  * ==DSE Log Table==
  * Table with all DSE events ordered by timestamp; shows a short textual
  * representation of the event (e.g., Run XY started). This log is later written
(the removed and added lines differ only in trailing whitespace)
src/main/scala/tapasco/itapasco/executables/LogViewer.scala

@@ -9,7 +9,7 @@ import scala.swing.{BorderPanel, Frame, MainFrame, SimpleSwingApplication, Swin
 /** Reads a DSE Json logfile and shows an [[controller.ExplorationGraphController]]
   * for it.
   * Each DSE run produces a logfile in Json format, which can be "replayed" into
-  * an [[model.DesignSpaceGraph]], which in turn can be displayed by the
+  * an [[model.DesignSpaceGraph]], which in turn can be displayed by the
   * [[controller ExplorationGraphController]]. This applications shows a window
   * containing only the graph view of the DSE page in iTPC and allows to browse
   * through past DSE runs conveniently
(the removed and added lines differ only in trailing whitespace)
src/main/scala/tapasco/itapasco/view/detail/PlatformDetailPanel.scala

@@ -11,7 +11,7 @@ import PlatformsPanel.Events._
  * On top there is a [[common.DescriptionPropertiesTable]] instance showing
  * details of the description itself. Below there is a chart of the average
  * transfer speeds achieved by the platform across different chunk sizes.
- *
+ *
  * @see [[chart.PlatformBenchmarkChart]]
  */
 class PlatformDetailPanel extends GridBagPanel with Listener[PlatformsPanel.Event] {
(the removed and added lines differ only in trailing whitespace)
src/main/scala/tapasco/itapasco/view/dse/config/ConfigPanel.scala

@@ -12,7 +12,7 @@ import scala.swing.event._
  * [[common.DividerSync]] to present a table-like structure, consisting of first
  * row: Design space dimension (frequency, utilization, alternatives); second
  * row: batch size, a warning panel (for misconfigurations) and SLURM.
- * Last row contains a button for the user to start the exploration, which
+ * Last row contains a button for the user to start the exploration, which
  * raises a corresponding [[ConfigPanel.Event]] (nothing is done directly, MVC
  * approach better here).
  *
(the removed and added lines differ only in trailing whitespace)
src/main/scala/tapasco/itapasco/view/dse/graph/SatelliteViewerGrid.scala

@@ -2,7 +2,7 @@ package de.tu_darmstadt.cs.esa.tapasco.itapasco.view.dse.graph
 import edu.uci.ics.jung.visualization._

 /** JUNG2 VisualizationViewer Paintable which renders the grid for [[globals.Graph.satViewer]].
-  * The axes are drawn in bold + white, the rest of the grid is 10-step in dotted gray.
+  * The axes are drawn in bold + white, the rest of the grid is 10-step in dotted gray.
   */
 class SatelliteViewerGrid(vv: VisualizationViewer[_, _]) extends VisualizationServer.Paintable {
   import java.awt._
(the removed and added lines differ only in trailing whitespace)
src/main/scala/tapasco/itapasco/view/package.scala

@@ -2,7 +2,7 @@ package de.tu_darmstadt.cs.esa.tapasco.itapasco
 /** iTPC loosely adheres to the 'Model-View-Controller (MVC)' paradigm; this
   * package contains all 'View' classes, i.e., UI elements.
-  * Most views are Selection/Detail, the packages [[view.selection]] and
+  * Most views are Selection/Detail, the packages [[view.selection]] and
   * [[view.detail]] contain the classes in these categories. Note that some
   * UI elements that have been reused intensively are also found in
   * [[itapasco.common]].
(the removed and added lines differ only in trailing whitespace)
src/main/scala/tapasco/jobs/JobExamples.scala

@@ -12,6 +12,7 @@ import java.nio.file._
   * via the [[dump]] method.
   */
 object JobExamples {
+  // scalastyle:off magic.number
   val bulkImportJob = BulkImportJob(Paths.get("some.csv"))
   val composition = Composition(Paths.get("N/A"), Some("An optional description."),

@@ -63,4 +64,5 @@ object JobExamples {
       fw.close()
     }
   }
+  // scalastyle:on magic.number
 }
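Here the magic.number check is switched off for the whole example object and re-enabled at its end. A minimal sketch of that suppression pattern, with an illustrative value:

object MagicNumberSketch {
  // scalastyle:off magic.number
  val exampleBatchSize = 42 // deliberate literal; the check is disabled in this region
  // scalastyle:on magic.number
}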
src/main/scala/tapasco/jobs/Jobs.scala

@@ -131,14 +131,14 @@ final case class DesignSpaceExplorationJob(
     FileAssetManager.entities.architectures filter (a => _architectures map (_.contains(a.name)) getOrElse true)

   /** Returns the name filter for [[base.Architecture]] instances. */
-  def architectureNames = _architectures
+  def architectureNames: Option[Seq[String]] = _architectures

   /** Returns the list of [[base.Platform]] instances selected in this job. */
   def platforms: Set[Platform] =
     FileAssetManager.entities.platforms filter (p => _platforms map (_.contains(p.name)) getOrElse true)

   /** Returns the name filter for [[base.Platform]] instances. */
-  def platformNames = _platforms
+  def platformNames: Option[Seq[String]] = _platforms

   /** Returns the first target (alphabetically Arch - Platform). */
   def target: Target = targets.head
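The two changed lines add explicit result types to public members, presumably to satisfy scalastyle's check that public members declare their types. A tiny sketch of the before/after shape with an illustrative case class:

// minimal sketch of the fix above: public members get explicit result types
// (field and class names here are illustrative, not the repository's types)
final case class FilterSketch(_architectures: Option[Seq[String]]) {
  // before: def architectureNames = _architectures   (result type inferred)
  // after:  the result type is spelled out
  def architectureNames: Option[Seq[String]] = _architectures
}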
src/main/scala/tapasco/parser/ComposeParser.scala

@@ -30,10 +30,6 @@ private object ComposeParser {
     case ("Architectures", as: Seq[String @unchecked]) => _.copy(_architectures = Some(as))
     case ("Platforms", ps: Seq[String @unchecked]) => _.copy(_platforms = Some(ps))
     case ("Features", fs: Seq[Feature @unchecked]) => _.copy(features = Some(fs))
-    case ("Features", fs: Map[_, _]) => { job =>
-      logger.warn("new features not implemented yet!")
-      job
-    }
     case ("DebugMode", m: String) => _.copy(debugMode = Some(m))
     case o => throw new Exception(s"parsed illegal option: $o")
   }
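Each parsed key/value pair above is mapped to a function that transforms the job under construction. A reduced sketch of that option-to-transform pattern; the Job case class and the option values are illustrative, not the repository's types:

object OptionTransformSketch {
  final case class Job(debugMode: Option[String] = None, platforms: Option[Seq[String]] = None)

  // each parsed option becomes a Job => Job transformation, as in ComposeParser
  def transform(opt: (String, Any)): Job => Job = opt match {
    case ("Platforms", ps: Seq[String @unchecked]) => _.copy(platforms = Some(ps))
    case ("DebugMode", m: String) => _.copy(debugMode = Some(m))
    case o => throw new Exception(s"parsed illegal option: $o")
  }

  def main(args: Array[String]): Unit = {
    val opts: Seq[(String, Any)] = Seq("Platforms" -> Seq("vc709"), "DebugMode" -> "r")
    // fold all transformations over an empty job
    val job = opts.map(transform).foldLeft(Job())((j, f) => f(j))
    println(job)
  }
}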
src/main/scala/tapasco/slurm/Slurm.scala

@@ -54,6 +54,8 @@ final object Slurm extends Publisher {
   final val slurmDelay = 15000 // 15 secs
   /** Set of POSIX permissions for SLURM job scripts. */
   final val slurmScriptPermissions = Set(OWNER_READ, OWNER_WRITE, OWNER_EXECUTE, GROUP_READ, OTHERS_READ).asJava
+  /** Wait interval between retries. */
+  final val slurmRetryDelay = 10000 // 10 secs
   /** Returns true if SLURM is available on host running iTPC. */
   lazy val available: Boolean = "which sbatch".! == 0

@@ -116,11 +118,11 @@ final object Slurm extends Publisher {
     logger.debug("running slurm batch job: '%s'".format(cmd))
     val res = cmd.!!
     val id = slurmSubmissionAck.findFirstMatchIn(res) map (_ group (1) toInt)
     if (id.isEmpty) {
       if (retries > 0) {
-        Thread.sleep(10000) // wait 10 secs
+        Thread.sleep(slurmRetryDelay) // wait 10 secs
         apply(script, retries - 1)
-      } else
-        throw new SlurmException(script.toString, res)
+      } else {
+        throw new SlurmException(script.toString, res)
+      }
     } else {
       logger.debug("received SLURM id: {}", id)
       id
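The retry path above now sleeps for the named slurmRetryDelay instead of a literal 10000 before resubmitting. A compact sketch of that bounded-retry pattern; submit() is a hypothetical stand-in for the sbatch invocation:

object RetrySketch {
  /** Wait interval between retries (10 secs), as introduced in the diff. */
  final val slurmRetryDelay = 10000

  def submit(): Option[Int] = None // would return the SLURM job id on success

  // retry a bounded number of times, sleeping between attempts
  @annotation.tailrec
  def apply(retries: Int): Int = submit() match {
    case Some(id) => id
    case None if retries > 0 =>
      Thread.sleep(slurmRetryDelay)
      apply(retries - 1)
    case None => throw new Exception("submission failed after all retries")
  }
}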
src/main/scala/tapasco/task/ComposeTask.scala

@@ -70,8 +70,7 @@ class ComposeTask(composition: Composition,
     LogFileTracker.stopLogFileAppender(appender)
     val result = (_composerResult map (_.result) getOrElse false) == ComposeResult.Success
-    if (result) composer.clean(composition, target, designFrequency)
+    if (result) { composer.clean(composition, target, designFrequency) }
     result
   }
src/main/scala/tapasco/util/PublisherListener.scala

@@ -48,7 +48,7 @@ trait Publisher {
   /** Type alias for listeners. */
   type EventListener = Listener[Event]

-  /** Internal array of listeners. */
+  /** Internal array of listeners. */
   protected val _listeners: ArrayBuffer[EventListener] = new ArrayBuffer()

   /** Adds an [[EventListener]].
(the removed and added lines differ only in trailing whitespace)
src/main/scala/tapasco/util/ZipUtils.scala

@@ -53,11 +53,11 @@ object ZipUtils {
         ((exclude map (r => r.findFirstIn(zipEntry.toString()).isEmpty) fold true) (_ && _))) {
       logger.trace(zipFile + ": extracting " + zipEntry)
       val buffer = new Array[Byte](bufsz)
-      val outname = tempdir.resolve(if (flatten)
+      val outname = tempdir.resolve(if (flatten) {
         Paths.get(zipEntry.getName()).getFileName()
-      else
+      } else {
         Paths.get(zipEntry.getName())
-      )
+      })
       logger.trace("outname = {}}", outname)
       Option(outname.getParent) foreach { p => if (!p.toFile.exists()) Files.createDirectories(p) }
       val dest = new BufferedOutputStream(new FileOutputStream(outname.toString), bufsz)

@@ -84,7 +84,7 @@ object ZipUtils {
    * @param zipFile Path to output zip file.
    * @param files Sequence of files to pack.
    */
-  def zipFile(zipFile: Path, files: Seq[Path]) = {
+  def zipFile(zipFile: Path, files: Seq[Path]) {
     import java.util.zip._
     import java.io.{ BufferedOutputStream, FileOutputStream }
     val zos = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(zipFile.toFile)))
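The extraction loop above keeps a zip entry only if none of the exclude regexes match it, folding the per-pattern results with &&. A small sketch of that filter with made-up patterns and paths:

import scala.util.matching.Regex

object ExcludeFilterSketch {
  // an entry is kept only if no exclude pattern matches it, as in ZipUtils
  def keep(entry: String, exclude: Seq[Regex]): Boolean =
    (exclude map (r => r.findFirstIn(entry).isEmpty) fold true) (_ && _)

  def main(args: Array[String]): Unit = {
    val exclude = Seq("\\.log$".r, "^tmp/".r)
    println(keep("reports/timing.rpt", exclude)) // true
    println(keep("build/run.log", exclude))      // false
  }
}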