
Commit

Merge branch 'refs/heads/dev' into df/#856-tap-water
# Conflicts:
#	CHANGELOG.md
#	src/main/scala/edu/ie3/simona/model/thermal/ThermalGrid.scala
danielfeismann committed Oct 30, 2024
2 parents 611b701 + 2eb0567 commit ce64339
Showing 29 changed files with 1,108 additions and 868 deletions.
9 changes: 9 additions & 0 deletions CHANGELOG.md
@@ -35,6 +35,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- EmAgents should be able to handle initialization [#945](https://github.com/ie3-institute/simona/issues/945)
- Added option to directly zip the output files [#793](https://github.com/ie3-institute/simona/issues/793)
- Added weatherData HowTo for Copernicus ERA5 data [#967](https://github.com/ie3-institute/simona/issues/967)
- Add some quote to 'printGoodbye' [#997](https://github.com/ie3-institute/simona/issues/997)
- Integration test for thermal grids [#878](https://github.com/ie3-institute/simona/issues/878)

### Changed
@@ -87,6 +88,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Prepare ThermalStorageTestData for Storage without storageVolumeLvlMin [#894](https://github.com/ie3-institute/simona/issues/894)
- Renamed `ActivityStartTrigger`, `ScheduleTriggerMessage`, `CompletionMessage` in UML Diagrams[#675](https://github.com/ie3-institute/simona/issues/675)
- Simplifying quantity integration in QuantityUtil [#973](https://github.com/ie3-institute/simona/issues/973)
- Reorganized Jenkins pipeline to separate build and test stages for better efficiency [#938](https://github.com/ie3-institute/simona/issues/938)
- Rewrote SystemParticipantTest and MockParticipant from groovy to scala [#646](https://github.com/ie3-institute/simona/issues/646)
- Rewrote ChpModelTest from groovy to scala [#646](https://github.com/ie3-institute/simona/issues/646)
- Rewrote CylindricalThermalStorageTest Test from groovy to scala [#646](https://github.com/ie3-institute/simona/issues/646)
- Replace mutable var in ChpModelSpec [#1002](https://github.com/ie3-institute/simona/issues/1002)
- Move compression of output files into `ResultEventListener`[#965](https://github.com/ie3-institute/simona/issues/965)

### Fixed
- Removed a repeated line in the documentation of vn_simona config [#658](https://github.com/ie3-institute/simona/issues/658)
@@ -117,6 +124,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Improve code quality in fixedloadmodelspec and other tests [#919](https://github.com/ie3-institute/simona/issues/919)
- Fix power flow calculation with em agents [#962](https://github.com/ie3-institute/simona/issues/962)
- Fix scheduling at Evcs with more than one Ev at a time without Em [#787](https://github.com/ie3-institute/simona/issues/787)
- Fix CheckWindow duration [#921](https://github.com/ie3-institute/simona/issues/921)
- Fixed ThermalStorageResults having multiple entries [#924](https://github.com/ie3-institute/simona/issues/924)
- Fixed Hp results leading to overheating house and other effects [#827](https://github.com/ie3-institute/simona/issues/827)
- Fixed thermal storage getting recharged when empty [#827](https://github.com/ie3-institute/simona/issues/827)

21 changes: 11 additions & 10 deletions Jenkinsfile
@@ -101,31 +101,32 @@ node {
}
}

// Build the project
stage('build') {
gradle('clean assemble', projectName)
}

// test the project
stage('run tests') {

sh 'java -version'

gradle('--refresh-dependencies clean spotlessCheck pmdMain pmdTest reportScoverage checkScoverage', projectName)
gradle('--refresh-dependencies spotlessCheck pmdMain pmdTest', projectName)

sh(script: """set +x && cd $projectName""" + ''' set +x; ./gradlew javadoc''', returnStdout: true)
}

// sonarqube analysis
stage('sonarqube analysis') {
// sonarqube analysis & quality gate
stage('sonarqube') {
String sonarqubeCurrentBranchName = prFromFork() ? prJsonObj.head.repo.full_name : currentBranchName // forks needs to be handled differently
String sonarqubeCmd = determineSonarqubeGradleCmd(sonarqubeProjectKey, sonarqubeCurrentBranchName, targetBranchName, orgName, projectName, projectName)
withSonarQubeEnv() {
// will pick the global server connection from jenkins for sonarqube
gradle(sonarqubeCmd, projectName)
}
}

// sonarqube quality gate
stage("quality gate") {
timeout(time: 1, unit: 'HOURS') {
// just in case something goes wrong, pipeline will be killed after a timeout
def qg = waitForQualityGate() // reuse taskId previously collected by withSonarQubeEnv
// Just in case something goes wrong, pipeline will be killed after a timeout
def qg = waitForQualityGate() // Reuse taskId previously collected by withSonarQubeEnv
if (qg.status != 'OK') {
error "Pipeline aborted due to quality gate failure: ${qg.status}"
}
@@ -684,4 +685,4 @@ def getBranchType(String branchName) {
} else {
return null
}
}
}
8 changes: 4 additions & 4 deletions build.gradle
@@ -26,11 +26,11 @@ ext {

scalaVersion = '2.13'
scalaBinaryVersion = '2.13.15'
pekkoVersion = '1.1.1'
pekkoVersion = '1.1.2'
jtsVersion = '1.20.0'
confluentKafkaVersion = '7.4.0'
tscfgVersion = '1.1.3'
scapegoatVersion = '3.0.3'
scapegoatVersion = '3.1.2'

testContainerVersion = '0.41.4'

@@ -98,12 +98,12 @@ dependencies {

/* logging */
implementation "com.typesafe.scala-logging:scala-logging_${scalaVersion}:3.9.5" // pekko scala logging
implementation "ch.qos.logback:logback-classic:1.5.8"
implementation "ch.qos.logback:logback-classic:1.5.12"

/* testing */
testImplementation 'org.spockframework:spock-core:2.3-groovy-4.0'
testImplementation 'org.scalatestplus:mockito-3-4_2.13:3.2.10.0'
testImplementation 'org.mockito:mockito-core:5.14.1' // mocking framework
testImplementation 'org.mockito:mockito-core:5.14.2' // mocking framework
testImplementation "org.scalatest:scalatest_${scalaVersion}:3.2.19"
testRuntimeOnly 'com.vladsch.flexmark:flexmark-all:0.64.8' //scalatest html output
testImplementation group: 'org.pegdown', name: 'pegdown', version: '1.6.0'
4 changes: 2 additions & 2 deletions docs/readthedocs/config.md
@@ -94,11 +94,11 @@ simona.output.sink.csv {
fileFormat = ".csv"
filePrefix = ""
fileSuffix = ""
zipFiles = false
compressOutputs = false
}
```

While using a csv sink, the raw data output files can be zipped directly when `zipFiles = true` is used.
While using a csv sink, the raw data output files can be zipped directly when `compressOutputs = true` is used.
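
As an illustration only (not part of this commit), a minimal Scala sketch of reading the renamed flag via Typesafe Config, assuming the key layout of the csv sink block shown above:

```scala
import com.typesafe.config.ConfigFactory

// Minimal sketch: parse a csv sink block with compression enabled and read the
// renamed flag. The block mirrors the documented simona.output.sink.csv keys.
val conf = ConfigFactory.parseString(
  """
    |simona.output.sink.csv {
    |  fileFormat = ".csv"
    |  filePrefix = ""
    |  fileSuffix = ""
    |  compressOutputs = true
    |}
    |""".stripMargin
)

val compress = conf.getBoolean("simona.output.sink.csv.compressOutputs") // true
```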


#### Output configuration of the grid
4 changes: 2 additions & 2 deletions docs/readthedocs/requirements.txt
@@ -1,5 +1,5 @@
Sphinx==7.4.7
sphinx-rtd-theme==2.0.0
Sphinx==8.1.3
sphinx-rtd-theme==3.0.1
sphinxcontrib-plantuml==0.30
myst-parser==4.0.0
markdown-it-py==3.0.0
2 changes: 1 addition & 1 deletion input/samples/vn_simona/vn_simona.conf
@@ -51,7 +51,7 @@ simona.output.sink.csv {
fileFormat = ".csv"
filePrefix = ""
fileSuffix = ""
zipFiles = false
compressOutputs = false
}

simona.output.grid = {
2 changes: 1 addition & 1 deletion src/main/resources/config/config-template.conf
@@ -269,7 +269,7 @@ simona.output.sink.csv {
isHierarchic = Boolean | false
filePrefix = ""
fileSuffix = ""
zipFiles = "Boolean" | false
compressOutputs = "Boolean" | false
}
#@optional
simona.output.sink.influxDb1x {
@@ -188,7 +188,8 @@ trait HpAgentFundamentals
)

val accompanyingResults = baseStateData.model.thermalGrid.results(
updatedState.thermalGridState
tick,
updatedState.thermalGridState,
)(baseStateData.startDate)
val result = AccompaniedSimulationResult(power, accompanyingResults)

@@ -252,7 +253,8 @@ trait HpAgentFundamentals
relevantData,
)
val accompanyingResults = baseStateData.model.thermalGrid.results(
lastModelState.thermalGridState
currentTick,
lastModelState.thermalGridState,
)(baseStateData.startDate)
val result = AccompaniedSimulationResult(power, accompanyingResults)

7 changes: 5 additions & 2 deletions src/main/scala/edu/ie3/simona/config/SimonaConfig.scala
@@ -2050,11 +2050,11 @@ object SimonaConfig {
)
object Sink {
final case class Csv(
compressOutputs: scala.Boolean,
fileFormat: java.lang.String,
filePrefix: java.lang.String,
fileSuffix: java.lang.String,
isHierarchic: scala.Boolean,
zipFiles: scala.Boolean,
)
object Csv {
def apply(
@@ -2063,6 +2063,10 @@
$tsCfgValidator: $TsCfgValidator,
): SimonaConfig.Simona.Output.Sink.Csv = {
SimonaConfig.Simona.Output.Sink.Csv(
compressOutputs =
c.hasPathOrNull("compressOutputs") && c.getBoolean(
"compressOutputs"
),
fileFormat =
if (c.hasPathOrNull("fileFormat")) c.getString("fileFormat")
else ".csv",
@@ -2074,7 +2078,6 @@
else "",
isHierarchic =
c.hasPathOrNull("isHierarchic") && c.getBoolean("isHierarchic"),
zipFiles = c.hasPathOrNull("zipFiles") && c.getBoolean("zipFiles"),
)
}
}
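
For clarity, a small standalone sketch of the `hasPathOrNull && getBoolean` pattern used by the generated accessor above; it shows why an absent `compressOutputs` key falls back to `false` (assumes only plain Typesafe Config, nothing SIMONA-specific):

```scala
import com.typesafe.config.{Config, ConfigFactory}

// Same pattern as the generated code above: if the key is missing,
// hasPathOrNull is false and the expression short-circuits to false;
// otherwise the configured boolean is returned.
def readCompressOutputs(c: Config): Boolean =
  c.hasPathOrNull("compressOutputs") && c.getBoolean("compressOutputs")

readCompressOutputs(ConfigFactory.empty())                                // false
readCompressOutputs(ConfigFactory.parseString("compressOutputs = true")) // true
```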
@@ -68,41 +68,48 @@ object ResultEventListener extends Transformer3wResultSupport {
resultFileHierarchy: ResultFileHierarchy
): Iterable[Future[(Class[_], ResultEntitySink)]] = {
resultFileHierarchy.resultSinkType match {
case _: ResultSinkType.Csv =>
resultFileHierarchy.resultEntitiesToConsider
.map(resultClass => {
resultFileHierarchy.rawOutputDataFilePaths
.get(resultClass)
.map(Future.successful)
.getOrElse(
Future.failed(
new FileHierarchyException(
s"Unable to get file path for result class '${resultClass.getSimpleName}' from output file hierarchy! " +
s"Available file result file paths: ${resultFileHierarchy.rawOutputDataFilePaths}"
)
case csv: ResultSinkType.Csv =>
val enableCompression = csv.compressOutputs

resultFileHierarchy.resultEntitiesToConsider.map { resultClass =>
val filePathOpt =
resultFileHierarchy.rawOutputDataFilePaths.get(resultClass)

val filePathFuture = filePathOpt match {
case Some(fileName) => Future.successful(fileName)
case None =>
Future.failed(
new FileHierarchyException(
s"Unable to get file path for result class '${resultClass.getSimpleName}' from output file hierarchy! " +
s"Available file result file paths: ${resultFileHierarchy.rawOutputDataFilePaths}"
)
)
.flatMap { fileName =>
if (fileName.endsWith(".csv") || fileName.endsWith(".csv.gz")) {
Future {
(
resultClass,
ResultEntityCsvSink(
fileName.replace(".gz", ""),
new ResultEntityProcessor(resultClass),
fileName.endsWith(".gz"),
),
)
}
} else {
Future.failed(
new ProcessResultEventException(
s"Invalid output file format for file $fileName provided. Currently only '.csv' or '.csv.gz' is supported!"
)
}

filePathFuture.map { fileName =>
val finalFileName =
fileName match {
case name if name.endsWith(".csv.gz") && enableCompression =>
name.replace(".gz", "")
case name if name.endsWith(".csv") => name
case fileName =>
throw new ProcessResultEventException(
s"Invalid output file format for file $fileName provided or compression is not activated but filename indicates compression. Currently only '.csv' or '.csv.gz' is supported!"
)
}
}
})

(
resultClass,
ResultEntityCsvSink(
finalFileName,
new ResultEntityProcessor(resultClass),
enableCompression,
),
)

}
}

case ResultSinkType.InfluxDb1x(url, database, scenario) =>
// creates one connection per result entity that should be processed
resultFileHierarchy.resultEntitiesToConsider
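
Condensed into a standalone sketch, the csv file-name handling introduced above behaves as follows (the `ProcessResultEventException` here is a local stand-in for the exception type used in SIMONA, and the file names in the usage lines are hypothetical):

```scala
final class ProcessResultEventException(msg: String) extends Exception(msg)

// With compression enabled, a ".csv.gz" path is trimmed to its ".csv" name and
// the compression flag is handled by the sink; a plain ".csv" passes through;
// anything else (including ".csv.gz" without compression) is rejected.
def resolveFileName(fileName: String, enableCompression: Boolean): String =
  fileName match {
    case name if name.endsWith(".csv.gz") && enableCompression =>
      name.replace(".gz", "")
    case name if name.endsWith(".csv") => name
    case other =>
      throw new ProcessResultEventException(
        s"Invalid output file format for file $other provided or compression " +
          "is not activated but filename indicates compression."
      )
  }

resolveFileName("line_res.csv.gz", enableCompression = true) // "line_res.csv"
resolveFileName("line_res.csv", enableCompression = false)   // "line_res.csv"
```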
4 changes: 2 additions & 2 deletions src/main/scala/edu/ie3/simona/io/result/ResultSinkType.scala
@@ -21,7 +21,7 @@ object ResultSinkType {
fileFormat: String = ".csv",
filePrefix: String = "",
fileSuffix: String = "",
zipFiles: Boolean = false,
compressOutputs: Boolean = false,
) extends ResultSinkType

final case class InfluxDb1x(url: String, database: String, scenario: String)
@@ -53,7 +53,7 @@ object ResultSinkType {
params.fileFormat,
params.filePrefix,
params.fileSuffix,
params.zipFiles,
params.compressOutputs,
)
case Some(params: SimonaConfig.Simona.Output.Sink.InfluxDb1x) =>
InfluxDb1x(buildInfluxDb1xUrl(params), params.database, runName)
@@ -27,7 +27,6 @@ import scala.concurrent.duration.DurationLong
final case class RuntimeEventLogSink(
simulationStartDate: ZonedDateTime,
log: Logger,
private var last: Long = 0L,
) extends RuntimeEventSink {

override def handleRuntimeEvent(
@@ -45,15 +44,13 @@

case CheckWindowPassed(tick, duration) =>
log.info(
s"******* Simulation until ${calcTime(tick)} completed. ${durationAndMemoryString(duration - last)} ******"
s"******* Simulation until ${calcTime(tick)} completed. ${durationAndMemoryString(duration)} ******"
)
last = duration

case Ready(tick, duration) =>
log.info(
s"******* Switched from 'Simulating' to 'Ready'. Last simulated time: ${calcTime(tick)}. ${durationAndMemoryString(duration - last)} ******"
s"******* Switched from 'Simulating' to 'Ready'. Last simulated time: ${calcTime(tick)}. ${durationAndMemoryString(duration)} ******"
)
last = duration

case Simulating(startTick, endTick) =>
log.info(
1 change: 1 addition & 0 deletions src/main/scala/edu/ie3/simona/main/RunSimona.scala
@@ -66,6 +66,7 @@ trait RunSimona[T <: SimonaSetup] extends LazyLogging {
"\"Ich bin der Anfang, das Ende, die Eine, die Viele ist. Ich bin die Borg.\" - Borg-Königin (in Star Trek: Der erste Kontakt)",
"\"A horse! A horse! My kingdom for a horse!\" - King Richard III (in Shakespeare's Richard III, 1594)",
"\"Und wenn du lange in einen Abgrund blickst, blickt der Abgrund auch in dich hinein\" - F. Nietzsche",
"\"Before anything else, preparation is the key to success.\" - Alexander Graham Bell",
)

val rand = new Random
38 changes: 1 addition & 37 deletions src/main/scala/edu/ie3/simona/main/RunSimonaStandalone.scala
@@ -10,17 +10,12 @@ import edu.ie3.simona.config.{ArgsParser, ConfigFailFast, SimonaConfig}
import edu.ie3.simona.main.RunSimona._
import edu.ie3.simona.sim.SimonaSim
import edu.ie3.simona.sim.setup.SimonaStandaloneSetup
import edu.ie3.util.io.FileIOUtils
import org.apache.pekko.actor.typed.scaladsl.AskPattern._
import org.apache.pekko.actor.typed.{ActorSystem, Scheduler}
import org.apache.pekko.util.Timeout

import java.nio.file.Path
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.{Duration, DurationInt}
import scala.jdk.FutureConverters.CompletionStageOps
import scala.util.{Failure, Success}
import scala.concurrent.duration.DurationInt

/** Run a standalone simulation of simona
*
@@ -29,7 +24,6 @@ object RunSimonaStandalone extends RunSimona[SimonaStandaloneSetup] {
object RunSimonaStandalone extends RunSimona[SimonaStandaloneSetup] {

override implicit val timeout: Timeout = Timeout(12.hours)
implicit val compressTimeoutDuration: Duration = 15.minutes

override def setup(args: Array[String]): SimonaStandaloneSetup = {
// get the config and prepare it with the provided args
@@ -62,36 +56,6 @@ object RunSimonaStandalone extends RunSimona[SimonaStandaloneSetup] {
case SimonaEnded(successful) =>
simonaSim.terminate()

val config = SimonaConfig(simonaSetup.typeSafeConfig).simona.output

config.sink.csv.map(_.zipFiles).foreach { zipFiles =>
if (zipFiles) {
val rawOutputPath =
Path.of(simonaSetup.resultFileHierarchy.rawOutputDataDir)

rawOutputPath.toFile.listFiles().foreach { file =>
val fileName = file.getName
val archiveName = fileName.replace(".csv", "")
val filePath = rawOutputPath.resolve(fileName)

val compressFuture =
FileIOUtils
.compressFile(filePath, rawOutputPath.resolve(archiveName))
.asScala
compressFuture.onComplete {
case Success(_) =>
FileIOUtils.deleteRecursively(filePath)
case Failure(exception) =>
logger.error(
s"Compression of output file to '$archiveName' has failed. Keep raw data.",
exception,
)
}
Await.ready(compressFuture, compressTimeoutDuration)
}
}
}

successful
}
}