
Commit

formatted
0815Creeper committed Sep 14, 2024
1 parent 262507a · commit 429778c
Showing 3 changed files with 20 additions and 20 deletions.
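
The diff below is whitespace-only: spaces are added after commas in Union types, around <: in where clauses, and around binary operators. The commit message suggests an automated formatter pass; the exact tool and settings are not recorded in this commit, but a JuliaFormatter invocation along these lines could produce this kind of change (hypothetical sketch, not taken from the repository):

    # Hypothetical formatter run; the tool and options actually used here are not shown.
    using JuliaFormatter

    # `whitespace_typedefs = true` asks for spaces inside type definitions,
    # e.g. `Union{A,Nothing}` -> `Union{A, Nothing}` and `{T<:Real}` -> `{T <: Real}`.
    format("src"; whitespace_typedefs = true)
    format("test"; whitespace_typedefs = true)
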
32 changes: 16 additions & 16 deletions src/batch.jl
@@ -27,11 +27,11 @@ mutable struct FMULoss{T}
     end
 end
 
-function nominalLoss(l::FMULoss{T}) where {T<:AbstractArray}
+function nominalLoss(l::FMULoss{T}) where {T <: AbstractArray}
     return unsense(sum(l.loss))
 end
 
-function nominalLoss(l::FMULoss{T}) where {T<:Real}
+function nominalLoss(l::FMULoss{T}) where {T <: Real}
     return unsense(l.loss)
 end
 
@@ -45,10 +45,10 @@ end
 
 mutable struct FMU2SolutionBatchElement{D} <: FMU2BatchElement
 
-    snapshot::Union{FMUSnapshot,Nothing}
+    snapshot::Union{FMUSnapshot, Nothing}
 
-    xStart::Union{Vector{fmi2Real},Nothing}
-    xdStart::Union{Vector{D},Nothing}
+    xStart::Union{Vector{fmi2Real}, Nothing}
+    xdStart::Union{Vector{D}, Nothing}
 
     tStart::fmi2Real
     tStop::fmi2Real
@@ -61,8 +61,8 @@ mutable struct FMU2SolutionBatchElement{D} <: FMU2BatchElement
     losses::Array{<:FMULoss} # logged losses (if used)
     step::Integer
 
-    saveat::Union{AbstractVector{<:Real},Nothing}
-    targets::Union{AbstractArray,Nothing}
+    saveat::Union{AbstractVector{<:Real}, Nothing}
+    targets::Union{AbstractArray, Nothing}
 
     indicesModel::Any
 
@@ -83,7 +83,7 @@ mutable struct FMU2SolutionBatchElement{D} <: FMU2BatchElement
         # inst.initialState = nothing
         # inst.initialEventInfo = nothing
         inst.loss = FMULoss(Inf)
-        inst.losses = Array{FMULoss,1}()
+        inst.losses = Array{FMULoss, 1}()
         inst.step = 0
 
         inst.saveat = nothing
@@ -106,9 +106,9 @@ mutable struct FMU2EvaluationBatchElement <: FMU2BatchElement
     losses::Array{<:FMULoss}
     step::Integer
 
-    saveat::Union{AbstractVector{<:Real},Nothing}
-    targets::Union{AbstractArray,Nothing}
-    features::Union{AbstractArray,Nothing}
+    saveat::Union{AbstractVector{<:Real}, Nothing}
+    targets::Union{AbstractArray, Nothing}
+    features::Union{AbstractArray, Nothing}
 
     indicesModel::Any
 
@@ -123,7 +123,7 @@ mutable struct FMU2EvaluationBatchElement <: FMU2BatchElement
         inst.tStop = Inf
 
         inst.loss = FMULoss(Inf)
-        inst.losses = Array{FMULoss,1}()
+        inst.losses = Array{FMULoss, 1}()
        inst.step = 0
 
        inst.saveat = nothing
@@ -373,7 +373,7 @@ function loss!(batchElement::FMU2SolutionBatchElement, lossFct; logLoss::Bool =
     if hasmethod(lossFct, Tuple{FMUSolution})
         loss = lossFct(batchElement.solution)
 
-    elseif hasmethod(lossFct, Tuple{FMUSolution,Union{}})
+    elseif hasmethod(lossFct, Tuple{FMUSolution, Union{}})
         loss = lossFct(batchElement.solution, batchElement.targets)
 
     else # hasmethod(lossFct, Tuple{Union{}, Union{}})
@@ -545,7 +545,7 @@ function batchDataSolution(
     solverKwargs...,
 )
 
-    batch = Array{FMIFlux.FMU2SolutionBatchElement,1}()
+    batch = Array{FMIFlux.FMU2SolutionBatchElement, 1}()
     _batchDataSolution!(
         batch,
         neuralFMU,
@@ -585,15 +585,15 @@ end
 function batchDataEvaluation(
     train_t::AbstractArray{<:Real},
     targets::AbstractArray,
-    features::Union{AbstractArray,Nothing} = nothing;
+    features::Union{AbstractArray, Nothing} = nothing;
     batchDuration::Real = (train_t[end] - train_t[1]),
     indicesModel = 1:length(targets[1]),
     plot::Bool = false,
     round_digits = 3,
     scalarLoss::Bool = true,
 )
 
-    batch = Array{FMIFlux.FMU2EvaluationBatchElement,1}()
+    batch = Array{FMIFlux.FMU2EvaluationBatchElement, 1}()
 
     indicesData = 1:1
 
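
For context, the two nominalLoss methods touched above dispatch on the type parameter of FMULoss: an array-valued loss is summed before sensitivity information is stripped, a scalar loss is stripped directly. A simplified, self-contained sketch of that dispatch pattern (with stand-in names; the real FMULoss and unsense live in the FMI packages):

    # Stand-in for FMULoss; `my_unsense` replaces FMIFlux's unsense, which strips AD tracking.
    mutable struct MyLoss{T}
        loss::T
    end

    my_unsense(x) = x  # placeholder: just passes the value through

    # Array-valued losses are reduced to a scalar first ...
    nominal(l::MyLoss{T}) where {T <: AbstractArray} = my_unsense(sum(l.loss))
    # ... scalar losses are returned directly.
    nominal(l::MyLoss{T}) where {T <: Real} = my_unsense(l.loss)

    nominal(MyLoss([1.0, 2.0, 3.0]))  # 6.0 (array method)
    nominal(MyLoss(4.0))              # 4.0 (scalar method)
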
6 changes: 3 additions & 3 deletions src/optimiser.jl
@@ -14,8 +14,8 @@ struct OptimOptimiserWrapper{G} <: AbstractOptimiser
     optim::Optim.AbstractOptimizer
     grad_fun!::G
 
-    state::Union{Optim.AbstractOptimizerState,Nothing}
-    d::Union{Optim.OnceDifferentiable,Nothing}
+    state::Union{Optim.AbstractOptimizerState, Nothing}
+    d::Union{Optim.OnceDifferentiable, Nothing}
     options::Any
 
     function OptimOptimiserWrapper(
@@ -57,7 +57,7 @@ end
 struct FluxOptimiserWrapper{G} <: AbstractOptimiser
     optim::Flux.Optimise.AbstractOptimiser
     grad_fun!::G
-    grad_buffer::Union{AbstractVector{Float64},AbstractMatrix{Float64}}
+    grad_buffer::Union{AbstractVector{Float64}, AbstractMatrix{Float64}}
     multiGrad::Bool
 
     function FluxOptimiserWrapper(
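
The two structs touched above wrap external optimisers (Optim.jl and Flux) behind a common AbstractOptimiser interface, carrying the gradient function and a reusable gradient buffer alongside the wrapped optimiser. A minimal, dependency-free sketch of that wrapper idea (names and the plain SGD step are illustrative, not FMIFlux's actual API):

    abstract type MyAbstractOptimiser end

    # Stand-in wrapper: gradient function, step size, and a reusable gradient buffer.
    struct SGDWrapper{G} <: MyAbstractOptimiser
        grad_fun!::G                  # (grad, params) -> nothing, fills grad in place
        lr::Float64
        grad_buffer::Vector{Float64}
    end

    function step!(opt::SGDWrapper, params::Vector{Float64})
        opt.grad_fun!(opt.grad_buffer, params)  # evaluate the gradient into the buffer
        params .-= opt.lr .* opt.grad_buffer    # one plain gradient-descent step
        return params
    end

    # Usage: minimise f(p) = sum(p .^ 2)
    grad!(g, p) = (g .= 2 .* p)
    opt = SGDWrapper(grad!, 0.1, zeros(2))
    p = [1.0, -2.0]
    for _ in 1:100
        step!(opt, p)
    end
    p  # approaches [0.0, 0.0]
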
2 changes: 1 addition & 1 deletion test/runtests.jl
@@ -28,7 +28,7 @@ global EXPORTINGTOOL = nothing
 global EXPORTINGVERSION = nothing
 global X0 = [2.0, 0.0]
 global OPTIMISER = Descent
-global FAILED_GRADIENTS_QUOTA = 1/3
+global FAILED_GRADIENTS_QUOTA = 1 / 3
 
 # callback for bad optimization steps counter
 global FAILED_GRADIENTS = 0
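
FAILED_GRADIENTS_QUOTA bounds the share of training steps that may produce unusable gradients before a test run is considered failed; a callback counts the bad steps. A hypothetical sketch of how such a quota check could work (the variable names mirror the test globals, but the counting logic here is illustrative, not the actual callback from the test suite):

    # Hypothetical quota check over gradient evaluations.
    FAILED_GRADIENTS_QUOTA = 1 / 3
    FAILED_GRADIENTS = 0
    GRADIENT_STEPS = 0

    function count_gradient!(grad)
        global FAILED_GRADIENTS, GRADIENT_STEPS
        GRADIENT_STEPS += 1
        if any(!isfinite, grad) || all(iszero, grad)
            FAILED_GRADIENTS += 1   # NaN/Inf or all-zero gradients count as failed steps
        end
        return nothing
    end

    # After training: pass only if failures stayed within the quota.
    within_quota() = FAILED_GRADIENTS <= FAILED_GRADIENTS_QUOTA * GRADIENT_STEPS
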
