Misc. fixes #196

Merged
merged 23 commits on Apr 20, 2024
Changes from 1 commit
obj_grad_hess: simplify mapreduce
* destructure lambda function args
* don't use zip(): mapreduce supports multi-arg functions (see the sketch below)
Alexey Stukalov authored and alyst committed Apr 20, 2024
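The change relies on Julia's mapreduce accepting several iterators and calling the mapped function with one element from each, which makes the explicit zip() and tuple indexing unnecessary. A minimal standalone sketch of the two styles (the objective helper, functions, and weights below are made-up stand-ins, not the package's actual types):

# Hypothetical per-term objective; stands in for objective!(fun, par, model).
objective(f, x) = f(x)

funs    = [sin, cos, abs]    # stand-in for loss.functions
weights = [0.5, 0.3, 0.2]    # stand-in for loss.weights
x       = 1.0

# Old style: zip the collections and index into each tuple inside the lambda.
old = mapreduce(fw -> fw[2] * objective(fw[1], x), +, zip(funs, weights))

# New style: pass both collections directly; mapreduce calls the lambda with
# one element from each, so the arguments can be destructured by name.
new = mapreduce((f, w) -> w * objective(f, x), +, funs, weights)

old ≈ new    # true

Both forms compute the same weighted sum; the multi-iterator form simply avoids the intermediate tuples.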

commit 18e57fac4eff855fb6a1e84217f937e69cdf2b17
60 changes: 26 additions & 34 deletions src/objective_gradient_hessian.jl
@@ -87,9 +87,10 @@ end

 function objective!(loss::SemLoss, par, model)
     return mapreduce(
-        fun_weight -> fun_weight[2] * objective!(fun_weight[1], par, model),
+        (fun, weight) -> weight * objective!(fun, par, model),
         +,
-        zip(loss.functions, loss.weights),
+        loss.functions,
+        loss.weights,
     )
 end

@@ -108,19 +109,19 @@ end

 function objective_gradient!(gradient, loss::SemLoss, par, model)
     return mapreduce(
-        fun_weight ->
-            objective_gradient_wrap_(gradient, fun_weight[1], par, model, fun_weight[2]),
+        (fun, weight) -> objective_gradient_wrap_(gradient, fun, par, model, weight),
         +,
-        zip(loss.functions, loss.weights),
+        loss.functions,
+        loss.weights,
     )
 end

 function objective_hessian!(hessian, loss::SemLoss, par, model)
     return mapreduce(
-        fun_weight ->
-            objective_hessian_wrap_(hessian, fun_weight[1], par, model, fun_weight[2]),
+        (fun, weight) -> objective_hessian_wrap_(hessian, fun, par, model, weight),
         +,
-        zip(loss.functions, loss.weights),
+        loss.functions,
+        loss.weights,
     )
 end

@@ -134,16 +135,11 @@ end

 function objective_gradient_hessian!(gradient, hessian, loss::SemLoss, par, model)
     return mapreduce(
-        fun_weight -> objective_gradient_hessian_wrap_(
-            gradient,
-            hessian,
-            fun_weight[1],
-            par,
-            model,
-            fun_weight[2],
-        ),
+        (fun, weight) ->
+            objective_gradient_hessian_wrap_(gradient, hessian, fun, par, model, weight),
         +,
-        zip(loss.functions, loss.weights),
+        loss.functions,
+        loss.weights,
     )
 end

@@ -174,9 +170,10 @@ end

 function objective!(ensemble::SemEnsemble, par)
     return mapreduce(
-        model_weight -> model_weight[2] * objective!(model_weight[1], par),
+        (model, weight) -> weight * objective!(model, par),
         +,
-        zip(ensemble.sems, ensemble.weights),
+        ensemble.sems,
+        ensemble.weights,
     )
 end

@@ -201,20 +198,20 @@
 function objective_gradient!(gradient, ensemble::SemEnsemble, par)
     fill!(gradient, zero(eltype(gradient)))
     return mapreduce(
-        model_weight ->
-            objective_gradient_wrap_(gradient, model_weight[1], par, model_weight[2]),
+        (model, weight) -> objective_gradient_wrap_(gradient, model, par, weight),
         +,
-        zip(ensemble.sems, ensemble.weights),
+        ensemble.sems,
+        ensemble.weights,
     )
 end

 function objective_hessian!(hessian, ensemble::SemEnsemble, par)
     fill!(hessian, zero(eltype(hessian)))
     return mapreduce(
-        model_weight ->
-            objective_hessian_wrap_(hessian, model_weight[1], par, model_weight[2]),
+        (model, weight) -> objective_hessian_wrap_(hessian, model, par, weight),
         +,
-        zip(ensemble.sems, ensemble.weights),
+        ensemble.sems,
+        ensemble.weights,
     )
 end

@@ -236,16 +233,11 @@ function objective_gradient_hessian!(gradient, hessian, ensemble::SemEnsemble, par)
     fill!(gradient, zero(eltype(gradient)))
     fill!(hessian, zero(eltype(hessian)))
     return mapreduce(
-        model_weight -> objective_gradient_hessian_wrap_(
-            gradient,
-            hessian,
-            model_weight[1],
-            par,
-            model,
-            model_weight[2],
-        ),
+        (model, weight) ->
+            objective_gradient_hessian_wrap_(gradient, hessian, model, par, model, weight),
         +,
-        zip(ensemble.sems, ensemble.weights),
+        ensemble.sems,
+        ensemble.weights,
     )
 end