Commit 6ee7bc66 authored by Vaggelis Papoutsis, committed by Andrew Heather

ENH: added a general framework for normalization and setting targets for
all objective functions.

- Normalization is useful for practically all update methods that handle
constraints (e.g. SQP, MMA). The normalization factor can either be given
explicitly or, if not given, it is taken as the value of the objective
function in the first optimisation cycle.
- The target value is useful when the objective acts as a constraint in
constrained optimisation problems (e.g. drag - dragTarget). It should only
be used with update methods that act on the value of the constraint itself
(e.g. SQP, MMA), not when the objective at hand is the only objective of
the optimisation problem; in that case, a squared objective should be used
instead (e.g. sqr(drag - dragTarget)). A configuration sketch follows this
list.
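As an illustration only, the new entries might be set in an objective
sub-dictionary roughly as sketched below. Only the keywords normalize,
normFactor and target are introduced by this commit; the objective name,
its remaining entries, the example values and the surrounding dictionary
structure are assumptions made for the sake of the sketch.

    drag
    {
        // ... objective-specific entries (type, patches, direction, ...) ...

        // Normalize JCycle() and all stored sensitivity fields
        normalize   true;

        // Optional; if omitted, the objective value of the first
        // optimisation cycle is used as the normalization factor
        normFactor  1.5;

        // Subtracted from the objective value, so that the objective can
        // act as the constraint (drag - dragTarget) in e.g. SQP or MMA
        target      0.02;
    }

If the objective above were the only objective of the problem, target
should be left unset and a squared objective of the form
sqr(drag - dragTarget) formulated instead, as noted above.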
parent 4d67819a
Merge request !368: Updates for the adjoint optimisation library
@@ -151,12 +151,16 @@ bool objectiveManager::readDict(const dictionary& dict)
     return true;
 }


 void objectiveManager::updateNormalizationFactor()
 {
     // Update normalization factors for all objectives
     for (objective& obj : objectives_)
     {
-        obj.updateNormalizationFactor();
+        if (obj.normalize())
+        {
+            obj.updateNormalizationFactor();
+        }
     }
 }
@@ -128,6 +128,67 @@ autoPtr<objectiveIncompressible> objectiveIncompressible::New

 // * * * * * * * * * * * * * * * Member Functions  * * * * * * * * * * * * //

+void objectiveIncompressible::doNormalization()
+{
+    if (normalize_ && normFactor_.valid())
+    {
+        const scalar oneOverNorm(1./normFactor_());
+
+        if (hasdJdv())
+        {
+            dJdvPtr_().primitiveFieldRef() *= oneOverNorm;
+        }
+        if (hasdJdp())
+        {
+            dJdpPtr_().primitiveFieldRef() *= oneOverNorm;
+        }
+        if (hasdJdT())
+        {
+            dJdTPtr_().primitiveFieldRef() *= oneOverNorm;
+        }
+        if (hasdJdTMVar1())
+        {
+            dJdTMvar1Ptr_().primitiveFieldRef() *= oneOverNorm;
+        }
+        if (hasdJdTMVar2())
+        {
+            dJdTMvar2Ptr_().primitiveFieldRef() *= oneOverNorm;
+        }
+        if (hasBoundarydJdv())
+        {
+            bdJdvPtr_() *= oneOverNorm;
+        }
+        if (hasBoundarydJdvn())
+        {
+            bdJdvnPtr_() *= oneOverNorm;
+        }
+        if (hasBoundarydJdvt())
+        {
+            bdJdvtPtr_() *= oneOverNorm;
+        }
+        if (hasBoundarydJdp())
+        {
+            bdJdpPtr_() *= oneOverNorm;
+        }
+        if (hasBoundarydJdT())
+        {
+            bdJdTPtr_() *= oneOverNorm;
+        }
+        if (hasBoundarydJdTMVar1())
+        {
+            bdJdTMvar1Ptr_() *= oneOverNorm;
+        }
+        if (hasBoundarydJdTMVar2())
+        {
+            bdJdTMvar2Ptr_() *= oneOverNorm;
+        }
+
+        // Normalize geometric fields
+        objective::doNormalization();
+    }
+}
+
+
 const volVectorField& objectiveIncompressible::dJdv()
 {
     if (dJdvPtr_.empty())
@@ -418,6 +479,9 @@ void objectiveIncompressible::update()
     update_dxdbDirectMultiplier();
     update_boundaryEdgeContribution();
     update_dJdStressMultiplier();
+
+    // Divide everything by the normalization factor
+    doNormalization();
 }
@@ -165,6 +165,9 @@ public:
         //- Return the objective function value
         virtual scalar J() = 0;

+        //- Normalize all fields allocated by the objective
+        virtual void doNormalization();
+
         //- Contribution to field adjoint momentum eqs
         const volVectorField& dJdv();
@@ -247,6 +247,7 @@ void objectivePtLosses::write() const
            setObjectiveFilePtr();
            objFunctionFilePtr_() << setw(4) << "#" << " ";
            objFunctionFilePtr_() << setw(width) << "ptLosses" << " ";
+           objFunctionFilePtr_() << setw(width) << "JCycle" << " ";
            forAll(patches_, oI)
            {
                label patchI = patches_[oI];
@@ -258,6 +259,7 @@ void objectivePtLosses::write() const
        objFunctionFilePtr_() << setw(4) << mesh_.time().value() << " ";
        objFunctionFilePtr_() << setw(width) << J_ << " ";
+       objFunctionFilePtr_() << setw(width) << JCycle() << " ";
        forAll(patchPt_, pI)
        {
            objFunctionFilePtr_() << setw(width) << patchPt_[pI] << " ";
@@ -59,8 +59,10 @@ class objectivePtLosses
 {
     // Private data

+        //- Patches on which to compute losses
         labelList patches_;

+        //- Total pressure per patch on patches_
         scalarField patchPt_;
@@ -129,10 +129,13 @@ objective::objective
     objectiveName_(dict.dictName()),
     computeMeanFields_(false), // is reset in derived classes
     nullified_(false),
+    normalize_(dict.getOrDefault<bool>("normalize", false)),
     J_(Zero),
     JMean_(this->getOrDefault<scalar>("JMean", Zero)),
     weight_(Zero),
+    normFactor_(nullptr),
+    target_(dict.getOrDefault<scalar>("target", Zero)),
     integrationStartTimePtr_(nullptr),
     integrationEndTimePtr_(nullptr),
@@ -174,6 +177,20 @@ objective::objective
             new scalar(dict.get<scalar>("integrationEndTime"))
         );
     }
+
+    // Set normalization factor, if present
+    if (normalize_)
+    {
+        scalar normFactor(Zero);
+        if (dict.readIfPresent("normFactor", normFactor))
+        {
+            normFactor_.reset(new scalar(normFactor));
+        }
+        else if (this->readIfPresent("normFactor", normFactor))
+        {
+            normFactor_.reset(new scalar(normFactor));
+        }
+    }
 }
@@ -234,13 +251,26 @@ scalar objective::JCycle() const
     {
         J = JMean_;
     }
+
+    // Subtract target, in case the objective is used as a constraint
+    J -= target_;
+
+    // Normalize here, in order to get the correct value for line search
+    if (normalize_ && normFactor_.valid())
+    {
+        J /= normFactor_();
+    }

     return J;
 }


 void objective::updateNormalizationFactor()
 {
-    // Does nothing in base
+    if (normalize_ && normFactor_.empty())
+    {
+        normFactor_.reset(new scalar(JCycle()));
+    }
 }
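Reading the constructor, JCycle() and updateNormalizationFactor() changes
above together, the per-cycle value handed to the update method (and to the
line search) effectively becomes, when normalization is active,

\[
J_{\mathrm{cycle}} = \frac{J - \mathrm{target}}{J_{\mathrm{norm}}}
\]

where J is the instantaneous or mean objective value and J_norm is the
explicitly supplied normFactor or, if none was given, the value stored by
updateNormalizationFactor() in the first optimisation cycle.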
@@ -289,6 +319,62 @@ scalar objective::weight() const
 }


+bool objective::normalize() const
+{
+    return normalize_;
+}
+
+
+void objective::doNormalization()
+{
+    if (normalize_ && normFactor_.valid())
+    {
+        const scalar oneOverNorm(1./normFactor_());
+
+        if (hasdJdb())
+        {
+            dJdbPtr_().primitiveFieldRef() *= oneOverNorm;
+        }
+        if (hasBoundarydJdb())
+        {
+            bdJdbPtr_() *= oneOverNorm;
+        }
+        if (hasdSdbMult())
+        {
+            bdSdbMultPtr_() *= oneOverNorm;
+        }
+        if (hasdndbMult())
+        {
+            bdndbMultPtr_() *= oneOverNorm;
+        }
+        if (hasdxdbMult())
+        {
+            bdxdbMultPtr_() *= oneOverNorm;
+        }
+        if (hasdxdbDirectMult())
+        {
+            bdxdbDirectMultPtr_() *= oneOverNorm;
+        }
+        if (hasBoundaryEdgeContribution())
+        {
+            bEdgeContribution_() *= oneOverNorm;
+        }
+        if (hasDivDxDbMult())
+        {
+            divDxDbMultPtr_() *= oneOverNorm;
+        }
+        if (hasGradDxDbMult())
+        {
+            gradDxDbMultPtr_() *= oneOverNorm;
+        }
+        if (hasBoundarydJdStress())
+        {
+            bdJdStressPtr_() *= oneOverNorm;
+        }
+    }
+}
+
+
 bool objective::isWithinIntegrationTime() const
 {
     if (hasIntegrationStartTime() && hasIntegrationEndTime())
@@ -667,6 +753,10 @@ void objective::writeMeanValue() const
 bool objective::writeData(Ostream& os) const
 {
     os.writeEntry("JMean", JMean_);
+    if (normFactor_.valid())
+    {
+        os.writeEntry("normFactor", normFactor_());
+    }

     return os.good();
 }
@@ -72,6 +72,7 @@ protected:
         const word objectiveName_;
         bool computeMeanFields_;
         bool nullified_;
+        bool normalize_;

         //- Objective function value and weight
         scalar J_;
@@ -82,12 +83,21 @@ protected:
         //- Objective weight
         scalar weight_;

-        // Objective integration start and end times (for unsteady flows)
+        //- Normalization factor
+        autoPtr<scalar> normFactor_;
+
+        //- Target value, in case the objective is used as a constraint.
+        //  Should be used with caution, and only with update methods that
+        //  are affected by the constraint value itself without requiring
+        //  a sqr (e.g. SQP, MMA)
+        scalar target_;
+
+        //- Objective integration start and end times (for unsteady flows)
         autoPtr<scalar> integrationStartTimePtr_;
         autoPtr<scalar> integrationEndTimePtr_;

-        // Contribution to field sensitivity derivatives
-        // Topology optimisation
+        //- Contribution to field sensitivity derivatives
+        //  Topology optimisation or other variants with
+        //  as many design variables as the mesh cells
         //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
         autoPtr<volScalarField> dJdbPtr_;
@@ -248,6 +258,12 @@ public:
         //- Return the objective function weight
         scalar weight() const;

+        //- Is the objective normalized
+        bool normalize() const;
+
+        //- Normalize all fields allocated by the objective
+        virtual void doNormalization();
+
         //- Check whether this is an objective integration time
        bool isWithinIntegrationTime() const;