Commit 37e248c7 authored by Mark Olesen

STYLE: consistent use of wordHashSet instead of HashSet<word>

- the wordHashSet typedef is always available when HashSet has been
  included.

- use default HashTable key (word) instead of explicitly mentioning it
parent f95f8bf5
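For context, a minimal standalone sketch of why the two spellings are interchangeable. This is not the actual OpenFOAM header code; `word`, `HashSet`, `HashTable`, and `wordHashSet` below are simplified stand-ins built on the standard library, assuming only what the commit message states: the key type defaults to `word`, and the `wordHashSet` typedef is declared alongside `HashSet`, so it is available wherever `HashSet` has been included.

    // Minimal sketch with simplified stand-ins for the OpenFOAM types.
    #include <iostream>
    #include <string>
    #include <unordered_map>
    #include <unordered_set>

    using word = std::string;                  // stand-in for Foam::word

    template<class Key = word>                 // key type defaults to word
    class HashSet
    :
        public std::unordered_set<Key>
    {
    public:
        using std::unordered_set<Key>::unordered_set;

        bool found(const Key& k) const { return this->count(k) > 0; }
    };

    // Declared together with HashSet, so available wherever HashSet is
    // included - hence "wordHashSet" rather than "HashSet<word>"
    typedef HashSet<word> wordHashSet;

    // Value type first, key defaults to word:
    // HashTable<T> names the same type as HashTable<T, word>
    template<class T, class Key = word>
    using HashTable = std::unordered_map<Key, T>;

    int main()
    {
        wordHashSet patchNames;                // same type as HashSet<word>
        patchNames.insert("wall");
        patchNames.insert("fan");

        HashTable<double> minContinuousAlpha;  // no need to repeat the key type
        minContinuousAlpha["air"] = 0.3;

        // range-based for instead of an iterator macro
        for (const word& name : patchNames)
        {
            std::cout << name << " " << patchNames.found(name) << '\n';
        }

        return 0;
    }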
@@ -54,7 +54,7 @@ class hyperbolic
// Private data
//- Minimum fraction of phases which can be considered continuous
- HashTable<dimensionedScalar, word, word::hash> minContinuousAlpha_;
+ HashTable<dimensionedScalar> minContinuousAlpha_;
//- Width of the transition
const dimensionedScalar transitionAlphaScale_;
......
@@ -54,12 +54,10 @@ class linear
// Private data
//- Minimum fraction of phases which can be considered fully continuous
- HashTable<dimensionedScalar, word, word::hash>
-     minFullyContinuousAlpha_;
+ HashTable<dimensionedScalar> minFullyContinuousAlpha_;
//- Minimum fraction of phases which can be considered partly continuous
- HashTable<dimensionedScalar, word, word::hash>
-     minPartlyContinuousAlpha_;
+ HashTable<dimensionedScalar> minPartlyContinuousAlpha_;
public:
......
@@ -134,7 +134,7 @@ protected:
phasePairTable;
typedef
- HashTable<autoPtr<blendingMethod>, word, word::hash>
+ HashTable<autoPtr<blendingMethod>>
blendingMethodTable;
typedef
......
@@ -54,7 +54,7 @@ class hyperbolic
// Private data
//- Maximum fraction of phases which can be considered dispersed
- HashTable<dimensionedScalar, word, word::hash> maxDispersedAlpha_;
+ HashTable<dimensionedScalar> maxDispersedAlpha_;
//- Width of the transition
const dimensionedScalar transitionAlphaScale_;
......
@@ -54,12 +54,10 @@ class linear
// Private data
//- Maximum fraction of phases which can be considered fully dispersed
- HashTable<dimensionedScalar, word, word::hash>
-     maxFullyDispersedAlpha_;
+ HashTable<dimensionedScalar> maxFullyDispersedAlpha_;
//- Maximum fraction of phases which can be considered partly dispersed
- HashTable<dimensionedScalar, word, word::hash>
-     maxPartlyDispersedAlpha_;
+ HashTable<dimensionedScalar> maxPartlyDispersedAlpha_;
public:
......
@@ -94,7 +94,7 @@ class twoPhaseSystem
autoPtr<orderedPhasePair> pair2In1_;
//- Blending methods
- HashTable<autoPtr<blendingMethod>, word, word::hash> blendingMethods_;
+ HashTable<autoPtr<blendingMethod>> blendingMethods_;
//- Drag model
autoPtr<BlendedInterfacialModel<dragModel>> drag_;
......
@@ -103,7 +103,7 @@ DynamicList<label> cellGroupType;
bool cubitFile = false;
- void uniquify(word& name, HashSet<word>& patchNames)
+ void uniquify(word& name, wordHashSet& patchNames)
{
if (!patchNames.found(name))
{
@@ -929,7 +929,7 @@ int main(int argc, char *argv[])
// Foam patch type for Fluent zone type
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- HashSet<word> fluentGroupToFoamPatch;
+ wordHashSet fluentGroupToFoamPatch;
fluentGroupToFoamPatch.insert("wall");
fluentGroupToFoamPatch.insert("fan");
@@ -1040,7 +1040,7 @@ int main(int argc, char *argv[])
// ~~~~~~~~~~~~~~~~~
List<polyPatch*> newPatches(patchIDs.size());
- HashSet<word> patchNames;
+ wordHashSet patchNames;
forAll(patchIDs, patchi)
{
@@ -1138,7 +1138,7 @@ int main(int argc, char *argv[])
// Cell zones
mesh.cellZones().setSize(cellZoneIDs.size());
- HashSet<word> cellZoneNames;
+ wordHashSet cellZoneNames;
forAll(cellZoneIDs, cellZonei)
{
@@ -1167,7 +1167,7 @@ int main(int argc, char *argv[])
// Face zones
mesh.faceZones().setSize(faceZoneIDs.size());
- HashSet<word> faceZoneNames;
+ wordHashSet faceZoneNames;
forAll(faceZoneIDs, faceZonei)
{
......
@@ -331,7 +331,7 @@ Foam::conformationSurfaces::conformationSurfaces
List<sideVolumeType> globalVolumeTypes(surfI);
List<Map<sideVolumeType>> regionVolumeTypes(surfI);
- HashSet<word> unmatchedKeys(surfacesDict.toc());
+ wordHashSet unmatchedKeys(surfacesDict.toc());
surfI = 0;
forAll(allGeometry_.names(), geomI)
......
@@ -118,7 +118,7 @@ autoPtr<refinementSurfaces> createRefinementSurfaces
List<Map<scalar>> regionAngle(surfi);
List<Map<autoPtr<dictionary>>> regionPatchInfo(surfi);
- HashSet<word> unmatchedKeys(surfacesDict.toc());
+ wordHashSet unmatchedKeys(surfacesDict.toc());
surfi = 0;
forAll(allGeometry.names(), geomi)
......
@@ -133,7 +133,7 @@ int main(int argc, char *argv[])
word surfaceFormat;
const bool writeSets = args.readIfPresent("writeSets", surfaceFormat);
- HashSet<word> selectedFields;
+ wordHashSet selectedFields;
bool writeFields = args.readIfPresent
(
"writeFields",
......
@@ -69,7 +69,7 @@ void minFaceToCell
void Foam::writeFields
(
const fvMesh& mesh,
- const HashSet<word>& selectedFields
+ const wordHashSet& selectedFields
)
{
if (selectedFields.empty())
......
@@ -4,7 +4,7 @@ namespace Foam
{
void writeFields
(
- const fvMesh&,
- const HashSet<word>& selectedFields
+ const fvMesh& mesh,
+ const wordHashSet& selectedFields
);
}
@@ -113,7 +113,7 @@ label addPatch
// Filter out the empty patches.
- void filterPatches(fvMesh& mesh, const HashSet<word>& addedPatchNames)
+ void filterPatches(fvMesh& mesh, const wordHashSet& addedPatchNames)
{
// Remove any zero-sized ones. Assumes
// - processor patches are already only there if needed
@@ -613,7 +613,7 @@ int main(int argc, char *argv[])
// Count patches to add
// ~~~~~~~~~~~~~~~~~~~~
- HashSet<word> bafflePatches;
+ wordHashSet bafflePatches;
{
forAll(selectors, selectorI)
{
@@ -655,7 +655,7 @@ int main(int argc, char *argv[])
// Pass 1: add patches
// ~~~~~~~~~~~~~~~~~~~
- //HashSet<word> addedPatches;
+ // wordHashSet addedPatches;
{
const polyBoundaryMesh& pbm = mesh.boundaryMesh();
forAll(selectors, selectorI)
@@ -834,7 +834,7 @@ int main(int argc, char *argv[])
fvMeshMapper mapper(mesh, map);
bool hasWarned = false;
- forAllConstIter(HashSet<word>, bafflePatches, iter)
+ forAllConstIter(wordHashSet, bafflePatches, iter)
{
label patchi = mesh.boundaryMesh().findPatchID(iter.key());
......
@@ -102,7 +102,7 @@ void changePatchID
// Filter out the empty patches.
- void filterPatches(polyMesh& mesh, const HashSet<word>& addedPatchNames)
+ void filterPatches(polyMesh& mesh, const wordHashSet& addedPatchNames)
{
const polyBoundaryMesh& patches = mesh.boundaryMesh();
@@ -557,7 +557,7 @@ int main(int argc, char *argv[])
// Read patch construct info from dictionary
PtrList<dictionary> patchSources(dict.lookup("patches"));
- HashSet<word> addedPatchNames;
+ wordHashSet addedPatchNames;
forAll(patchSources, addedI)
{
const dictionary& dict = patchSources[addedI];
......
@@ -139,7 +139,7 @@ int main(int argc, char *argv[])
IOobjectList faceObjects(objects.lookupClass(faceSet::typeName));
- HashSet<word> slaveCellSets;
+ wordHashSet slaveCellSets;
//Pout<< "faceSets:" << faceObjects.names() << endl;
......
@@ -101,11 +101,11 @@ void Foam::helpTypes::helpBoundary::execute
}
else if (args.found("constraint"))
{
- HashSet<word> constraintTypes(fvPatch::constraintTypes());
+ wordHashSet constraintTypes(fvPatch::constraintTypes());
Info<< "Constraint types:" << nl;
- forAllConstIter(HashSet<word>, constraintTypes, iter)
+ for (const word& cType : constraintTypes)
{
-     Info<< " " << iter.key() << nl;
+     Info<< " " << cType << nl;
}
Info<< endl;
}
......
@@ -321,11 +321,8 @@ int main(int argc, char *argv[])
// Search for list of objects for this time
IOobjectList objects(mesh, runTime.timeName());
- HashSet<word> selectedFields;
- if (args.found("fields"))
- {
-     args.lookup("fields")() >> selectedFields;
- }
+ wordHashSet selectedFields;
+ args.readIfPresent("fields", selectedFields);
// Construct the vol fields (on the original mesh if subsetted)
......
@@ -39,7 +39,7 @@ void readFields
const meshSubsetHelper& helper,
const typename GeoField::Mesh& mesh,
const IOobjectList& objects,
- const HashSet<word>& selectedFields,
+ const wordHashSet& selectedFields,
PtrList<const GeoField>& fields
)
{
......
@@ -51,7 +51,7 @@ void readFields
const meshSubsetHelper&,
const typename GeoField::Mesh& mesh,
const IOobjectList& objects,
- const HashSet<word>& selectedFields,
+ const wordHashSet& selectedFields,
PtrList<const GeoField>& fields
);
......
// check all time directories for the following:
// Any cloud names:
- HashSet<word> allCloudDirs;
+ wordHashSet allCloudDirs;
if (timeDirs.size() && !noLagrangian)
{
......