From 0f155daf861cfd605ac6849f6fa477a6ab19a3a9 Mon Sep 17 00:00:00 2001 From: Andrew Heather <> Date: Thu, 11 Nov 2021 12:31:34 +0000 Subject: [PATCH] ENH: particleTracks - updated to include field data The utility will now add field data to all tracks (previous version only created the geometry) The new 'fields' entry can be used to output specific fields. Example cloud reactingCloud1; sampleFrequency 1; maxPositions 1000000; fields (d U); // includes wildcard support STYLE: minor typo fix --- .../lagrangian/particleTracks/createFields.H | 8 + .../particleTracks/particleTracks.C | 364 ++++++++++++++---- .../steadyParticleTracks.C | 2 +- 3 files changed, 290 insertions(+), 84 deletions(-) diff --git a/applications/utilities/postProcessing/lagrangian/particleTracks/createFields.H b/applications/utilities/postProcessing/lagrangian/particleTracks/createFields.H index 742fe261d2f..36186a9e92c 100644 --- a/applications/utilities/postProcessing/lagrangian/particleTracks/createFields.H +++ b/applications/utilities/postProcessing/lagrangian/particleTracks/createFields.H @@ -15,4 +15,12 @@ const label sampleFrequency(propsDict.get<label>("sampleFrequency")); const label maxPositions(propsDict.get<label>("maxPositions")); +const label maxTracks(propsDict.getOrDefault<label>("maxTracks", -1)); + const word setFormat(propsDict.getOrDefault<word>("setFormat", "vtk")); + +const wordRes fieldNames(propsDict.getOrDefault<wordRes>("fields", wordRes())); + +const word UName(propsDict.getOrDefault<word>("U", "U")); + +const dictionary formatOptions = propsDict.subOrEmptyDict("formatOptions"); diff --git a/applications/utilities/postProcessing/lagrangian/particleTracks/particleTracks.C b/applications/utilities/postProcessing/lagrangian/particleTracks/particleTracks.C index e247d87021b..c66fe1995fc 100644 --- a/applications/utilities/postProcessing/lagrangian/particleTracks/particleTracks.C +++ b/applications/utilities/postProcessing/lagrangian/particleTracks/particleTracks.C @@ -6,6 +6,7 @@ \\/ M anipulation | ------------------------------------------------------------------------------- Copyright (C) 2011-2016 OpenFOAM Foundation + Copyright (C) 2021 OpenCFD Ltd. ------------------------------------------------------------------------------- License This file is part of OpenFOAM. @@ -30,8 +31,8 @@ Group grpPostProcessingUtilities Description - Generate a legacy VTK file of particle tracks for cases that were - computed using a tracked-parcel-type cloud. + Generate particle tracks for cases that were computed using a + tracked-parcel-type cloud. 
\*---------------------------------------------------------------------------*/ @@ -44,16 +45,206 @@ Description #include "OFstream.H" #include "passiveParticleCloud.H" #include "writer.H" +#include "ListOps.H" + +#define createTrack(field, trackValues) \ +createTrackField \ +( \ + field, \ + sampleFrequency, \ + maxPositions, \ + startIds, \ + allOrigProcs, \ + allOrigIds, \ + trackValues \ +); + +#define setFields(fields, fieldNames) \ +setTrackFields \ +( \ + obr, \ + fields, \ + fieldNames, \ + nTracks, \ + startIds, \ + allOrigProcs, \ + allOrigIds, \ + maxPositions, \ + sampleFrequency \ +); + +#define writeFields(fields, fieldNames, tracks, times, dirs) \ +writeTrackFields \ +( \ + fields, \ + fieldNames, \ + tracks, \ + times, \ + dirs, \ + setFormat, \ + formatOptions, \ + cloudName \ +); using namespace Foam; +template<class Type> +void createTrackField +( + const Field<Type>& values, + const label sampleFrequency, + const label maxPositions, + const labelList& startIds, + const List<labelField>& allOrigProcs, + const List<labelField>& allOrigIds, + List<DynamicList<Type>>& trackValues +) +{ + List<Field<Type>> procField(Pstream::nProcs()); + procField[Pstream::myProcNo()] = values; + Pstream::gatherList(procField); + + if (!Pstream::master()) + { + return; + } + + const label nTracks = trackValues.size(); + + forAll(procField, proci) + { + forAll(procField[proci], i) + { + const label globalId = + startIds[allOrigProcs[proci][i]] + allOrigIds[proci][i]; + + if (globalId % sampleFrequency == 0) + { + const label trackId = globalId/sampleFrequency; + + if + ( + trackId < nTracks + && trackValues[trackId].size() < maxPositions + ) + { + trackValues[trackId].append(procField[proci][i]); + } + } + } + } +} + + +template<class Type> +void writeTrackFields +( + List<List<DynamicList<Type>>>& fieldValues, + const wordList& fieldNames, + const PtrList<coordSet>& tracks, + const List<scalarField>& times, + const List<vectorField>& dirs, + const word& setFormat, + const dictionary& formatOptions, + const word& cloudName +) +{ + if (fieldValues.empty()) + { + return; + } + + auto writerPtr = writer<Type>::New(setFormat, formatOptions); + + const fileName outFile(writerPtr().getFileName(tracks[0], wordList(0))); + + const fileName outPath + ( + functionObject::outputPrefix/cloud::prefix/cloudName/"particleTracks" + ); + mkDir(outPath); + + OFstream os(outPath/(pTraits<Type>::typeName & "tracks." 
+ outFile.ext())); + + Info<< "Writing " << pTraits<Type>::typeName << " particle tracks in " + << setFormat << " format to " << os.name() << endl; + + + List<List<Field<Type>>> fields(fieldValues.size()); + forAll(fields, fieldi) + { + fields[fieldi].setSize(fieldValues[fieldi].size()); + forAll(fields[fieldi], tracki) + { + fields[fieldi][tracki].transfer(fieldValues[fieldi][tracki]); + } + } + + writerPtr().write(true, times, tracks, fieldNames, fields, os); +} + + +template<class Type> +Foam::label setTrackFields +( + const objectRegistry& obr, + List<List<DynamicList<Type>>>& fields, + List<word>& fieldNames, + const label nTracks, + const labelList& startIds, + const List<labelField>& allOrigProcs, + const List<labelField>& allOrigIds, + const label maxPositions, + const label sampleFrequency +) +{ + const auto availableFieldPtrs = obr.lookupClass<IOField<Type>>(); + + fieldNames = availableFieldPtrs.toc(); + + if (Pstream::parRun()) + { + Pstream::combineGather(fieldNames, ListOps::uniqueEqOp<word>()); + Pstream::combineScatter(fieldNames); + Foam::sort(fieldNames); + } + + const label nFields = fieldNames.size(); + + if (fields.empty()) + { + fields.setSize(nFields); + fieldNames.setSize(nFields); + forAll(fields, i) + { + fields[i].setSize(nTracks); + } + } + + forAll(fieldNames, fieldi) + { + const word& fieldName = fieldNames[fieldi]; + + const auto* fldPtr = obr.cfindObject<IOField<Type>>(fieldName); + + createTrack + ( + fldPtr ? static_cast<const Field<Type>>(*fldPtr) : Field<Type>(), + fields[fieldi] + ); + } + + return nFields; +} + + // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // int main(int argc, char *argv[]) { argList::addNote ( - "Generate a legacy VTK file of particle tracks for cases that were" + "Generate a file of particle tracks for cases that were" " computed using a tracked-parcel-type cloud" ); @@ -76,9 +267,9 @@ int main(int argc, char *argv[]) << nl << endl; labelList maxIds(Pstream::nProcs(), -1); - forAll(timeDirs, timeI) + forAll(timeDirs, timei) { - runTime.setTime(timeDirs[timeI], timeI); + runTime.setTime(timeDirs[timei], timei); Info<< "Time = " << runTime.timeName() << endl; Info<< " Reading particle positions" << endl; @@ -92,6 +283,8 @@ int main(int argc, char *argv[]) const label origId = p.origId(); const label origProc = p.origProc(); + // Handle case where we are processing particle data generated in + // parallel using more cores than when running this application. if (origProc >= maxIds.size()) { maxIds.setSize(origProc+1, -1); @@ -124,7 +317,7 @@ int main(int argc, char *argv[]) // Calculate starting ids for particles on each processor labelList startIds(numIds.size(), Zero); - for (label i = 0; i < numIds.size()-1; i++) + for (label i = 0; i < numIds.size()-1; ++i) { startIds[i+1] += startIds[i] + numIds[i]; } @@ -132,130 +325,135 @@ int main(int argc, char *argv[]) // Number of tracks to generate - label nTracks = nParticle/sampleFrequency; + const label nTracks = + maxTracks > 0 + ? 
min(nParticle/sampleFrequency, maxTracks) + : nParticle/sampleFrequency; // Storage for all particle tracks List<DynamicList<vector>> allTracks(nTracks); + List<DynamicList<scalar>> allTrackTimes(nTracks); + + // Lists of field, tracki, trackValues + //List<List<DynamicList<label>>> labelFields; + List<List<DynamicList<scalar>>> scalarFields; + List<List<DynamicList<vector>>> vectorFields; + List<List<DynamicList<sphericalTensor>>> sphTensorFields; + List<List<DynamicList<symmTensor>>> symTensorFields; + List<List<DynamicList<tensor>>> tensorFields; + //List<word> labelFieldNames; + List<word> scalarFieldNames; + List<word> vectorFieldNames; + List<word> sphTensorFieldNames; + List<word> symTensorFieldNames; + List<word> tensorFieldNames; Info<< "\nGenerating " << nTracks << " particle tracks for cloud " << cloudName << nl << endl; - forAll(timeDirs, timeI) + forAll(timeDirs, timei) { - runTime.setTime(timeDirs[timeI], timeI); + runTime.setTime(timeDirs[timei], timei); Info<< "Time = " << runTime.timeName() << endl; - List<pointField> allPositions(Pstream::nProcs()); - List<labelField> allOrigIds(Pstream::nProcs()); - List<labelField> allOrigProcs(Pstream::nProcs()); - // Read particles. Will be size 0 if no particles. Info<< " Reading particle positions" << endl; passiveParticleCloud myCloud(mesh, cloudName); + pointField localPositions(myCloud.size(), Zero); + scalarField localTimes(myCloud.size(), Zero); + + List<labelField> allOrigIds(Pstream::nProcs()); + List<labelField> allOrigProcs(Pstream::nProcs()); + // Collect the track data on all processors that have positions - allPositions[Pstream::myProcNo()].setSize - ( - myCloud.size(), - point::zero - ); allOrigIds[Pstream::myProcNo()].setSize(myCloud.size(), Zero); allOrigProcs[Pstream::myProcNo()].setSize(myCloud.size(), Zero); label i = 0; for (const passiveParticle& p : myCloud) { - allPositions[Pstream::myProcNo()][i] = p.position(); allOrigIds[Pstream::myProcNo()][i] = p.origId(); allOrigProcs[Pstream::myProcNo()][i] = p.origProc(); + localPositions[i] = p.position(); + localTimes[i] = runTime.value(); ++i; } // Collect the track data on the master processor - Pstream::gatherList(allPositions); Pstream::gatherList(allOrigIds); Pstream::gatherList(allOrigProcs); - Info<< " Constructing tracks" << nl << endl; - if (Pstream::master()) - { - forAll(allPositions, proci) - { - forAll(allPositions[proci], i) - { - label globalId = - startIds[allOrigProcs[proci][i]] - + allOrigIds[proci][i]; - - if (globalId % sampleFrequency == 0) - { - label trackId = globalId/sampleFrequency; - if (allTracks[trackId].size() < maxPositions) - { - allTracks[trackId].append - ( - allPositions[proci][i] - ); - } - } - } - } - } + objectRegistry obr + ( + IOobject + ( + "cloudFields", + runTime.timeName(), + runTime + ) + ); + + myCloud.readFromFiles(obr, fieldNames); + + // Create track positions and track time fields + // (not registered as IOFields) + // Note: createTrack is a local #define to reduce arg count... + createTrack(localPositions, allTracks); + createTrack(localTimes, allTrackTimes); + + // Create the track fields + // Note: setFields is a local #define to reduce arg count... 
+ //setFields(labelFields, labelFieldNames); + setFields(scalarFields, scalarFieldNames); + setFields(vectorFields, vectorFieldNames); + setFields(sphTensorFields, sphTensorFieldNames); + setFields(symTensorFields, symTensorFieldNames); + setFields(tensorFields, tensorFieldNames); } if (Pstream::master()) { PtrList<coordSet> tracks(allTracks.size()); - forAll(allTracks, trackI) + List<scalarField> times(tracks.size()); + forAll(tracks, tracki) { tracks.set ( - trackI, - new coordSet - ( - "track" + Foam::name(trackI), - "distance" - ) + tracki, + new coordSet("track" + Foam::name(tracki), "distance") ); - tracks[trackI].transfer(allTracks[trackI]); + tracks[tracki].transfer(allTracks[tracki]); + times[tracki].transfer(allTrackTimes[tracki]); } - autoPtr<writer<scalar>> scalarFormatterPtr = writer<scalar>::New - ( - setFormat - ); + Info<< nl; - //OFstream vtkTracks(vtkPath/"particleTracks.vtk"); - fileName vtkFile - ( - scalarFormatterPtr().getFileName - ( - tracks[0], - wordList(0) - ) - ); - - OFstream vtkTracks - ( - vtkPath/("particleTracks." + vtkFile.ext()) - ); + const label Uid = vectorFieldNames.find(UName); + List<vectorField> dirs(nTracks); - Info<< "\nWriting particle tracks in " << setFormat - << " format to " << vtkTracks.name() - << nl << endl; + if (Uid != -1) + { + const auto& UTracks = vectorFields[Uid]; + forAll(UTracks, tracki) + { + const auto& U = UTracks[tracki]; + dirs[tracki] = U/(mag(U) + ROOTVSMALL); + } + } - scalarFormatterPtr().write - ( - true, - tracks, - wordList(0), - List<List<scalarField>>(0), - vtkTracks - ); + // Write track fields + // Note: writeFields is a local #define to reduce arg count... + //writeFields(allLabelFields, labelFieldNames, tracks); + writeFields(scalarFields, scalarFieldNames, tracks, times, dirs); + writeFields(vectorFields, vectorFieldNames, tracks, times, dirs); + writeFields(sphTensorFields, sphTensorFieldNames, tracks, times, dirs); + writeFields(symTensorFields, symTensorFieldNames, tracks, times, dirs); + writeFields(tensorFields, tensorFieldNames, tracks, times, dirs); } - Info<< "End\n" << endl; + Info<< nl << "End\n" << endl; return 0; } diff --git a/applications/utilities/postProcessing/lagrangian/steadyParticleTracks/steadyParticleTracks.C b/applications/utilities/postProcessing/lagrangian/steadyParticleTracks/steadyParticleTracks.C index bc26c09774d..8da9c071514 100644 --- a/applications/utilities/postProcessing/lagrangian/steadyParticleTracks/steadyParticleTracks.C +++ b/applications/utilities/postProcessing/lagrangian/steadyParticleTracks/steadyParticleTracks.C @@ -27,7 +27,7 @@ Application steadyParticleTracks Group - grpPostProcessingUtilitie + grpPostProcessingUtilities Description Generate a legacy VTK file of particle tracks for cases that were -- GitLab
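
For reference, a minimal sketch of the updated propsDict after this change. The entry names are taken from createFields.H above; the dictionary name/location, the example values and the explanatory comments are illustrative assumptions rather than part of the patch:

    // Particle-track properties dictionary (illustrative values)
    cloud           reactingCloud1;

    sampleFrequency 1;          // seed a track from every Nth parcel (by global origin id)
    maxPositions    1000000;    // maximum number of samples stored per track

    maxTracks       -1;         // optional: limit the number of tracks (-1 = unlimited)

    fields          (d U);      // optional: output specific fields; wildcards supported
    U               U;          // optional: velocity field used to derive track directions

    setFormat       vtk;        // optional: output format (default "vtk")

    formatOptions   {}          // optional: format-specific writer options

With these entries the utility writes one tracks file per primitive field type (scalar, vector, tensor, ...) in the chosen setFormat, placed under the cloud's particleTracks output directory (postProcessing/lagrangian/<cloudName>/particleTracks in a standard build), as constructed in writeTrackFields above.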