Skip to content
Commits on Source (6)
Collection of scripts and setups.
- Some bits may need to migrate to the main OpenFOAM directories
from paraview.simple import *
from paraview import coprocessing
# The frequency to output everything
outputfrequency = 5
# ----------------------- CoProcessor definition -----------------------
def CreateCoProcessor():
    """Define the Catalyst coprocessor: attach a parallel XML writer to
    every input channel published by the simulation adaptor."""
    def _CreatePipeline(coprocessor, datadescription):
        class Pipeline:
            # The class body executes at creation time: walk all published
            # channels and register a writer matching each grid type.
            for i in range(datadescription.GetNumberOfInputDescriptions()):
                inputdescription = datadescription.GetInputDescription(i)
                channel = datadescription.GetInputDescriptionName(i)
                producer = coprocessor.CreateProducer(datadescription, channel)
                dataset = producer.GetClientSideObject().GetOutputDataObject(0)

                # Select a parallel writer by concrete grid type.  The
                # vtkUniformGrid test accompanies vtkImageData (its base).
                extension = None
                if dataset.IsA('vtkImageData') or dataset.IsA('vtkUniformGrid'):
                    writer = servermanager.writers.XMLPImageDataWriter(Input=producer)
                    extension = '.pvti'
                elif dataset.IsA('vtkRectilinearGrid'):
                    writer = servermanager.writers.XMLPRectilinearGridWriter(Input=producer)
                    extension = '.pvtr'
                elif dataset.IsA('vtkStructuredGrid'):
                    writer = servermanager.writers.XMLPStructuredGridWriter(Input=producer)
                    extension = '.pvts'
                elif dataset.IsA('vtkPolyData'):
                    writer = servermanager.writers.XMLPPolyDataWriter(Input=producer)
                    extension = '.pvtp'
                elif dataset.IsA('vtkUnstructuredGrid'):
                    writer = servermanager.writers.XMLPUnstructuredGridWriter(Input=producer)
                    extension = '.pvtu'
                elif dataset.IsA('vtkUniformGridAMR'):
                    writer = servermanager.writers.XMLHierarchicalBoxDataWriter(Input=producer)
                    extension = '.vthb'
                elif dataset.IsA('vtkMultiBlockDataSet'):
                    writer = servermanager.writers.XMLMultiBlockDataWriter(Input=producer)
                    extension = '.vtm'
                else:
                    print("Don't know how to create a writer for a ", dataset.GetClassName())

                if extension:
                    coprocessor.RegisterWriter(
                        writer,
                        filename='insitu/' + channel + '_%t' + extension,
                        freq=outputfrequency)

        return Pipeline()

    class CoProcessor(coprocessing.CoProcessor):
        def CreatePipeline(self, datadescription):
            self.Pipeline = _CreatePipeline(self, datadescription)

    return CoProcessor()
#--------------------------------------------------------------
# Global variable that holds the coprocessor across timesteps.
# Creating the CoProcessor object doesn't actually create the ParaView
# pipeline; it is set up automatically the first time
# coprocessor.UpdateProducers() is called.
coprocessor = CreateCoProcessor()
#--------------------------------------------------------------
# Enable Live-Visualization with ParaView (disabled here)
coprocessor.EnableLiveVisualization(False)
# ---------------------- Data Selection method ----------------------
def RequestDataDescription(datadescription):
    """Callback to populate the request for the current timestep.

    On forced output, or every 'outputfrequency' steps, requests all
    fields and meshes; otherwise defers to the pipeline's requirements.
    """
    global coprocessor

    # Truthiness test instead of '== True': GetForceOutput returns a flag.
    if datadescription.GetForceOutput() or datadescription.GetTimeStep() % outputfrequency == 0:
        # Request all fields and meshes from the simulation code/adaptor.
        for i in range(datadescription.GetNumberOfInputDescriptions()):
            datadescription.GetInputDescription(i).AllFieldsOn()
            datadescription.GetInputDescription(i).GenerateMeshOn()
        return

    # Set up requests for all inputs based on the requirements of the
    # pipeline.
    coprocessor.LoadRequestedData(datadescription)
# ------------------------ Processing method ------------------------
def DoCoProcessing(datadescription):
    """Callback to do co-processing for the current timestep."""
    global coprocessor

    # Provide the newly generated simulation data; on the first call this
    # also sets up the ParaView pipeline.
    coprocessor.UpdateProducers(datadescription)

    # Write output data files, if appropriate for this timestep.
    coprocessor.WriteData(datadescription)

    # Write image captures (last arg: rescale lookup table), if appropriate.
    coprocessor.WriteImages(datadescription, rescale_lookuptable=False)

    # Live visualization, if enabled.
    coprocessor.DoLiveVisualization(datadescription, "localhost", 22222)
......@@ -5,16 +5,12 @@
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
FoamFile
{
version 2.0;
format ascii;
class dictionary;
object catalystArea.cfg;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Insitu processing of finiteArea fields with ParaView Catalyst
#include "catalyst.cfg"
type catalyst::area;
libs ("libcatalystFoam.so");
executeControl timeStep;
writeControl none;
// ************************************************************************* //
......@@ -5,16 +5,12 @@
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
FoamFile
{
version 2.0;
format ascii;
class dictionary;
object catalystCloud.cfg;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Insitu processing of lagrangian clouds with ParaView Catalyst
#include "catalyst.cfg"
type catalyst::cloud;
libs ("libcatalystFoam.so");
executeControl timeStep;
writeControl none;
// ************************************************************************* //
......@@ -5,17 +5,12 @@
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
FoamFile
{
version 2.0;
format ascii;
class dictionary;
object catalyst.cfg;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Insitu processing of finiteVolume fields with ParaView Catalyst
type catalyst;
libs ("libcatalystFoam.so");
executeControl timeStep;
writeControl none;
// ************************************************************************* //
from paraview.simple import *
from paraview import coprocessing
# ----------------------- CoProcessor definition -----------------------
def CreateCoProcessor():
    """Define the Catalyst coprocessor for the finite-area 'input' channel."""
    def _CreatePipeline(coprocessor, datadescription):
        class Pipeline:
            # Producer for the 'input' channel published by the adaptor
            source = coprocessor.CreateProducer(datadescription, 'input')

            # Register a multi-block writer for the channel.
            # NOTE(review): writer freq (2) differs from the channel update
            # frequency (10) below - confirm this mismatch is intended.
            writer = servermanager.writers.XMLMultiBlockDataWriter(Input=source)
            coprocessor.RegisterWriter(writer, filename='insitu/area_%t.vtm', freq=2)

        return Pipeline()

    class CoProcessor(coprocessing.CoProcessor):
        def CreatePipeline(self, datadescription):
            self.Pipeline = _CreatePipeline(self, datadescription)

    coprocessor = CoProcessor()
    # Update the 'input' channel every 10 steps
    coprocessor.SetUpdateFrequencies({'input': [10]})
    return coprocessor
#--------------------------------------------------------------
# Global variable that holds the coprocessor across timesteps.
# Creating the CoProcessor object doesn't actually create the ParaView
# pipeline; it is set up automatically the first time
# coprocessor.UpdateProducers() is called.
coprocessor = CreateCoProcessor()
#--------------------------------------------------------------
# Enable Live-Visualization with ParaView
coprocessor.EnableLiveVisualization(True)
# ---------------------- Data Selection method ----------------------
def RequestDataDescription(datadescription):
    """Callback to populate the request for the current timestep."""
    global coprocessor

    # Truthiness test instead of '== True': GetForceOutput returns a flag.
    if datadescription.GetForceOutput():
        # Request all fields and meshes from the simulation code/adaptor.
        for i in range(datadescription.GetNumberOfInputDescriptions()):
            datadescription.GetInputDescription(i).AllFieldsOn()
            datadescription.GetInputDescription(i).GenerateMeshOn()
        return

    # Set up requests for all inputs based on the requirements of the
    # pipeline.
    coprocessor.LoadRequestedData(datadescription)
# ------------------------ Processing method ------------------------
def DoCoProcessing(datadescription):
    """Callback to do co-processing for the current timestep."""
    global coprocessor

    # Provide the newly generated simulation data; on the first call this
    # also sets up the ParaView pipeline.
    coprocessor.UpdateProducers(datadescription)

    # Write output data files, if appropriate for this timestep.
    coprocessor.WriteData(datadescription)

    # Write image captures (last arg: rescale lookup table), if appropriate.
    coprocessor.WriteImages(datadescription, rescale_lookuptable=False)

    # Live visualization, if enabled.
    coprocessor.DoLiveVisualization(datadescription, 'localhost', 22222)
from paraview.simple import *
from paraview import coprocessing
# ----------------------- CoProcessor definition -----------------------
def CreateCoProcessor():
    """Define the Catalyst coprocessor for the lagrangian 'cloud' channel."""
    def _CreatePipeline(coprocessor, datadescription):
        class Pipeline:
            # Producer for the 'cloud' channel published by the adaptor
            source = coprocessor.CreateProducer(datadescription, 'cloud')

            # Register writer with coprocessor: filename + output frequency
            writer = servermanager.writers.XMLMultiBlockDataWriter(Input=source)
            coprocessor.RegisterWriter(writer, filename='insitu/cloud_%t.vtm', freq=10)

        return Pipeline()

    class CoProcessor(coprocessing.CoProcessor):
        def CreatePipeline(self, datadescription):
            self.Pipeline = _CreatePipeline(self, datadescription)

    coprocessor = CoProcessor()
    # Update the 'cloud' channel every 10 steps
    coprocessor.SetUpdateFrequencies({'cloud': [10]})
    return coprocessor
#--------------------------------------------------------------
# Global variable that holds the coprocessor across timesteps.
# Creating the CoProcessor object doesn't actually create the ParaView
# pipeline; it is set up automatically the first time
# coprocessor.UpdateProducers() is called.
coprocessor = CreateCoProcessor()
#--------------------------------------------------------------
# Enable Live-Visualization with ParaView
coprocessor.EnableLiveVisualization(True)
# ---------------------- Data Selection method ----------------------
def RequestDataDescription(datadescription):
    """Callback to populate the request for the current timestep."""
    global coprocessor

    # Truthiness test instead of '== True': GetForceOutput returns a flag.
    if datadescription.GetForceOutput():
        # Request all fields and meshes from the simulation code/adaptor.
        for i in range(datadescription.GetNumberOfInputDescriptions()):
            datadescription.GetInputDescription(i).AllFieldsOn()
            datadescription.GetInputDescription(i).GenerateMeshOn()
        return

    # Set up requests for all inputs based on the requirements of the
    # pipeline.
    coprocessor.LoadRequestedData(datadescription)
# ------------------------ Processing method ------------------------
def DoCoProcessing(datadescription):
    """Callback to do co-processing for the current timestep."""
    global coprocessor

    # Provide the newly generated simulation data; on the first call this
    # also sets up the ParaView pipeline.
    coprocessor.UpdateProducers(datadescription)

    # Write output data files, if appropriate for this timestep.
    coprocessor.WriteData(datadescription)

    # Write image captures (last arg: rescale lookup table), if appropriate.
    coprocessor.WriteImages(datadescription, rescale_lookuptable=False)

    # Live visualization, if enabled.
    coprocessor.DoLiveVisualization(datadescription, 'localhost', 22222)
from paraview.simple import *
from paraview import coprocessing
# ----------------------- CoProcessor definition -----------------------
def CreateCoProcessor():
    """Define the Catalyst coprocessor for the 'patches' channel."""
    def _CreatePipeline(coprocessor, datadescription):
        class Pipeline:
            # Producer for the 'patches' channel published by the adaptor
            source = coprocessor.CreateProducer(datadescription, 'patches')

            # Register a multi-block writer for the channel.
            # NOTE(review): writer freq (2) differs from the channel update
            # frequency (10) below - confirm this mismatch is intended.
            writer = servermanager.writers.XMLMultiBlockDataWriter(Input=source)
            coprocessor.RegisterWriter(writer, filename='insitu/patches_%t.vtm', freq=2)

        return Pipeline()

    class CoProcessor(coprocessing.CoProcessor):
        def CreatePipeline(self, datadescription):
            self.Pipeline = _CreatePipeline(self, datadescription)

    coprocessor = CoProcessor()
    # Update the 'patches' channel every 10 steps
    coprocessor.SetUpdateFrequencies({'patches': [10]})
    return coprocessor
#--------------------------------------------------------------
# Global variable that holds the coprocessor across timesteps.
# Creating the CoProcessor object doesn't actually create the ParaView
# pipeline; it is set up automatically the first time
# coprocessor.UpdateProducers() is called.
coprocessor = CreateCoProcessor()
#--------------------------------------------------------------
# Enable Live-Visualization with ParaView
coprocessor.EnableLiveVisualization(True)
# ---------------------- Data Selection method ----------------------
def RequestDataDescription(datadescription):
    """Callback to populate the request for the current timestep."""
    global coprocessor

    # Truthiness test instead of '== True': GetForceOutput returns a flag.
    if datadescription.GetForceOutput():
        # Request all fields and meshes from the simulation code/adaptor.
        for i in range(datadescription.GetNumberOfInputDescriptions()):
            datadescription.GetInputDescription(i).AllFieldsOn()
            datadescription.GetInputDescription(i).GenerateMeshOn()
        return

    # Set up requests for all inputs based on the requirements of the
    # pipeline.
    coprocessor.LoadRequestedData(datadescription)
# ------------------------ Processing method ------------------------
def DoCoProcessing(datadescription):
    """Callback to do co-processing for the current timestep."""
    global coprocessor

    # Provide the newly generated simulation data; on the first call this
    # also sets up the ParaView pipeline.
    coprocessor.UpdateProducers(datadescription)

    # Write output data files, if appropriate for this timestep.
    coprocessor.WriteData(datadescription)

    # Write image captures (last arg: rescale lookup table), if appropriate.
    coprocessor.WriteImages(datadescription, rescale_lookuptable=False)

    # Live visualization, if enabled.
    coprocessor.DoLiveVisualization(datadescription, 'localhost', 22222)
......@@ -3,6 +3,8 @@ cd ${0%/*} || exit 1 # Run from this directory
. $WM_PROJECT_DIR/wmake/scripts/wmakeFunctions # The wmake functions
rm -f $FOAM_LIBBIN/libcatalystFoam* 2>/dev/null # Cleanup library
rm -f $FOAM_SITE_LIBBIN/libcatalystFoam* 2>/dev/null # ... extra safety
rm -f $FOAM_USER_LIBBIN/libcatalystFoam* 2>/dev/null # ... extra safety
# Cleanup generated files - remove entire top-level
removeObjectDir $PWD
......
......@@ -43,19 +43,19 @@ set(LIBRARY_OUTPUT_PATH $ENV{FOAM_LIBBIN}
file(GLOB SOURCE_FILES
catalystCoprocess.C
catalystCloud.C
foamVtkCloudAdaptor.C
cloud/catalystCloud.C
cloud/foamVtkCloudAdaptor.C
catalystFaMesh.C
foamVtkFaMeshAdaptor.C
foamVtkFaMeshAdaptorGeom.C
foamVtkFaMeshAdaptorFields.C
areaMesh/catalystFaMesh.C
areaMesh/foamVtkFaMeshAdaptor.C
areaMesh/foamVtkFaMeshAdaptorGeom.C
areaMesh/foamVtkFaMeshAdaptorFields.C
catalystFvMesh.C
foamVtkFvMeshAdaptor.C
foamVtkFvMeshAdaptorGeom.C
foamVtkFvMeshAdaptorGeomVtu.C
foamVtkFvMeshAdaptorFields.C
volMesh/catalystFvMesh.C
volMesh/foamVtkFvMeshAdaptor.C
volMesh/foamVtkFvMeshAdaptorGeom.C
volMesh/foamVtkFvMeshAdaptorGeomVtu.C
volMesh/foamVtkFvMeshAdaptorFields.C
)
set(OPENFOAM_LIBRARIES
......
......@@ -48,6 +48,29 @@ namespace functionObjects
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
bool Foam::functionObjects::catalystFaMesh::readBasics(const dictionary& dict)
{
int debugLevel = 0;
if (dict.readIfPresent("debug", debugLevel))
{
catalystCoprocess::debug = debugLevel;
}
fileName outputDir;
if (dict.readIfPresent("mkdir", outputDir))
{
outputDir.expand();
outputDir.clean();
Foam::mkDir(outputDir);
}
dict.lookup("scripts") >> scripts_; // Python scripts
catalystCoprocess::expand(scripts_, dict); // Expand and check availability
return true;
}
void Foam::functionObjects::catalystFaMesh::updateState
(
polyMesh::readUpdateState state
......@@ -106,11 +129,7 @@ bool Foam::functionObjects::catalystFaMesh::read(const dictionary& dict)
{
fvMeshFunctionObject::read(dict);
int debugLevel = 0;
if (dict.readIfPresent("debug", debugLevel))
{
catalystCoprocess::debug = debugLevel;
}
readBasics(dict);
// All possible meshes
meshes_ = mesh_.lookupClass<faMesh>();
......@@ -143,9 +162,7 @@ bool Foam::functionObjects::catalystFaMesh::read(const dictionary& dict)
meshes_.filterKeys(wordRes(selectAreas_));
dict.lookup("fields") >> selectFields_;
dict.lookup("scripts") >> scripts_; // Python scripts
catalystCoprocess::expand(scripts_, dict); // Expand and check availability
Info<< type() << " " << name() << ":" << nl
<<" areas " << flatOutput(selectAreas_) << nl
......@@ -153,9 +170,9 @@ bool Foam::functionObjects::catalystFaMesh::read(const dictionary& dict)
<<" fields " << flatOutput(selectFields_) << nl
<<" scripts " << scripts_ << nl;
// Run-time modification of pipeline
if (adaptor_.valid())
{
// Run-time modification of pipeline
adaptor_().reset(scripts_);
}
......@@ -175,8 +192,6 @@ bool Foam::functionObjects::catalystFaMesh::execute()
return false;
}
const catalystCoprocess::timeQuery when(time_);
// Enforce sanity for backends and adaptor
{
bool updateAdaptor = false;
......@@ -213,36 +228,42 @@ bool Foam::functionObjects::catalystFaMesh::execute()
// Data description for co-processing
vtkNew<vtkCPDataDescription> descrip;
// Form query for catalyst
catalystCoprocess::dataQuery query
// Form data query for catalyst
catalystCoprocess::dataQuery dataq
(
vtk::faMeshAdaptor::channelNames.names(),
when,
time_, // timeQuery
descrip.Get()
);
// Query catalyst
HashTable<wordHashSet> expecting = adaptor_().check(query, allFields);
const HashTable<wordHashSet> expecting(adaptor_().query(dataq, allFields));
if (expecting.empty())
if (catalystCoprocess::debug)
{
Info<< "No data expected for ParaView Catalyst. " << when << endl;
return true;
if (expecting.empty())
{
Info<< type() << ": expecting no data" << nl;
}
else
{
Info<< type() << ": expecting data " << expecting << nl;
}
}
else if (catalystCoprocess::debug)
if (expecting.empty())
{
Info<< type() << " expecting data:" << expecting << endl;
return true;
}
auto output = vtkSmartPointer<vtkMultiBlockDataSet>::New();
// TODO: currently don't rely on the results from expecting much at all
// Store each region in a separate block
// Each region in a separate block
unsigned int regionNo = 0;
for (const word& regionName : regionNames)
{
#if 1
auto pieces = backends_[regionName]->output(selectFields_);
output->SetBlock(regionNo, pieces);
......@@ -252,15 +273,14 @@ bool Foam::functionObjects::catalystFaMesh::execute()
vtkCompositeDataSet::NAME(),
regionName
);
#endif
++regionNo;
}
if (regionNo)
{
Info<< "Send data to ParaView Catalyst. " << when << endl;
Log << type() << ": send data" << nl;
adaptor_().process(query, output);
adaptor_().process(dataq, output);
}
return true;
......@@ -273,6 +293,19 @@ bool Foam::functionObjects::catalystFaMesh::write()
}
bool Foam::functionObjects::catalystFaMesh::end()
{
// Only here for extra feedback
if (log && adaptor_.valid())
{
Info<< type() << ": Disconnecting ParaView Catalyst..." << nl;
}
adaptor_.clear();
return true;
}
void Foam::functionObjects::catalystFaMesh::updateMesh(const mapPolyMesh&)
{
updateState(polyMesh::TOPO_CHANGE);
......
......@@ -50,15 +50,24 @@ Usage
\table
Property | Description | Required | Default
type | catalyst::area | yes |
log | report extra information | no | false
mkdir | initial directory to create | no |
region | | no | region0
regions | wordRe list of regions | no |
fields | wordRe list of fields | yes |
scripts | Python pipeline scripts | yes |
\endtable
Note
The execution frequency can be defined by the functionObject and
by the Catalyst pipeline.
See also
Foam::functionObjects::functionObject
Foam::functionObjects::fvMeshFunctionObject
Foam::functionObjects::timeControl
Foam::catalystCoprocess
Foam::vtk::faMeshAdaptor
SourceFiles
catalystFaMesh.C
......@@ -119,8 +128,13 @@ protected:
// Protected Member Functions
//- Common boilerplate settings
bool readBasics(const dictionary& dict);
//- On movement
void updateState(polyMesh::readUpdateState state);
//- No copy construct
catalystFaMesh(const catalystFaMesh&) = delete;
......@@ -153,12 +167,15 @@ public:
//- Read the specification
virtual bool read(const dictionary& dict);
//- Execute pipeline
//- Execute catalyst pipelines
virtual bool execute();
//- Write - does nothing
virtual bool write();
//- On end - provide feedback about disconnecting from catalyst.
virtual bool end();
//- Update for changes of mesh
virtual void updateMesh(const mapPolyMesh& mpm);
......
......@@ -62,7 +62,6 @@ void Foam::vtk::faMeshAdaptor::convertGeometryInternal()
}
}
if (!vtkgeom)
{
if (debug)
......
......@@ -42,7 +42,7 @@ License
namespace Foam
{
defineTypeNameAndDebug(catalystCoprocess, 0);
defineTypeNameAndDebug(catalystCoprocess, 0);
}
......@@ -99,6 +99,7 @@ Foam::label Foam::catalystCoprocess::expand
string& s = scripts[scripti];
stringOps::inplaceExpand(s, dict, true, true);
fileName::clean(s); // Remove trailing, repeated slashes etc.
if (isFile(s))
{
......@@ -121,64 +122,60 @@ Foam::label Foam::catalystCoprocess::expand
template<class DataType>
bool Foam::catalystCoprocess::processImpl
(
const dataQuery& query,
HashTable<vtkSmartPointer<DataType>>& outputs
const dataQuery& dataq,
vtkSmartPointer<DataType>& output
)
{
vtkCPDataDescription* descrip = query.get();
vtkCPDataDescription* descrip = dataq.get();
if (!coproc_->RequestDataDescription(descrip))
{
return false;
}
for (const word& channel : query.channels())
for (const word& chanName : dataq.channels())
{
if (outputs.found(channel))
{
auto* input = descrip->GetInputDescriptionByName(channel.c_str());
auto* input = descrip->GetInputDescriptionByName(chanName.c_str());
if (input && input->GetIfGridIsNecessary())
{
input->SetGrid(outputs[channel]);
}
if (input && input->GetIfGridIsNecessary())
{
input->SetGrid(output);
}
}
coproc_->CoProcess(descrip);
return true;
}
template<class DataType>
bool Foam::catalystCoprocess::process1Impl
bool Foam::catalystCoprocess::processImpl
(
const dataQuery& query,
vtkSmartPointer<DataType>& output
const dataQuery& dataq,
HashTable<vtkSmartPointer<DataType>>& outputs
)
{
vtkCPDataDescription* descrip = query.get();
vtkCPDataDescription* descrip = dataq.get();
if (!coproc_->RequestDataDescription(descrip))
{
return false;
}
for (const word& channel : query.channels())
for (const word& chanName : dataq.channels())
{
auto* input = descrip->GetInputDescriptionByName(channel.c_str());
if (input && input->GetIfGridIsNecessary())
if (outputs.found(chanName))
{
input->SetGrid(output);
}
auto* input = descrip->GetInputDescriptionByName(chanName.c_str());
break;
if (input && input->GetIfGridIsNecessary())
{
input->SetGrid(outputs[chanName]);
}
}
}
coproc_->CoProcess(descrip);
return true;
}
......@@ -256,14 +253,14 @@ void Foam::catalystCoprocess::reset(const UList<string>& scripts)
coproc_->AddPipeline(pipeline.GetPointer());
}
// Do something different with (!nscript) ??
// Do something different with (nscript == 0) ?
}
Foam::HashTable<Foam::wordHashSet>
Foam::catalystCoprocess::check
Foam::catalystCoprocess::query
(
dataQuery& query,
dataQuery& dataq,
const wordHashSet& allFields
)
{
......@@ -276,37 +273,33 @@ Foam::catalystCoprocess::check
return requests;
}
if (query.channels().empty())
if (dataq.channels().empty())
{
// No channels names have been published by the simulation
return requests;
}
vtkCPDataDescription* descrip = query.get();
vtkCPDataDescription* descrip = dataq.get();
descrip->SetTimeData(query.timeValue, query.timeIndex);
descrip->SetForceOutput(query.forced);
descrip->SetTimeData(dataq.timeValue, dataq.timeIndex);
descrip->SetForceOutput(dataq.forced);
// Sort out which channels already exist, are new, or disappeared
{
// The currently defined channels
wordHashSet currentChannels;
wordHashSet currChannels;
const unsigned n = descrip->GetNumberOfInputDescriptions();
for (unsigned i=0; i < n; ++i)
{
currentChannels.insert
currChannels.insert
(
word
(
descrip->GetInputDescriptionName(i),
false // no stripping (ie, accept bad names too)
)
word::validate(descrip->GetInputDescriptionName(i))
);
}
wordHashSet newChannels(query.channels());
wordHashSet oldChannels(currentChannels);
wordHashSet newChannels(dataq.channels());
wordHashSet oldChannels(currChannels);
oldChannels.erase(newChannels);
if (oldChannels.size())
......@@ -315,14 +308,14 @@ Foam::catalystCoprocess::check
}
else
{
newChannels.erase(currentChannels);
newChannels.erase(currChannels);
}
// Add channels
for (const word& channel : newChannels)
for (const word& chanName : newChannels)
{
descrip->AddInput(channel.c_str());
auto* input = descrip->GetInputDescriptionByName(channel.c_str());
descrip->AddInput(chanName.c_str());
auto* input = descrip->GetInputDescriptionByName(chanName.c_str());
for (const word& fieldName : allFields)
{
......@@ -344,13 +337,13 @@ Foam::catalystCoprocess::check
return requests;
}
for (const word& channel : query.channels())
for (const word& chanName : dataq.channels())
{
auto* input = descrip->GetInputDescriptionByName(channel.c_str());
auto* input = descrip->GetInputDescriptionByName(chanName.c_str());
if (input && input->GetIfGridIsNecessary())
{
wordHashSet& fields = requests(channel);
wordHashSet& fields = requests(chanName); // auto-vivify
for (const word& fieldName : allFields)
{
......@@ -368,21 +361,21 @@ Foam::catalystCoprocess::check
bool Foam::catalystCoprocess::process
(
const dataQuery& query,
const dataQuery& dataq,
vtkSmartPointer<vtkMultiBlockDataSet>& output
)
{
return process1Impl(query, output);
return processImpl(dataq, output);
}
bool Foam::catalystCoprocess::process
(
const dataQuery& query,
const dataQuery& dataq,
HashTable<vtkSmartPointer<vtkMultiBlockDataSet>>& outputs
)
{
return processImpl(query, outputs);
return processImpl(dataq, outputs);
}
......
......@@ -28,24 +28,18 @@ Description
Low-level interface between OpenFOAM and ParaView Catalyst.
\code
... initialize catalyst
... initialize catalyst
... define a data query for catalyst
... define a data query for catalyst
// Data description for co-processing
vtkNew<vtkCPDataDescription> descrip;
// Data description for co-processing
vtkNew<vtkCPDataDescription> descrip;
// Form query for catalyst
catalystCoprocess::dataQuery query
(
vtkPVFoam::channelNames.names(),
when,
descrip.Get()
);
// Form data query for catalyst
catalystCoprocess::dataQuery dataq(channelNames, runTime, descrip.Get());
// Query catalyst
HashTable<wordHashSet> expecting = adaptor_().check(query, allFields);
HashTable<wordHashSet> expecting = adaptor_().query(dataq, fields);
\endcode
......@@ -116,9 +110,8 @@ public:
:
public timeQuery
{
//- Catalyst channels to query
//- Catalyst channel names to query
List<word> channels_;
mutable vtkCPDataDescription* descrip_;
public:
......@@ -169,18 +162,20 @@ private:
// Private Member Functions
//- Process single output channel
template<class DataType>
bool processImpl
(
const dataQuery& query,
HashTable<vtkSmartPointer<DataType>>& outputs
vtkSmartPointer<DataType>& outputs
);
//- Process multiple output channels
template<class DataType>
bool process1Impl
bool processImpl
(
const dataQuery& query,
vtkSmartPointer<DataType>& outputs
HashTable<vtkSmartPointer<DataType>>& outputs
);
public:
......@@ -223,36 +218,34 @@ public:
//- Query the coprocess pipelines if they should be executed at this
//- iteration and possibly which fields they require.
//
// \param[in,out] query for catalyst.
// \param[in,out] dataq the data query for catalyst.
// On input it contains the published channel names, the current
// simulation time (index, value) and allocation for the coprocess
// data description.
// On output the data description will be filled with the field names
// contains the published channel names, the current
// On output the data description will be filled with the field
// names added per channel.
// \param[in] allFields the fields that can be published from the
// simulation.
//
// \return HashTable with fields requested (what Catalyst expects)
// on a per-channel basis.
HashTable<wordHashSet> check
HashTable<wordHashSet> query
(
dataQuery& query,
dataQuery& dataq,
const wordHashSet& allFields
);
//- Single-channel source (eg, "input" or "cloud", ...)
// Uses the currentTime values
bool process
(
const dataQuery& query,
const dataQuery& dataq,
vtkSmartPointer<vtkMultiBlockDataSet>& output
);
//- Multi-channel source (eg, "input", "mesh", "patches")
// Uses the currentTime values
bool process
(
const dataQuery& query,
const dataQuery& dataq,
HashTable<vtkSmartPointer<vtkMultiBlockDataSet>>& outputs
);
......
......@@ -46,6 +46,31 @@ namespace functionObjects
}
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
bool Foam::functionObjects::catalystCloud::readBasics(const dictionary& dict)
{
int debugLevel = 0;
if (dict.readIfPresent("debug", debugLevel))
{
catalystCoprocess::debug = debugLevel;
}
fileName outputDir;
if (dict.readIfPresent("mkdir", outputDir))
{
outputDir.expand();
outputDir.clean();
Foam::mkDir(outputDir);
}
dict.lookup("scripts") >> scripts_; // Python scripts
catalystCoprocess::expand(scripts_, dict); // Expand and check availability
return true;
}
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::functionObjects::catalystCloud::catalystCloud
......@@ -76,11 +101,7 @@ bool Foam::functionObjects::catalystCloud::read(const dictionary& dict)
{
fvMeshFunctionObject::read(dict);
int debugLevel = 0;
if (dict.readIfPresent("debug", debugLevel))
{
catalystCoprocess::debug = debugLevel;
}
readBasics(dict);
selectClouds_.clear();
dict.readIfPresent("clouds", selectClouds_);
......@@ -95,18 +116,15 @@ bool Foam::functionObjects::catalystCloud::read(const dictionary& dict)
selectFields_.clear();
dict.readIfPresent("fields", selectFields_);
dict.lookup("scripts") >> scripts_; // Python scripts
catalystCoprocess::expand(scripts_, dict); // Expand and check availability
Info<< type() << " " << name() << ":" << nl
<<" clouds " << flatOutput(selectClouds_) << nl
<<" fields " << flatOutput(selectFields_) << nl
<<" scripts " << scripts_ << nl;
// Run-time modification of pipeline
if (adaptor_.valid())
{
// Run-time modification of pipeline
adaptor_().reset(scripts_);
}
......@@ -123,8 +141,6 @@ bool Foam::functionObjects::catalystCloud::execute()
return true;
}
const catalystCoprocess::timeQuery when(time_);
// Enforce sanity for backends and adaptor
{
if (!adaptor_.valid())
......@@ -143,30 +159,37 @@ bool Foam::functionObjects::catalystCloud::execute()
// Data description for co-processing
vtkNew<vtkCPDataDescription> descrip;
// Form query for catalyst
catalystCoprocess::dataQuery query
// Form data query for catalyst
catalystCoprocess::dataQuery dataq
(
vtk::cloudAdaptor::channelNames.names(),
when,
time_, // timeQuery
descrip.Get()
);
// Query catalyst
HashTable<wordHashSet> expecting = adaptor_().check(query, allFields);
const HashTable<wordHashSet> expecting(adaptor_().query(dataq, allFields));
if (expecting.empty())
if (catalystCoprocess::debug)
{
Info<< "No data expected for ParaView Catalyst. " << when << endl;
return true;
if (expecting.empty())
{
Info<< type() << ": expecting no data" << nl;
}
else
{
Info<< type() << ": expecting data " << expecting << nl;
}
}
else if (catalystCoprocess::debug)
if (expecting.empty())
{
Info<< type() << " expecting data:" << expecting << endl;
return true;
}
auto output = vtkSmartPointer<vtkMultiBlockDataSet>::New();
// Store each cloud in a separate block
// Each cloud in a separate block.
unsigned int cloudNo = 0;
for (const word& cloudName : cloudNames)
{
......@@ -185,9 +208,9 @@ bool Foam::functionObjects::catalystCloud::execute()
if (cloudNo)
{
Info<< "Send data to ParaView Catalyst. " << when << endl;
Log << type() << ": send data" << nl;
adaptor_().process(query, output);
adaptor_().process(dataq, output);
}
return true;
......@@ -200,4 +223,17 @@ bool Foam::functionObjects::catalystCloud::write()
}
bool Foam::functionObjects::catalystCloud::end()
{
// Only here for extra feedback
if (log && adaptor_.valid())
{
Info<< type() << ": Disconnecting ParaView Catalyst..." << nl;
}
adaptor_.clear();
return true;
}
// ************************************************************************* //