OpenFOAM: v1912 released! - For more information see https://www.openfoam.com/releases/openfoam-v1912

Commit a03e77a8 authored by Mark Olesen's avatar Mark Olesen

ENH: use Catalyst Initialize with working directory (issue #4)

- reflects current change being made to the vtkCPProcessor API to
  include chdir capabilities.
parent fc136d27
......@@ -40,7 +40,7 @@ def CreateCoProcessor():
print("Don't know how to create a writer for a ", grid.GetClassName())
if extension:
coprocessor.RegisterWriter(writer, filename='insitu/'+name+'_%t'+extension, freq=outputfrequency)
coprocessor.RegisterWriter(writer, filename=name+'_%t'+extension, freq=outputfrequency)
return Pipeline()
......
from paraview.simple import *
from paraview import coprocessing
# The frequency to output everything
outputfrequency = 1
# ----------------------- CoProcessor definition -----------------------
def CreateCoProcessor():
    """Create and return the Catalyst CoProcessor.

    The generated pipeline simply enumerates every input channel offered
    by the simulation adaptor and reports its dataset class name; no
    writers or filters are registered.
    """
    def _CreatePipeline(coprocessor, datadescription):
        class Pipeline:
            # One producer per input channel announced by the adaptor.
            for i in range(datadescription.GetNumberOfInputDescriptions()):
                name = datadescription.GetInputDescriptionName(i)
                adaptorinput = coprocessor.CreateProducer(datadescription, name)
                grid = adaptorinput.GetClientSideObject().GetOutputDataObject(0)
                # print() function (not the Python-2 statement) keeps this
                # script consistent with the other Catalyst scripts here.
                print("Channel <" + name + "> is a ", grid.GetClassName())
        return Pipeline()

    class CoProcessor(coprocessing.CoProcessor):
        def CreatePipeline(self, datadescription):
            self.Pipeline = _CreatePipeline(self, datadescription)

    return CoProcessor()
#--------------------------------------------------------------
# Global variables that will hold the pipeline for each timestep
# Creating the CoProcessor object, doesn't actually create the ParaView pipeline.
# It will be automatically setup when coprocessor.UpdateProducers() is called the
# first time.
coprocessor = CreateCoProcessor()
#--------------------------------------------------------------
# Enable Live-Visualization with ParaView
coprocessor.EnableLiveVisualization(False)
# ---------------------- Data Selection method ----------------------
def RequestDataDescription(datadescription):
    """Callback to populate the request for the current timestep.

    When output is forced, or the timestep falls on the configured
    output frequency, request every field and mesh from the simulation
    adaptor; otherwise defer to the pipeline's own requirements.
    """
    global coprocessor

    # Idiomatic truth test instead of '== True'.
    if (datadescription.GetForceOutput()
            or datadescription.GetTimeStep() % outputfrequency == 0):
        # We are just going to request all fields and meshes from the
        # simulation code/adaptor.
        for i in range(datadescription.GetNumberOfInputDescriptions()):
            datadescription.GetInputDescription(i).AllFieldsOn()
            datadescription.GetInputDescription(i).GenerateMeshOn()
        return

    # Setup requests for all inputs based on the requirements of the
    # pipeline.
    coprocessor.LoadRequestedData(datadescription)
# ------------------------ Processing method ------------------------
def DoCoProcessing(datadescription):
    "Callback to do co-processing for current timestep"
    global coprocessor

    # Feed the freshly generated simulation data to the coprocessor;
    # the very first call also builds the ParaView pipeline.
    coprocessor.UpdateProducers(datadescription)

    # Emit output files if the pipeline deems it appropriate this step.
    coprocessor.WriteData(datadescription)
......@@ -9,7 +9,7 @@ def CreateCoProcessor():
input1 = coprocessor.CreateProducer(datadescription, 'input')
writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
coprocessor.RegisterWriter(writer1, filename='insitu/area_%t.vtm', freq=2)
coprocessor.RegisterWriter(writer1, filename='area_%t.vtm', freq=2)
return Pipeline()
......
......@@ -10,7 +10,7 @@ def CreateCoProcessor():
writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
# register writer with coprocessor, with filename + output freq
coprocessor.RegisterWriter(writer1, filename='insitu/cloud_%t.vtm', freq=10)
coprocessor.RegisterWriter(writer1, filename='cloud_%t.vtm', freq=10)
return Pipeline()
......
......@@ -9,7 +9,7 @@ def CreateCoProcessor():
input1 = coprocessor.CreateProducer(datadescription, 'patches')
writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
coprocessor.RegisterWriter(writer1, filename='insitu/patches_%t.vtm', freq=2)
coprocessor.RegisterWriter(writer1, filename='patches_%t.vtm', freq=2)
return Pipeline()
......
......@@ -35,6 +35,26 @@ set(CMAKE_CXX_FLAGS_RELEASE
"-O3 -std=c++11 -Wall -Wextra -Wno-unused-parameter -Wnon-virtual-dtor -Wno-overloaded-virtual")
set(CMAKE_C_FLAGS_RELEASE "-O3 -std=c++11")
# Some characteristics
set(test_file ${CMAKE_CURRENT_BINARY_DIR}/check_initialize.cxx)
file(WRITE ${test_file}
"#include <vtkCPProcessor.h>\n"
"int main() {\n"
" vtkCPProcessor* p = vtkCPProcessor::New();\n"
" p->Initialize(\"AAA\");\n"
" p->Delete();\n"
" return 0;\n"
"}")
try_compile(CATALYST_HAS_WORKING_DIRECTORY
${CMAKE_CURRENT_BINARY_DIR} ${test_file}
LINK_LIBRARIES vtkPVPythonCatalyst
CMAKE_FLAGS "-DINCLUDE_DIRECTORIES=${PARAVIEW_INCLUDE_DIRS}"
)
if (CATALYST_HAS_WORKING_DIRECTORY)
add_definitions(-DUSE_CATALYST_WORKING_DIRECTORY)
endif()
# Set output library destination to OpenFOAM library directory
set(LIBRARY_OUTPUT_PATH $ENV{FOAM_LIBBIN}
CACHE INTERNAL
......
......@@ -56,17 +56,34 @@ bool Foam::functionObjects::catalystFaMesh::readBasics(const dictionary& dict)
catalystCoprocess::debug = debugLevel;
}
fileName outputDir;
if (dict.readIfPresent("mkdir", outputDir))
if (Pstream::master())
{
outputDir.expand();
outputDir.clean();
Foam::mkDir(outputDir);
fileName dir;
if (dict.readIfPresent("mkdir", dir))
{
dir.expand();
dir.clean();
}
Foam::mkDir(dir);
}
dict.readIfPresent("outputDir", outputDir_);
outputDir_.expand();
outputDir_.clean();
if (Pstream::master())
{
Foam::mkDir(outputDir_);
}
dict.lookup("scripts") >> scripts_; // Python scripts
catalystCoprocess::expand(scripts_, dict); // Expand and check availability
if (adaptor_.valid())
{
// Run-time modification of pipeline
adaptor_().reset(outputDir_, scripts_);
}
return true;
}
......@@ -106,12 +123,13 @@ Foam::functionObjects::catalystFaMesh::catalystFaMesh
)
:
fvMeshFunctionObject(name, runTime, dict),
outputDir_("<case>/insitu"),
scripts_(),
adaptor_(),
selectAreas_(),
selectFields_(),
scripts_(),
meshes_(),
backends_(),
adaptor_()
backends_()
{
if (postProcess)
{
......@@ -171,18 +189,12 @@ bool Foam::functionObjects::catalystFaMesh::read(const dictionary& dict)
dict.lookup("fields") >> selectFields_;
Info<< type() << " " << name() << ":" << nl
<<" areas " << flatOutput(selectAreas_) << nl
<<" meshes " << flatOutput(meshes_.sortedToc()) << nl
<<" fields " << flatOutput(selectFields_) << nl
<<" scripts " << scripts_ << nl;
if (adaptor_.valid())
{
// Run-time modification of pipeline
adaptor_().reset(scripts_);
}
// Ensure consistency - only retain backends with corresponding mesh region
backends_.retain(meshes_);
......@@ -219,7 +231,7 @@ bool Foam::functionObjects::catalystFaMesh::execute()
if (updateAdaptor && !adaptor_.valid())
{
adaptor_.reset(new catalystCoprocess());
adaptor_().reset(scripts_);
adaptor_().reset(outputDir_, scripts_);
}
}
......
......@@ -107,24 +107,28 @@ protected:
// Protected data
//- The output directory
fileName outputDir_;
//- Python scripts for the catalyst pipeline
stringList scripts_;
//- The catalyst coprocess
autoPtr<catalystCoprocess> adaptor_;
//- Requested names of areas to process
wordRes selectAreas_;
//- Names of fields to process
wordRes selectFields_;
//- Python scripts for the catalyst pipeline
stringList scripts_;
//- Pointers to the requested mesh regions
HashTable<const faMesh*> meshes_;
//- Backends for OpenFOAM to VTK translation (with internal caching)
HashPtrTable<vtk::faMeshAdaptor> backends_;
//- The catalyst coprocess
autoPtr<catalystCoprocess> adaptor_;
// Protected Member Functions
......
......@@ -222,25 +222,56 @@ void Foam::catalystCoprocess::stop()
}
void Foam::catalystCoprocess::reset()
void Foam::catalystCoprocess::reset(const fileName& outputDir)
{
#ifdef USE_CATALYST_WORKING_DIRECTORY
if (coproc_ == nullptr)
{
coproc_ = vtkCPProcessor::New();
coproc_->Initialize();
coproc_->Initialize(outputDir.c_str());
Info<< "Connecting ParaView Catalyst..." << endl;
}
else
{
coproc_->RemoveAllPipelines();
if (outputDir == coproc_->GetWorkingDirectory())
{
Info<< "Rebinding ParaView Catalyst..." << endl;
}
else
{
// Changed working directory ... redo everything.
coproc_->Delete();
coproc_ = nullptr;
reset(outputDir);
}
}
#else
if (coproc_ == nullptr)
{
coproc_ = vtkCPProcessor::New();
coproc_->Initialize();
}
else
{
coproc_->RemoveAllPipelines();
Info<< "Rebinding ParaView Catalyst..." << endl;
}
Info<< " Caution: using current working directory" << nl
<< " which may not be the same as the simulation directory" << endl;
#endif
}
void Foam::catalystCoprocess::reset(const UList<string>& scripts)
void Foam::catalystCoprocess::reset
(
const fileName& outputDir,
const UList<string>& scripts
)
{
reset();
reset(outputDir);
int nscript = 0;
for (const auto& script : scripts)
......
......@@ -208,12 +208,18 @@ public:
//- \return True if the coprocess has been initialized.
bool good() const;
//- Reset/initialize pipeline without pipeline scripts.
void reset();
//- Reset/initialize pipeline with output directory, but without
//- pipeline scripts.
void reset(const fileName& outputDir);
//- Reset/initialize pipeline with python scripts.
//- Reset/initialize pipeline with output directory and with
//- pipeline scripts.
// The script names must have already been resolved prior to calling this.
void reset(const UList<string>& scripts);
void reset
(
const fileName& outputDir,
const UList<string>& scripts
);
//- Query the coprocess pipelines if they should be executed at this
//- iteration and possibly which fields they require.
......
......@@ -56,17 +56,34 @@ bool Foam::functionObjects::catalystCloud::readBasics(const dictionary& dict)
catalystCoprocess::debug = debugLevel;
}
fileName outputDir;
if (dict.readIfPresent("mkdir", outputDir))
if (Pstream::master())
{
outputDir.expand();
outputDir.clean();
Foam::mkDir(outputDir);
fileName dir;
if (dict.readIfPresent("mkdir", dir))
{
dir.expand();
dir.clean();
}
Foam::mkDir(dir);
}
dict.readIfPresent("outputDir", outputDir_);
outputDir_.expand();
outputDir_.clean();
if (Pstream::master())
{
Foam::mkDir(outputDir_);
}
dict.lookup("scripts") >> scripts_; // Python scripts
catalystCoprocess::expand(scripts_, dict); // Expand and check availability
if (adaptor_.valid())
{
// Run-time modification of pipeline
adaptor_().reset(outputDir_, scripts_);
}
return true;
}
......@@ -81,9 +98,11 @@ Foam::functionObjects::catalystCloud::catalystCloud
)
:
fvMeshFunctionObject(name, runTime, dict),
outputDir_("<case>/insitu"),
scripts_(),
adaptor_(),
selectClouds_(),
selectFields_(),
adaptor_()
selectFields_()
{
if (postProcess)
{
......@@ -124,18 +143,11 @@ bool Foam::functionObjects::catalystCloud::read(const dictionary& dict)
selectFields_.clear();
dict.readIfPresent("fields", selectFields_);
Info<< type() << " " << name() << ":" << nl
<<" clouds " << flatOutput(selectClouds_) << nl
<<" fields " << flatOutput(selectFields_) << nl
<<" scripts " << scripts_ << nl;
if (adaptor_.valid())
{
// Run-time modification of pipeline
adaptor_().reset(scripts_);
}
return true;
}
......@@ -154,7 +166,7 @@ bool Foam::functionObjects::catalystCloud::execute()
if (!adaptor_.valid())
{
adaptor_.reset(new catalystCoprocess());
adaptor_().reset(scripts_);
adaptor_().reset(outputDir_, scripts_);
}
}
......
......@@ -108,11 +108,8 @@ protected:
// Protected data
//- Requested names of clouds to process
wordRes selectClouds_;
//- Subset of cloud fields to process
wordRes selectFields_;
//- The output directory
fileName outputDir_;
//- Python scripts for the catalyst pipeline
stringList scripts_;
......@@ -121,6 +118,13 @@ protected:
autoPtr<catalystCoprocess> adaptor_;
//- Requested names of clouds to process
wordRes selectClouds_;
//- Subset of cloud fields to process
wordRes selectFields_;
// Protected Member Functions
//- Common boilerplate settings
......
......@@ -54,17 +54,34 @@ bool Foam::functionObjects::catalystFvMesh::readBasics(const dictionary& dict)
catalystCoprocess::debug = debugLevel;
}
fileName outputDir;
if (dict.readIfPresent("mkdir", outputDir))
if (Pstream::master())
{
outputDir.expand();
outputDir.clean();
Foam::mkDir(outputDir);
fileName dir;
if (dict.readIfPresent("mkdir", dir))
{
dir.expand();
dir.clean();
}
Foam::mkDir(dir);
}
dict.readIfPresent("outputDir", outputDir_);
outputDir_.expand();
outputDir_.clean();
if (Pstream::master())
{
Foam::mkDir(outputDir_);
}
dict.lookup("scripts") >> scripts_; // Python scripts
catalystCoprocess::expand(scripts_, dict); // Expand and check availability
if (adaptor_.valid())
{
// Run-time modification of pipeline
adaptor_().reset(outputDir_, scripts_);
}
return true;
}
......@@ -105,12 +122,13 @@ Foam::functionObjects::catalystFvMesh::catalystFvMesh
:
functionObject(name),
time_(runTime),
outputDir_("<case>/insitu"),
scripts_(),
adaptor_(),
selectRegions_(),
selectFields_(),
scripts_(),
meshes_(),
backends_(),
adaptor_()
backends_()
{
if (postProcess)
{
......@@ -135,25 +153,7 @@ Foam::functionObjects::catalystFvMesh::~catalystFvMesh()
bool Foam::functionObjects::catalystFvMesh::read(const dictionary& dict)
{
functionObject::read(dict);
// Common settings
int debugLevel = 0;
if (dict.readIfPresent("debug", debugLevel))
{
catalystCoprocess::debug = debugLevel;
}
fileName outputDir;
if (dict.readIfPresent("mkdir", outputDir))
{
outputDir.expand();
outputDir.clean();
Foam::mkDir(outputDir);
}
dict.lookup("scripts") >> scripts_; // Python scripts
catalystCoprocess::expand(scripts_, dict); // Expand and check availability
readBasics(dict);
// All possible meshes
meshes_ = time_.lookupClass<fvMesh>();
......@@ -173,19 +173,12 @@ bool Foam::functionObjects::catalystFvMesh::read(const dictionary& dict)
dict.lookup("fields") >> selectFields_;
Info<< type() << " " << name() << ":" << nl
<<" regions " << flatOutput(selectRegions_) << nl
<<" meshes " << flatOutput(meshes_.sortedToc()) << nl
<<" fields " << flatOutput(selectFields_) << nl
<<" scripts " << scripts_ << nl;
if (adaptor_.valid())
{
// Run-time modification of pipeline
adaptor_().reset(scripts_);
}
// Ensure consistency - only retain backends with corresponding mesh region
backends_.retain(meshes_);
......@@ -221,7 +214,7 @@ bool Foam::functionObjects::catalystFvMesh::execute()
if (updateAdaptor && !adaptor_.valid())
{
adaptor_.reset(new catalystCoprocess());
adaptor_().reset(scripts_);
adaptor_().reset(outputDir_, scripts_);
}
}
......
......@@ -111,24 +111,28 @@ protected:
//- Reference to the time database
const Time& time_;
//- The output directory
fileName outputDir_;
//- Python scripts for the catalyst pipeline
stringList scripts_;
//- The catalyst coprocess
autoPtr<catalystCoprocess> adaptor_;
//- Requested names of regions to process
wordRes selectRegions_;
//- Names of fields to process
wordRes selectFields_;
//- Python scripts for the catalyst pipeline
stringList scripts_;
//- Pointers to the requested mesh regions
HashTable<const fvMesh*> meshes_;
//- Backends for OpenFOAM to VTK translation (with internal caching)
HashPtrTable<vtk::fvMeshAdaptor> backends_;
//- The catalyst coprocess
autoPtr<catalystCoprocess> adaptor_;
// Protected Member Functions
......
......@@ -9,7 +9,7 @@ def CreateCoProcessor():
input1 = coprocessor.CreateProducer(datadescription, 'input')
writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
coprocessor.RegisterWriter(writer1, filename='insitu/area_%t.vtm', freq=2)
coprocessor.RegisterWriter(writer1, filename='area_%t.vtm', freq=2)
return Pipeline()
......
......@@ -18,14 +18,14 @@ def CreateCoProcessor():
# register the writer with coprocessor
# and provide it with information such as the filename to use,
# how frequently to write the data, etc.
coprocessor.RegisterWriter(sliceWriter, filename='insitu/slice_%t.vtm', freq=10)
coprocessor.RegisterWriter(sliceWriter, filename='slice_%t.vtm', freq=10)
meshWriter = servermanager.writers.XMLMultiBlockDataWriter(Input=mesh)
# register the writer with coprocessor
# and provide it with information such as the filename to use,
# how frequently to write the data, etc.
coprocessor.RegisterWriter(meshWriter, filename='insitu/mesh_%t.vtm', freq=100)
coprocessor.RegisterWriter(meshWriter, filename='mesh_%t.vtm', freq=100)
return Pipeline()
......
......@@ -9,7 +9,7 @@ def CreateCoProcessor():
input1 = coprocessor.CreateProducer(datadescription, 'mesh')
writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
coprocessor.RegisterWriter(writer1, filename='insitu/mesh_%t.vtm', freq=2)
coprocessor.RegisterWriter(writer1, filename='mesh_%t.vtm', freq=2)
return Pipeline()
......
......@@ -9,7 +9,7 @@ def CreateCoProcessor():
input1 = coprocessor.CreateProducer(datadescription, 'patches')
writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
coprocessor.RegisterWriter(writer1, filename='insitu/patches_%t.vtm', freq=2)
coprocessor.RegisterWriter(writer1, filename='patches_%t.vtm', freq=2)
return Pipeline()
......
......@@ -18,7 +18,7 @@ def CreateCoProcessor():
# register the writer with coprocessor
# and provide it with information such as the filename to use,
# how frequently to write the data, etc.
coprocessor.RegisterWriter(sliceWriter, filename='insitu/slice_%t.vtm', freq=3)
coprocessor.RegisterWriter(sliceWriter, filename='slice_%t.vtm', freq=3)
# create a new 'Parallel UnstructuredGrid Writer'
meshWriter = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
......@@ -26,7 +26,7 @@ def CreateCoProcessor():
# register the writer with coprocessor
# and provide it with information such as the filename to use,
# how frequently to write the data, etc.
coprocessor.RegisterWriter(meshWriter, filename='insitu/fullgrid_%t.vtm', freq=10)
coprocessor.RegisterWriter(meshWriter, filename='fullgrid_%t.vtm', freq=10)
return Pipeline()
......
......@@ -9,11 +9,11 @@ def CreateCoProcessor():
input1 = coprocessor.CreateProducer(datadescription, "mesh")
writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
# Register with filename to use, output frequency
coprocessor.RegisterWriter(writer1, filename='insitu/mesh_%t.vtm', freq=2)
coprocessor.RegisterWriter(writer1, filename='mesh_%t.vtm', freq=2)
# input2 = coprocessor.CreateProducer(datadescription, "patches")
# writer2 = servermanager.writers.XMLMultiBlockDataWriter(Input=input2)
# coprocessor.RegisterWriter(writer2, filename='insitu/patches_%t.vtm', freq=2)
# coprocessor.RegisterWriter(writer2, filename='patches_%t.vtm', freq=2)
return Pipeline()
......
......@@ -10,7 +10,7 @@ def CreateCoProcessor():
writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
# register writer with coprocessor, with filename + output freq
coprocessor.RegisterWriter(writer1, filename='insitu/cloud_%t.vtm', freq=10)
coprocessor.RegisterWriter(writer1, filename='cloud_%t.vtm', freq=10)
return Pipeline()
......
from paraview.simple import *
from paraview import coprocessing
# ----------------------- CoProcessor definition -----------------------
def CreateCoProcessor():
    """Build and return the Catalyst CoProcessor for the 'mesh' channel.

    The pipeline registers one multi-block writer producing
    'mesh_%t.vtm' (writer freq=2); the 'mesh' channel itself is
    updated every 10 timesteps.
    """
    def _CreatePipeline(owner, datadescription):
        class Pipeline:
            input1 = owner.CreateProducer(datadescription, 'mesh')
            writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
            owner.RegisterWriter(writer1, filename='mesh_%t.vtm', freq=2)
        return Pipeline()

    class CoProcessor(coprocessing.CoProcessor):
        def CreatePipeline(self, datadescription):
            self.Pipeline = _CreatePipeline(self, datadescription)

    proc = CoProcessor()
    proc.SetUpdateFrequencies({'mesh': [10]})
    return proc
#--------------------------------------------------------------
# Global variables that will hold the pipeline for each timestep
# Creating the CoProcessor object, doesn't actually create the ParaView pipeline.
# It will be automatically setup when coprocessor.UpdateProducers() is called the
# first time.
coprocessor = CreateCoProcessor()
#--------------------------------------------------------------
# Enable Live-Visualization with ParaView
coprocessor.EnableLiveVisualization(True)
# ---------------------- Data Selection method ----------------------
def RequestDataDescription(datadescription):