Commit 82a83cab authored by Mark Olesen

Merge branch 'feature-unified-output' into 'develop'

Feature unified output

Closes #3 and #4

See merge request !1
parents 14eaf2dd 4f692e4d
What:
Library and function object for embedding ParaView Catalyst into OpenFOAM.
Requirements:
OpenFOAM.com development version (April-2018 or newer) or OpenFOAM-v1806.
ParaView or ParaView Catalyst 5.4 or newer, compiled with MPI and Python support.
Authors:
Mark Olesen <Mark.Olesen@esi-group.com>
Simone Bna <Simone.Bna@cineca.it>
License:
Same terms as OpenFOAM.
Licensed under GNU General Public License <http://www.gnu.org/licenses/>.
## What
Library and function object for embedding ParaView Catalyst into OpenFOAM.
## Requirements
1. OpenFOAM.com development version (11-May-2018 or newer) or OpenFOAM-v1806.
2. ParaView or ParaView Catalyst 5.5 or newer, compiled with MPI and
   Python support.
It is highly recommended to patch the ParaView 5.5 sources (e.g.,
using the OpenFOAM ThirdParty makeParaView script) so that results
can be written to directories other than the main simulation directory:
* [MR2433]
* [MR2436]
## Authors
* Mark Olesen <Mark.Olesen@esi-group.com>
* Simone Bna <Simone.Bna@cineca.it>
## License
Same terms as OpenFOAM.
Licensed under GNU General Public License <http://www.gnu.org/licenses/>.
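## Usage

A minimal sketch of enabling the function object from `system/controlDict`. The `type`, `libs` and control keywords below match the configuration files shipped with this merge request; the `fields` and `scripts` entries are illustrative placeholders and may need adjusting (or moving into an input sub-dictionary) for a given case and version:

    functions
    {
        catalyst
        {
            type            catalyst;
            libs            ("libcatalystFoam.so");
            executeControl  timeStep;
            writeControl    none;

            // Illustrative values - adapt to the case
            fields          ( U p );
            scripts         ( "<system>/pipeline.py" );
        }
    }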
[MR2433]: https://gitlab.kitware.com/paraview/paraview/merge_requests/2433
[MR2436]: https://gitlab.kitware.com/paraview/paraview/merge_requests/2436
@@ -10,7 +10,6 @@ def CreateCoProcessor():
def _CreatePipeline(coprocessor, datadescription):
class Pipeline:
for i in range(datadescription.GetNumberOfInputDescriptions()):
inputdescription = datadescription.GetInputDescription(i)
name = datadescription.GetInputDescriptionName(i)
adaptorinput = coprocessor.CreateProducer(datadescription, name)
grid = adaptorinput.GetClientSideObject().GetOutputDataObject(0)
@@ -40,7 +39,7 @@ def CreateCoProcessor():
print("Don't know how to create a writer for a ", grid.GetClassName())
if extension:
coprocessor.RegisterWriter(writer, filename='insitu/'+name+'_%t'+extension, freq=outputfrequency)
coprocessor.RegisterWriter(writer, filename=name+'_%t'+extension, freq=outputfrequency)
return Pipeline()
......
/*--------------------------------*- C++ -*----------------------------------*\
| ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: plus |
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
// Insitu processing of finiteArea fields with ParaView Catalyst
type catalyst::area;
libs ("libcatalystFoam.so");
executeControl timeStep;
writeControl none;
// ************************************************************************* //
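// For reference, a hedged sketch of the selection entries that the
// finite-area input reads in this merge request (see faMeshInput::read()
// further down in this diff). The values are illustrative, and whether the
// entries sit directly in the function-object dictionary or in an input
// sub-dictionary depends on the unified layout:
//
//     // Optional mesh region holding the finite-area mesh(es); defaults to region0
//     region          region0;
//
//     // Select finite-area meshes by name (regular expressions allowed),
//     // or give a single name via 'area'
//     areas           ( ".*" );
//
//     // Fields to convert (mandatory); field names here are illustrative
//     fields          ( h Us );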
/*--------------------------------*- C++ -*----------------------------------*\
| ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: plus |
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
// Insitu processing of lagrangian clouds with ParaView Catalyst
type catalyst::cloud;
libs ("libcatalystFoam.so");
executeControl timeStep;
writeControl none;
// ************************************************************************* //
/*--------------------------------*- C++ -*----------------------------------*\
| ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: plus |
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
// Insitu processing of finiteVolume fields with ParaView Catalyst
type catalyst;
libs ("libcatalystFoam.so");
executeControl timeStep;
writeControl none;
// ************************************************************************* //
from paraview.simple import *
from paraview import coprocessing
# ----------------------- CoProcessor definition -----------------------
def CreateCoProcessor():
def _CreatePipeline(coprocessor, datadescription):
class Pipeline:
input1 = coprocessor.CreateProducer(datadescription, 'input')
writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
coprocessor.RegisterWriter(writer1, filename='insitu/area_%t.vtm', freq=2)
return Pipeline()
class CoProcessor(coprocessing.CoProcessor):
def CreatePipeline(self, datadescription):
self.Pipeline = _CreatePipeline(self, datadescription)
coprocessor = CoProcessor()
freqs = {'input': [10]}
coprocessor.SetUpdateFrequencies(freqs)
return coprocessor
#--------------------------------------------------------------
# Global variables that will hold the pipeline for each timestep
# Creating the CoProcessor object doesn't actually create the ParaView pipeline.
# It will be set up automatically when coprocessor.UpdateProducers() is called
# for the first time.
coprocessor = CreateCoProcessor()
#--------------------------------------------------------------
# Enable Live-Visualization with ParaView
coprocessor.EnableLiveVisualization(True)
# ---------------------- Data Selection method ----------------------
def RequestDataDescription(datadescription):
'Callback to populate the request for current timestep'
global coprocessor
if datadescription.GetForceOutput():
# We are just going to request all fields and meshes from the simulation
# code/adaptor.
for i in range(datadescription.GetNumberOfInputDescriptions()):
datadescription.GetInputDescription(i).AllFieldsOn()
datadescription.GetInputDescription(i).GenerateMeshOn()
return
# setup requests for all inputs based on the requirements of the
# pipeline.
coprocessor.LoadRequestedData(datadescription)
# ------------------------ Processing method ------------------------
def DoCoProcessing(datadescription):
'Callback to do co-processing for current timestep'
global coprocessor
# Update the coprocessor by providing it the newly generated simulation data.
# If the pipeline hasn't been setup yet, this will setup the pipeline.
coprocessor.UpdateProducers(datadescription)
# Write output data, if appropriate.
coprocessor.WriteData(datadescription)
# Write image capture (Last arg: rescale lookup table), if appropriate.
coprocessor.WriteImages(datadescription, rescale_lookuptable=False)
# Live Visualization, if enabled.
coprocessor.DoLiveVisualization(datadescription, 'localhost', 22222)
from paraview.simple import *
from paraview import coprocessing
# ----------------------- CoProcessor definition -----------------------
def CreateCoProcessor():
def _CreatePipeline(coprocessor, datadescription):
class Pipeline:
input1 = coprocessor.CreateProducer(datadescription, 'cloud')
writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
# register writer with coprocessor, with filename + output freq
coprocessor.RegisterWriter(writer1, filename='insitu/cloud_%t.vtm', freq=10)
return Pipeline()
class CoProcessor(coprocessing.CoProcessor):
def CreatePipeline(self, datadescription):
self.Pipeline = _CreatePipeline(self, datadescription)
coprocessor = CoProcessor()
freqs = {'cloud': [10]}
coprocessor.SetUpdateFrequencies(freqs)
return coprocessor
#--------------------------------------------------------------
# Global variables that will hold the pipeline for each timestep
# Creating the CoProcessor object doesn't actually create the ParaView pipeline.
# It will be set up automatically when coprocessor.UpdateProducers() is called
# for the first time.
coprocessor = CreateCoProcessor()
#--------------------------------------------------------------
# Enable Live-Visualization with ParaView
coprocessor.EnableLiveVisualization(True)
# ---------------------- Data Selection method ----------------------
def RequestDataDescription(datadescription):
'Callback to populate the request for current timestep'
global coprocessor
if datadescription.GetForceOutput():
# We are just going to request all fields and meshes from the simulation
# code/adaptor.
for i in range(datadescription.GetNumberOfInputDescriptions()):
datadescription.GetInputDescription(i).AllFieldsOn()
datadescription.GetInputDescription(i).GenerateMeshOn()
return
# setup requests for all inputs based on the requirements of the
# pipeline.
coprocessor.LoadRequestedData(datadescription)
# ------------------------ Processing method ------------------------
def DoCoProcessing(datadescription):
'Callback to do co-processing for current timestep'
global coprocessor
# Update the coprocessor by providing it the newly generated simulation data.
# If the pipeline hasn't been setup yet, this will setup the pipeline.
coprocessor.UpdateProducers(datadescription)
# Write output data, if appropriate.
coprocessor.WriteData(datadescription)
# Write image capture (Last arg: rescale lookup table), if appropriate.
coprocessor.WriteImages(datadescription, rescale_lookuptable=False)
# Live Visualization, if enabled.
coprocessor.DoLiveVisualization(datadescription, 'localhost', 22222)
@@ -35,6 +35,26 @@ set(CMAKE_CXX_FLAGS_RELEASE
"-O3 -std=c++11 -Wall -Wextra -Wno-unused-parameter -Wnon-virtual-dtor -Wno-overloaded-virtual")
set(CMAKE_C_FLAGS_RELEASE "-O3 -std=c++11")
# Feature test: does vtkCPProcessor::Initialize() accept a working directory?
set(test_file ${CMAKE_CURRENT_BINARY_DIR}/check_initialize.cxx)
file(WRITE ${test_file}
"#include <vtkCPProcessor.h>\n"
"int main() {\n"
" vtkCPProcessor* p = vtkCPProcessor::New();\n"
" p->Initialize(\"AAA\");\n"
" p->Delete();\n"
" return 0;\n"
"}")
try_compile(CATALYST_HAS_WORKING_DIRECTORY
${CMAKE_CURRENT_BINARY_DIR} ${test_file}
LINK_LIBRARIES vtkPVPythonCatalyst
CMAKE_FLAGS "-DINCLUDE_DIRECTORIES=${PARAVIEW_INCLUDE_DIRS}"
)
if (CATALYST_HAS_WORKING_DIRECTORY)
add_definitions(-DUSE_CATALYST_WORKING_DIRECTORY)
endif()
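The try_compile test above probes whether the available Catalyst provides the vtkCPProcessor::Initialize(const char*) overload used to set a working directory. A minimal sketch of how the resulting USE_CATALYST_WORKING_DIRECTORY define could be used on the C++ side; the adaptor sources are not part of this hunk, so the helper and its names below are illustrative only:

    #include <string>
    #include <vtkCPProcessor.h>

    // Illustrative helper: initialise the Catalyst co-processor, preferring
    // the working-directory overload when the compile test detected it.
    static bool initCoprocessor(vtkCPProcessor* proc, const std::string& outputDir)
    {
    #ifdef USE_CATALYST_WORKING_DIRECTORY
        return proc->Initialize(outputDir.c_str()) != 0;  // write relative to outputDir
    #else
        return proc->Initialize() != 0;                    // outputs land in the cwd
    #endif
    }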
# Set output library destination to OpenFOAM library directory
set(LIBRARY_OUTPUT_PATH $ENV{FOAM_LIBBIN}
CACHE INTERNAL
@@ -43,6 +63,9 @@ set(LIBRARY_OUTPUT_PATH $ENV{FOAM_LIBBIN}
file(GLOB SOURCE_FILES
catalystCoprocess.C
catalystTools.C
catalystInput.C
catalystFunctionObject.C
cloud/catalystCloud.C
cloud/foamVtkCloudAdaptor.C
......
@@ -24,12 +24,10 @@ License
\*---------------------------------------------------------------------------*/
#include "catalystFaMesh.H"
#include "catalystCoprocess.H"
#include "addToRunTimeSelectionTable.H"
#include "faMesh.H"
#include "fvMesh.H"
#include <vtkNew.h>
#include <vtkCPDataDescription.h>
#include <vtkMultiBlockDataSet.h>
#include <vtkInformation.h>
@@ -38,59 +36,39 @@ License
namespace Foam
{
namespace functionObjects
namespace catalyst
{
defineTypeNameAndDebug(catalystFaMesh, 0);
addToRunTimeSelectionTable(functionObject, catalystFaMesh, dictionary);
defineTypeNameAndDebug(faMeshInput, 0);
addNamedToRunTimeSelectionTable
(
catalystInput,
faMeshInput,
dictionary,
area
);
}
}
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
bool Foam::functionObjects::catalystFaMesh::readBasics(const dictionary& dict)
{
int debugLevel = 0;
if (dict.readIfPresent("debug", debugLevel))
{
catalystCoprocess::debug = debugLevel;
}
fileName outputDir;
if (dict.readIfPresent("mkdir", outputDir))
{
outputDir.expand();
outputDir.clean();
Foam::mkDir(outputDir);
}
dict.lookup("scripts") >> scripts_; // Python scripts
catalystCoprocess::expand(scripts_, dict); // Expand and check availability
return true;
}
void Foam::functionObjects::catalystFaMesh::updateState
(
polyMesh::readUpdateState state
)
void Foam::catalyst::faMeshInput::update()
{
// Trigger change of state
// Be really paranoid and verify if the mesh actually exists
const wordList regionNames(backends_.toc());
// Backend requires a corresponding mesh
backends_.filterKeys
(
[this](const word& k){ return meshes_.found(k); }
);
for (const word& regionName : regionNames)
forAllConstIters(meshes_, iter)
{
if (meshes_.found(regionName) && time_.found(regionName))
if (!backends_.found(iter.key()))
{
backends_[regionName]->updateState(state);
}
else
{
backends_.erase(regionName);
meshes_.erase(regionName);
backends_.insert
(
iter.key(),
new Foam::vtk::faMeshAdaptor(*(iter.object()))
);
}
}
}
@@ -98,52 +76,52 @@ void Foam::functionObjects::catalystFaMesh::updateState
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::functionObjects::catalystFaMesh::catalystFaMesh
Foam::catalyst::faMeshInput::faMeshInput
(
const word& name,
const Time& runTime,
const dictionary& dict
)
:
fvMeshFunctionObject(name, runTime, dict),
catalystInput(name),
time_(runTime),
regionName_(),
selectAreas_(),
selectFields_(),
scripts_(),
meshes_(),
backends_(),
adaptor_()
backends_()
{
read(dict);
}
// * * * * * * * * * * * * * * * * Destructor * * * * * * * * * * * * * * * //
Foam::functionObjects::catalystFaMesh::~catalystFaMesh()
{}
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
bool Foam::functionObjects::catalystFaMesh::read(const dictionary& dict)
bool Foam::catalyst::faMeshInput::read(const dictionary& dict)
{
fvMeshFunctionObject::read(dict);
catalystInput::read(dict);
readBasics(dict);
selectAreas_.clear();
selectFields_.clear();
backends_.clear();
// All possible meshes
meshes_ = mesh_.lookupClass<faMesh>();
regionName_ =
dict.lookupOrDefault<word>("region", polyMesh::defaultRegion);
const objectRegistry& obr =
time_.lookupObject<objectRegistry>(regionName_);
// All possible meshes for the given region
meshes_ = obr.lookupClass<faMesh>();
selectAreas_.clear();
dict.readIfPresent("areas", selectAreas_);
if (selectAreas_.empty())
{
word areaName;
if (!dict.readIfPresent("area", areaName))
{
wordList available = mesh_.sortedNames<faMesh>();
wordList available = obr.sortedNames<faMesh>();
if (available.size())
{
@@ -159,65 +137,49 @@ bool Foam::functionObjects::catalystFaMesh::read(const dictionary& dict)
}
// Restrict to specified meshes
meshes_.filterKeys(wordRes(selectAreas_));
meshes_.filterKeys(selectAreas_);
dict.lookup("fields") >> selectFields_;
Info<< type() << " " << name() << ":" << nl
<<" areas " << flatOutput(selectAreas_) << nl
<<" meshes " << flatOutput(meshes_.sortedToc()) << nl
<<" fields " << flatOutput(selectFields_) << nl
<<" scripts " << scripts_ << nl;
if (adaptor_.valid())
{
// Run-time modification of pipeline
adaptor_().reset(scripts_);
}
// Ensure consistency - only retain backends with corresponding mesh region
backends_.retain(meshes_);
return true;
}
bool Foam::functionObjects::catalystFaMesh::execute()
void Foam::catalyst::faMeshInput::update(polyMesh::readUpdateState state)
{
const wordList regionNames(meshes_.sortedToc());
// Trigger change of state
if (regionNames.empty())
{
return false;
}
const objectRegistry& obr =
time_.lookupObject<objectRegistry>(regionName_);
// Enforce sanity for backends and adaptor
// Be really paranoid and verify if the mesh actually exists
const wordList regionNames(backends_.toc());
for (const word& regionName : regionNames)
{
bool updateAdaptor = false;
forAllConstIters(meshes_, iter)
if (meshes_.found(regionName) && obr.found(regionName))
{
if (!backends_.found(iter.key()))
{
backends_.insert
(
iter.key(),
new Foam::vtk::faMeshAdaptor(*(iter.object()))
);
updateAdaptor = true;
}
backends_[regionName]->updateState(state);
}
if (updateAdaptor && !adaptor_.valid())
else
{
adaptor_.reset(new catalystCoprocess());
adaptor_().reset(scripts_);
backends_.erase(regionName);
meshes_.erase(regionName);
}
}
}
// Gather all fields that we know how to convert
Foam::label Foam::catalyst::faMeshInput::addChannels(dataQuery& dataq)
{
update(); // Enforce sanity for backends and adaptor
if (backends_.empty())
{
return 0;
}
// Gather all fields that we know how to convert
wordHashSet allFields;
forAllConstIters(backends_, iter)
{
@@ -225,96 +187,89 @@ bool Foam::functionObjects::catalystFaMesh::execute()
}
// Data description for co-processing
vtkNew<vtkCPDataDescription> descrip;
dataq.set(name(), allFields);
// Form data query for catalyst
catalystCoprocess::dataQuery dataq
(
vtk::faMeshAdaptor::channelNames.names(),
time_, // timeQuery
descrip.Get()
);
return 1;
}
// Query catalyst
const HashTable<wordHashSet> expecting(adaptor_().query(dataq, allFields));
if (catalystCoprocess::debug)
bool Foam::catalyst::faMeshInput::convert
(
dataQuery& dataq,
outputChannels& outputs
)
{
const wordList regionNames(backends_.sortedToc());
if (regionNames.empty())
{
if (expecting.empty())
{
Info<< type() << ": expecting no data" << nl;
}
else
{
Info<< type() << ": expecting data " << expecting << nl;
}
return false; // skip - not available
}
if (expecting.empty())
// Single channel only