OpenFOAM: v1912 released! - For more information see https://www.openfoam.com/releases/openfoam-v1912

Commit 3c0a2e79 authored by Mark Olesen's avatar Mark Olesen

ENH: support mkdir as dictionary keyword

- this is a convenient means of creating an output directory from within
  the function object, without a corresponding Python or shell script.

  Some pipelines (image generation) balk if the output directory does
  not exist. Others (vtm writer etc) will create their own.

STYLE: use OpenFOAM caseDicts config files, newer string expansion.

- Eg, "<system>/scripts" instead of "$FOAM_CASE/system/scripts"

- OpenFOAM CleanFunctions now include removal of the 'insitu' directory
parent 24f17124
from paraview.simple import *
from paraview import coprocessing
# ----------------------- CoProcessor definition -----------------------
def CreateCoProcessor():
    """Build the Catalyst coprocessor for the 'input' source.

    Registers a multi-block writer for 'insitu/area_%t.vtm'.
    """

    def _CreatePipeline(coprocessor, datadescription):
        class Pipeline:
            # Producer for the simulation input named 'input'
            input1 = coprocessor.CreateProducer(datadescription, 'input')

            # Multi-block writer, registered with filename pattern + frequency
            writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
            coprocessor.RegisterWriter(writer1, filename='insitu/area_%t.vtm', freq=2)

        return Pipeline()

    class CoProcessor(coprocessing.CoProcessor):
        def CreatePipeline(self, datadescription):
            self.Pipeline = _CreatePipeline(self, datadescription)

    proc = CoProcessor()
    proc.SetUpdateFrequencies({'input': [10]})
    return proc
#--------------------------------------------------------------
# Global variable holding the coprocessor for each timestep.
# Creating the CoProcessor object doesn't actually create the ParaView
# pipeline; it is set up automatically on the first call to
# coprocessor.UpdateProducers().
coprocessor = CreateCoProcessor()

#--------------------------------------------------------------
# Enable Live-Visualization with ParaView
coprocessor.EnableLiveVisualization(True)


# ---------------------- Data Selection method ----------------------
def RequestDataDescription(datadescription):
    'Callback to populate the request for current timestep'
    global coprocessor

    if datadescription.GetForceOutput():
        # Request all fields and meshes from the simulation code/adaptor.
        for i in range(datadescription.GetNumberOfInputDescriptions()):
            datadescription.GetInputDescription(i).AllFieldsOn()
            datadescription.GetInputDescription(i).GenerateMeshOn()
        return

    # Setup requests for all inputs based on the requirements of the pipeline.
    coprocessor.LoadRequestedData(datadescription)


# ------------------------ Processing method ------------------------
def DoCoProcessing(datadescription):
    'Callback to do co-processing for current timestep'
    global coprocessor

    # Update the coprocessor with the newly generated simulation data.
    # If the pipeline hasn't been setup yet, this will setup the pipeline.
    coprocessor.UpdateProducers(datadescription)

    # Write output data, if appropriate.
    coprocessor.WriteData(datadescription)

    # Write image capture (Last arg: rescale lookup table), if appropriate.
    coprocessor.WriteImages(datadescription, rescale_lookuptable=False)

    # Live Visualization, if enabled.
    coprocessor.DoLiveVisualization(datadescription, 'localhost', 22222)
from paraview.simple import *
from paraview import coprocessing
# ----------------------- CoProcessor definition -----------------------
def CreateCoProcessor():
    """Build the Catalyst coprocessor for the 'cloud' source.

    Registers a multi-block writer for 'insitu/cloud_%t.vtm'.
    """

    def _CreatePipeline(coprocessor, datadescription):
        class Pipeline:
            # Producer for the simulation input named 'cloud'
            input1 = coprocessor.CreateProducer(datadescription, 'cloud')

            # Register writer with coprocessor, with filename + output freq
            writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
            coprocessor.RegisterWriter(writer1, filename='insitu/cloud_%t.vtm', freq=10)

        return Pipeline()

    class CoProcessor(coprocessing.CoProcessor):
        def CreatePipeline(self, datadescription):
            self.Pipeline = _CreatePipeline(self, datadescription)

    proc = CoProcessor()
    proc.SetUpdateFrequencies({'cloud': [10]})
    return proc
#--------------------------------------------------------------
# Global variable holding the coprocessor for each timestep.
# Creating the CoProcessor object doesn't actually create the ParaView
# pipeline; it is set up automatically on the first call to
# coprocessor.UpdateProducers().
coprocessor = CreateCoProcessor()

#--------------------------------------------------------------
# Enable Live-Visualization with ParaView
coprocessor.EnableLiveVisualization(True)


# ---------------------- Data Selection method ----------------------
def RequestDataDescription(datadescription):
    'Callback to populate the request for current timestep'
    global coprocessor

    if datadescription.GetForceOutput():
        # Request all fields and meshes from the simulation code/adaptor.
        for i in range(datadescription.GetNumberOfInputDescriptions()):
            datadescription.GetInputDescription(i).AllFieldsOn()
            datadescription.GetInputDescription(i).GenerateMeshOn()
        return

    # Setup requests for all inputs based on the requirements of the pipeline.
    coprocessor.LoadRequestedData(datadescription)


# ------------------------ Processing method ------------------------
def DoCoProcessing(datadescription):
    'Callback to do co-processing for current timestep'
    global coprocessor

    # Update the coprocessor with the newly generated simulation data.
    # If the pipeline hasn't been setup yet, this will setup the pipeline.
    coprocessor.UpdateProducers(datadescription)

    # Write output data, if appropriate.
    coprocessor.WriteData(datadescription)

    # Write image capture (Last arg: rescale lookup table), if appropriate.
    coprocessor.WriteImages(datadescription, rescale_lookuptable=False)

    # Live Visualization, if enabled.
    coprocessor.DoLiveVisualization(datadescription, 'localhost', 22222)
from paraview.simple import *
from paraview import coprocessing
# ----------------------- CoProcessor definition -----------------------
def CreateCoProcessor():
    """Build the Catalyst coprocessor for the 'patches' source.

    Registers a multi-block writer for 'insitu/patches_%t.vtm'.
    """

    def _CreatePipeline(coprocessor, datadescription):
        class Pipeline:
            # Producer for the simulation input named 'patches'
            input1 = coprocessor.CreateProducer(datadescription, 'patches')

            # Multi-block writer, registered with filename pattern + frequency
            writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
            coprocessor.RegisterWriter(writer1, filename='insitu/patches_%t.vtm', freq=2)

        return Pipeline()

    class CoProcessor(coprocessing.CoProcessor):
        def CreatePipeline(self, datadescription):
            self.Pipeline = _CreatePipeline(self, datadescription)

    proc = CoProcessor()
    proc.SetUpdateFrequencies({'patches': [10]})
    return proc
#--------------------------------------------------------------
# Global variable holding the coprocessor for each timestep.
# Creating the CoProcessor object doesn't actually create the ParaView
# pipeline; it is set up automatically on the first call to
# coprocessor.UpdateProducers().
coprocessor = CreateCoProcessor()

#--------------------------------------------------------------
# Enable Live-Visualization with ParaView
coprocessor.EnableLiveVisualization(True)


# ---------------------- Data Selection method ----------------------
def RequestDataDescription(datadescription):
    'Callback to populate the request for current timestep'
    global coprocessor

    if datadescription.GetForceOutput():
        # Request all fields and meshes from the simulation code/adaptor.
        for i in range(datadescription.GetNumberOfInputDescriptions()):
            datadescription.GetInputDescription(i).AllFieldsOn()
            datadescription.GetInputDescription(i).GenerateMeshOn()
        return

    # Setup requests for all inputs based on the requirements of the pipeline.
    coprocessor.LoadRequestedData(datadescription)


# ------------------------ Processing method ------------------------
def DoCoProcessing(datadescription):
    'Callback to do co-processing for current timestep'
    global coprocessor

    # Update the coprocessor with the newly generated simulation data.
    # If the pipeline hasn't been setup yet, this will setup the pipeline.
    coprocessor.UpdateProducers(datadescription)

    # Write output data, if appropriate.
    coprocessor.WriteData(datadescription)

    # Write image capture (Last arg: rescale lookup table), if appropriate.
    coprocessor.WriteImages(datadescription, rescale_lookuptable=False)

    # Live Visualization, if enabled.
    coprocessor.DoLiveVisualization(datadescription, 'localhost', 22222)
......@@ -3,6 +3,8 @@ cd ${0%/*} || exit 1 # Run from this directory
. $WM_PROJECT_DIR/wmake/scripts/wmakeFunctions # The wmake functions
rm -f $FOAM_LIBBIN/libcatalystFoam* 2>/dev/null # Cleanup library
rm -f $FOAM_SITE_LIBBIN/libcatalystFoam* 2>/dev/null # ... extra safety
rm -f $FOAM_USER_LIBBIN/libcatalystFoam* 2>/dev/null # ... extra safety
# Cleanup generated files - remove entire top-level
removeObjectDir $PWD
......
......@@ -48,6 +48,29 @@ namespace functionObjects
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
//- Read settings common to the catalyst function objects:
//  optional "debug" level, optional "mkdir" output directory,
//  and the mandatory "scripts" list of pipeline scripts.
bool Foam::functionObjects::catalystFaMesh::readBasics(const dictionary& dict)
{
    // Optional debug verbosity, forwarded to the coprocess wrapper
    int debugLevel = 0;
    if (dict.readIfPresent("debug", debugLevel))
    {
        catalystCoprocess::debug = debugLevel;
    }

    // Optional output directory creation (keyword: mkdir).
    // Some pipelines (eg, image generation) balk if the output
    // directory does not exist.
    fileName outputDir;
    if (dict.readIfPresent("mkdir", outputDir))
    {
        outputDir.expand();  // Expand $VAR, <case> etc.
        outputDir.clean();   // Remove trailing, repeated slashes etc.
        Foam::mkDir(outputDir);
    }

    dict.lookup("scripts") >> scripts_;         // Python scripts
    catalystCoprocess::expand(scripts_, dict);  // Expand and check availability

    return true;
}
void Foam::functionObjects::catalystFaMesh::updateState
(
polyMesh::readUpdateState state
......@@ -106,11 +129,7 @@ bool Foam::functionObjects::catalystFaMesh::read(const dictionary& dict)
{
fvMeshFunctionObject::read(dict);
int debugLevel = 0;
if (dict.readIfPresent("debug", debugLevel))
{
catalystCoprocess::debug = debugLevel;
}
readBasics(dict);
// All possible meshes
meshes_ = mesh_.lookupClass<faMesh>();
......@@ -143,9 +162,7 @@ bool Foam::functionObjects::catalystFaMesh::read(const dictionary& dict)
meshes_.filterKeys(wordRes(selectAreas_));
dict.lookup("fields") >> selectFields_;
dict.lookup("scripts") >> scripts_; // Python scripts
catalystCoprocess::expand(scripts_, dict); // Expand and check availability
Info<< type() << " " << name() << ":" << nl
<<" areas " << flatOutput(selectAreas_) << nl
......@@ -153,9 +170,9 @@ bool Foam::functionObjects::catalystFaMesh::read(const dictionary& dict)
<<" fields " << flatOutput(selectFields_) << nl
<<" scripts " << scripts_ << nl;
// Run-time modification of pipeline
if (adaptor_.valid())
{
// Run-time modification of pipeline
adaptor_().reset(scripts_);
}
......
......@@ -51,6 +51,7 @@ Usage
Property | Description | Required | Default
type | catalyst::area | yes |
log | report extra information | no | false
mkdir | initial directory to create | no |
region | | no | region0
regions | wordRe list of regions | no |
fields | wordRe list of fields | yes |
......@@ -127,8 +128,13 @@ protected:
// Protected Member Functions
//- Common boilerplate settings
bool readBasics(const dictionary& dict);
//- On movement
void updateState(polyMesh::readUpdateState state);
//- No copy construct
catalystFaMesh(const catalystFaMesh&) = delete;
......
......@@ -99,6 +99,7 @@ Foam::label Foam::catalystCoprocess::expand
string& s = scripts[scripti];
stringOps::inplaceExpand(s, dict, true, true);
fileName::clean(s); // Remove trailing, repeated slashes etc.
if (isFile(s))
{
......
......@@ -46,6 +46,31 @@ namespace functionObjects
}
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
//- Read settings common to the catalyst function objects:
//  optional "debug" level, optional "mkdir" output directory,
//  and the mandatory "scripts" list of pipeline scripts.
bool Foam::functionObjects::catalystCloud::readBasics(const dictionary& dict)
{
    // Optional debug verbosity, forwarded to the coprocess wrapper
    int debugLevel = 0;
    if (dict.readIfPresent("debug", debugLevel))
    {
        catalystCoprocess::debug = debugLevel;
    }

    // Optional output directory creation (keyword: mkdir).
    // Some pipelines (eg, image generation) balk if the output
    // directory does not exist.
    fileName outputDir;
    if (dict.readIfPresent("mkdir", outputDir))
    {
        outputDir.expand();  // Expand $VAR, <case> etc.
        outputDir.clean();   // Remove trailing, repeated slashes etc.
        Foam::mkDir(outputDir);
    }

    dict.lookup("scripts") >> scripts_;         // Python scripts
    catalystCoprocess::expand(scripts_, dict);  // Expand and check availability

    return true;
}
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::functionObjects::catalystCloud::catalystCloud
......@@ -76,11 +101,7 @@ bool Foam::functionObjects::catalystCloud::read(const dictionary& dict)
{
fvMeshFunctionObject::read(dict);
int debugLevel = 0;
if (dict.readIfPresent("debug", debugLevel))
{
catalystCoprocess::debug = debugLevel;
}
readBasics(dict);
selectClouds_.clear();
dict.readIfPresent("clouds", selectClouds_);
......@@ -95,18 +116,15 @@ bool Foam::functionObjects::catalystCloud::read(const dictionary& dict)
selectFields_.clear();
dict.readIfPresent("fields", selectFields_);
dict.lookup("scripts") >> scripts_; // Python scripts
catalystCoprocess::expand(scripts_, dict); // Expand and check availability
Info<< type() << " " << name() << ":" << nl
<<" clouds " << flatOutput(selectClouds_) << nl
<<" fields " << flatOutput(selectFields_) << nl
<<" scripts " << scripts_ << nl;
// Run-time modification of pipeline
if (adaptor_.valid())
{
// Run-time modification of pipeline
adaptor_().reset(scripts_);
}
......
......@@ -52,6 +52,7 @@ Usage
Property | Description | Required | Default value
type | catalyst::cloud | yes |
log | report extra information | no | false
mkdir | initial directory to create | no |
cloud | | no | defaultCloud
clouds | wordRe list of clouds | no |
region | | no | region0
......@@ -120,7 +121,11 @@ protected:
autoPtr<catalystCoprocess> adaptor_;
// Private Member Functions
// Protected Member Functions
//- Common boilerplate settings
bool readBasics(const dictionary& dict);
//- No copy construct
catalystCloud(const catalystCloud&) = delete;
......
......@@ -46,6 +46,29 @@ namespace functionObjects
// * * * * * * * * * * * * Protected Member Functions * * * * * * * * * * * //
//- Read settings common to the catalyst function objects:
//  optional "debug" level, optional "mkdir" output directory,
//  and the mandatory "scripts" list of pipeline scripts.
bool Foam::functionObjects::catalystFvMesh::readBasics(const dictionary& dict)
{
    // Optional debug verbosity, forwarded to the coprocess wrapper
    int debugLevel = 0;
    if (dict.readIfPresent("debug", debugLevel))
    {
        catalystCoprocess::debug = debugLevel;
    }

    // Optional output directory creation (keyword: mkdir).
    // Some pipelines (eg, image generation) balk if the output
    // directory does not exist.
    fileName outputDir;
    if (dict.readIfPresent("mkdir", outputDir))
    {
        outputDir.expand();  // Expand $VAR, <case> etc.
        outputDir.clean();   // Remove trailing, repeated slashes etc.
        Foam::mkDir(outputDir);
    }

    dict.lookup("scripts") >> scripts_;         // Python scripts
    catalystCoprocess::expand(scripts_, dict);  // Expand and check availability

    return true;
}
void Foam::functionObjects::catalystFvMesh::updateState
(
polyMesh::readUpdateState state
......@@ -105,12 +128,25 @@ bool Foam::functionObjects::catalystFvMesh::read(const dictionary& dict)
{
functionObject::read(dict);
// Common settings
int debugLevel = 0;
if (dict.readIfPresent("debug", debugLevel))
{
catalystCoprocess::debug = debugLevel;
}
fileName outputDir;
if (dict.readIfPresent("mkdir", outputDir))
{
outputDir.expand();
outputDir.clean();
Foam::mkDir(outputDir);
}
dict.lookup("scripts") >> scripts_; // Python scripts
catalystCoprocess::expand(scripts_, dict); // Expand and check availability
// All possible meshes
meshes_ = time_.lookupClass<fvMesh>();
......@@ -128,9 +164,7 @@ bool Foam::functionObjects::catalystFvMesh::read(const dictionary& dict)
meshes_.filterKeys(wordRes(selectRegions_));
dict.lookup("fields") >> selectFields_;
dict.lookup("scripts") >> scripts_; // Python scripts
catalystCoprocess::expand(scripts_, dict); // Expand and check availability
Info<< type() << " " << name() << ":" << nl
<<" regions " << flatOutput(selectRegions_) << nl
......@@ -138,9 +172,9 @@ bool Foam::functionObjects::catalystFvMesh::read(const dictionary& dict)
<<" fields " << flatOutput(selectFields_) << nl
<<" scripts " << scripts_ << nl;
// Run-time modification of pipeline
if (adaptor_.valid())
{
// Run-time modification of pipeline
adaptor_().reset(scripts_);
}
......
......@@ -52,6 +52,7 @@ Usage
Property | Description | Required | Default
type | catalyst | yes |
log | report extra information | no | false
mkdir | initial directory to create | no |
region | | no | region0
regions | wordRe list of regions | no |
fields | wordRe list of fields | yes |
......@@ -131,8 +132,13 @@ protected:
// Protected Member Functions
//- Common boilerplate settings
bool readBasics(const dictionary& dict);
//- On movement
void updateState(polyMesh::readUpdateState state);
//- No copy construct
catalystFvMesh(const catalystFvMesh&) = delete;
......
......@@ -7,7 +7,6 @@ cleanTimeDirectories
cleanFaMesh
rm -rf processor*
rm -rf insitu
#------------------------------------------------------------------------------
/*--------------------------------*- C++ -*----------------------------------*\
| ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: plus |
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
// Insitu processing of finiteArea fields with ParaView Catalyst
type catalyst::area;
libs ("libcatalystFoam.so");
executeControl timeStep;
writeControl none;
// ************************************************************************* //
......@@ -4,15 +4,17 @@ functions
{
catalystArea
{
#include "area.cfg";
// FUTURE: #includeEtc "caseDicts/postProcessing/catalyst/area.cfg"
#includeEtc "caseDicts/postProcessing/catalyst/area.cfg"
// Selected fields (words or regex)
fields ( ".*" );
// Output directory
//mkdir "<case>/images";
scripts
(
"$FOAM_CASE/system/scripts/writeArea.py"
"<system>/scripts/writeArea.py"
);
}
}
......
......@@ -3,14 +3,6 @@ cd ${0%/*} || exit 1 # Run from this directory
. $WM_PROJECT_DIR/bin/tools/CleanFunctions # Tutorial clean functions
cleanCase0
rm -rf VTK
rm -rf constant/cellToRegion
rm -rf constant/*/polyMesh
# Remove ParaView Catalyst output
rm -rf insitu
# Remove ADIOS output
rm -rf adiosData
# -----------------------------------------------------------------------------
......@@ -4,8 +4,7 @@ functions
{
catalyst
{
#include "catalyst.cfg";
// FUTURE: #includeEtc "caseDicts/postProcessing/catalyst/default.cfg"
#includeEtc "caseDicts/postProcessing/catalyst/default.cfg"
// All regions
regions (".*");
......@@ -15,8 +14,8 @@ functions
scripts
(
"$FOAM_CASE/system/scripts/slice1.py"
"$FOAM_CASE/system/scripts/writePatches.py"
"<system>/scripts/slice1.py"
"<system>/scripts/writePatches.py"
);
}
}
......
/*--------------------------------*- C++ -*----------------------------------*\
| ========= | |
| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox |
| \\ / O peration | Version: plus |
| \\ / A nd | Web: www.OpenFOAM.com |
| \\/ M anipulation | |
\*---------------------------------------------------------------------------*/
// Insitu processing of finiteVolume fields with ParaView Catalyst
type catalyst;
libs ("libcatalystFoam.so");
executeControl timeStep;
writeControl none;
// ************************************************************************* //
from paraview.simple import *
from paraview import coprocessing
# ----------------------- CoProcessor definition -----------------------
def CreateCoProcessor():
    """Build the Catalyst coprocessor for the 'mesh' source.

    Registers a multi-block writer for 'insitu/mesh_%t.vtm'.
    """

    def _CreatePipeline(coprocessor, datadescription):
        class Pipeline:
            # Producer for the simulation input named 'mesh'
            input1 = coprocessor.CreateProducer(datadescription, 'mesh')

            # Multi-block writer, registered with filename pattern + frequency
            writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
            coprocessor.RegisterWriter(writer1, filename='insitu/mesh_%t.vtm', freq=2)

        return Pipeline()

    class CoProcessor(coprocessing.CoProcessor):
        def CreatePipeline(self, datadescription):
            self.Pipeline = _CreatePipeline(self, datadescription)

    proc = CoProcessor()
    proc.SetUpdateFrequencies({'mesh': [10]})
    return proc
#--------------------------------------------------------------
# Global variable holding the coprocessor for each timestep.
# Creating the CoProcessor object doesn't actually create the ParaView
# pipeline; it is set up automatically on the first call to
# coprocessor.UpdateProducers().
coprocessor = CreateCoProcessor()

#--------------------------------------------------------------
# Enable Live-Visualization with ParaView
coprocessor.EnableLiveVisualization(True)


# ---------------------- Data Selection method ----------------------
def RequestDataDescription(datadescription):
    'Callback to populate the request for current timestep'
    global coprocessor

    if datadescription.GetForceOutput():
        # Request all fields and meshes from the simulation code/adaptor.
        for i in range(datadescription.GetNumberOfInputDescriptions()):
            datadescription.GetInputDescription(i).AllFieldsOn()
            datadescription.GetInputDescription(i).GenerateMeshOn()
        return

    # Setup requests for all inputs based on the requirements of the pipeline.
    coprocessor.LoadRequestedData(datadescription)


# ------------------------ Processing method ------------------------
def DoCoProcessing(datadescription):
    'Callback to do co-processing for current timestep'
    global coprocessor

    # Update the coprocessor with the newly generated simulation data.
    # If the pipeline hasn't been setup yet, this will setup the pipeline.
    coprocessor.UpdateProducers(datadescription)

    # Write output data, if appropriate.
    coprocessor.WriteData(datadescription)

    # Write image capture (Last arg: rescale lookup table), if appropriate.
    coprocessor.WriteImages(datadescription, rescale_lookuptable=False)

    # Live Visualization, if enabled.
    coprocessor.DoLiveVisualization(datadescription, 'localhost', 22222)
from paraview.simple import *
from paraview import coprocessing
# ----------------------- CoProcessor definition -----------------------
def CreateCoProcessor():
    """Build the Catalyst coprocessor for the 'patches' source.

    Registers a multi-block writer for 'insitu/patches_%t.vtm'.
    """

    def _CreatePipeline(coprocessor, datadescription):
        class Pipeline:
            # Producer for the simulation input named 'patches'
            input1 = coprocessor.CreateProducer(datadescription, 'patches')

            # Multi-block writer, registered with filename pattern + frequency
            writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
            coprocessor.RegisterWriter(writer1, filename='insitu/patches_%t.vtm', freq=2)

        return Pipeline()

    class CoProcessor(coprocessing.CoProcessor):
        def CreatePipeline(self, datadescription):
            self.Pipeline = _CreatePipeline(self, datadescription)

    proc = CoProcessor()
    proc.SetUpdateFrequencies({'patches': [10]})
    return proc
#--------------------------------------------------------------
# Global variable holding the coprocessor for each timestep.
# Creating the CoProcessor object doesn't actually create the ParaView
# pipeline; it is set up automatically on the first call to
# coprocessor.UpdateProducers().
coprocessor = CreateCoProcessor()
#--------------------------------------------------------------
# Enable Live-Visualization with ParaView
coprocessor.EnableLiveVisualization(True)
# ---------------------- Data Selection method ----------------------
def RequestDataDescription(datadescription):
'Callback to populate the request for current timestep'
global coprocessor
if datadescription.GetForceOutput() == True:
# We are just going to request all fields and meshes from the simulation
# code/adaptor.
for i in range(datadescription.GetNumberOfInputDescriptions()):