diff --git a/etc/allinputsgridwriter.py b/etc/allinputsgridwriter.py
index 88cdcdcf41590b06c723c8b53554766e31c48f39..fcd0c77826cb0365547fd59f5b842ac3d42bc9d6 100644
--- a/etc/allinputsgridwriter.py
+++ b/etc/allinputsgridwriter.py
@@ -40,7 +40,7 @@ def CreateCoProcessor():
                 print("Don't know how to create a writer for a ", grid.GetClassName())
 
             if extension:
-                coprocessor.RegisterWriter(writer, filename='insitu/'+name+'_%t'+extension, freq=outputfrequency)
+                coprocessor.RegisterWriter(writer, filename=name+'_%t'+extension, freq=outputfrequency)
 
         return Pipeline()
diff --git a/etc/printChannels.py b/etc/printChannels.py
new file mode 100644
index 0000000000000000000000000000000000000000..8fcb4472e7cf15c9e3db16f474c7a6fea2a80892
--- /dev/null
+++ b/etc/printChannels.py
@@ -0,0 +1,66 @@
+from paraview.simple import *
+from paraview import coprocessing
+
+# The frequency to output everything
+outputfrequency = 1
+
+# ----------------------- CoProcessor definition -----------------------
+
+def CreateCoProcessor():
+    def _CreatePipeline(coprocessor, datadescription):
+        class Pipeline:
+            for i in range(datadescription.GetNumberOfInputDescriptions()):
+                name = datadescription.GetInputDescriptionName(i)
+                adaptorinput = coprocessor.CreateProducer(datadescription, name)
+                grid = adaptorinput.GetClientSideObject().GetOutputDataObject(0)
+                print("Channel <" + name + "> is a ", grid.GetClassName())
+
+        return Pipeline()
+
+    class CoProcessor(coprocessing.CoProcessor):
+        def CreatePipeline(self, datadescription):
+            self.Pipeline = _CreatePipeline(self, datadescription)
+
+    return CoProcessor()
+
+#--------------------------------------------------------------
+# Global variables that will hold the pipeline for each timestep
+# Creating the CoProcessor object, doesn't actually create the ParaView pipeline.
+# It will be automatically setup when coprocessor.UpdateProducers() is called the
+# first time.
+coprocessor = CreateCoProcessor()
+
+#--------------------------------------------------------------
+# Enable Live-Visualization with ParaView
+coprocessor.EnableLiveVisualization(False)
+
+
+# ---------------------- Data Selection method ----------------------
+
+def RequestDataDescription(datadescription):
+    "Callback to populate the request for current timestep"
+    global coprocessor
+    if datadescription.GetForceOutput() == True or datadescription.GetTimeStep() % outputfrequency == 0:
+        # We are just going to request all fields and meshes from the simulation
+        # code/adaptor.
+        for i in range(datadescription.GetNumberOfInputDescriptions()):
+            datadescription.GetInputDescription(i).AllFieldsOn()
+            datadescription.GetInputDescription(i).GenerateMeshOn()
+        return
+
+    # setup requests for all inputs based on the requirements of the
+    # pipeline.
+    coprocessor.LoadRequestedData(datadescription)
+
+# ------------------------ Processing method ------------------------
+
+def DoCoProcessing(datadescription):
+    "Callback to do co-processing for current timestep"
+    global coprocessor
+
+    # Update the coprocessor by providing it the newly generated simulation data.
+    # If the pipeline hasn't been setup yet, this will setup the pipeline.
+    coprocessor.UpdateProducers(datadescription)
+
+    # Write output data, if appropriate.
+    coprocessor.WriteData(datadescription);
diff --git a/etc/writeArea.py b/etc/writeArea.py
index b94085ca56626336777916e612f0f2154b89bf4f..1911a9da9fc2d9cd404cbe5cb0fdb9cff5290295 100644
--- a/etc/writeArea.py
+++ b/etc/writeArea.py
@@ -9,7 +9,7 @@ def CreateCoProcessor():
             input1 = coprocessor.CreateProducer(datadescription, 'input')
             writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
 
-            coprocessor.RegisterWriter(writer1, filename='insitu/area_%t.vtm', freq=2)
+            coprocessor.RegisterWriter(writer1, filename='area_%t.vtm', freq=2)
 
         return Pipeline()
diff --git a/etc/writeCloud.py b/etc/writeCloud.py
index 73512ec053dbcd72980165d7ee0221c05dcc10d7..db348ea49c18683d31f030958c1237c52921fdb6 100644
--- a/etc/writeCloud.py
+++ b/etc/writeCloud.py
@@ -10,7 +10,7 @@ def CreateCoProcessor():
             writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
 
             # register writer with coprocessor, with filename + output freq
-            coprocessor.RegisterWriter(writer1, filename='insitu/cloud_%t.vtm', freq=10)
+            coprocessor.RegisterWriter(writer1, filename='cloud_%t.vtm', freq=10)
 
         return Pipeline()
diff --git a/etc/writePatches.py b/etc/writePatches.py
index 92b8c9f374e46b2ebcda93d2f7e27cd15fa1fb13..067e41d6d12993ffcc68e1e7946d4a3159366e33 100644
--- a/etc/writePatches.py
+++ b/etc/writePatches.py
@@ -9,7 +9,7 @@ def CreateCoProcessor():
             input1 = coprocessor.CreateProducer(datadescription, 'patches')
             writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
 
-            coprocessor.RegisterWriter(writer1, filename='insitu/patches_%t.vtm', freq=2)
+            coprocessor.RegisterWriter(writer1, filename='patches_%t.vtm', freq=2)
 
         return Pipeline()
diff --git a/src/catalyst/CMakeLists-Common.txt b/src/catalyst/CMakeLists-Common.txt
index dc47eb6ecf234c651de17d663ffab18d556bd9fd..4ee2eef7b5cddfed78ff1d5ff72dd70635385bbb 100644
--- a/src/catalyst/CMakeLists-Common.txt
+++ b/src/catalyst/CMakeLists-Common.txt
@@ -35,6 +35,26 @@
 set(CMAKE_CXX_FLAGS_RELEASE "-O3 -std=c++11 -Wall -Wextra -Wno-unused-parameter -Wnon-virtual-dtor -Wno-overloaded-virtual")
 set(CMAKE_C_FLAGS_RELEASE "-O3 -std=c++11")
 
+# Check whether vtkCPProcessor::Initialize() accepts a working directory
+set(test_file ${CMAKE_CURRENT_BINARY_DIR}/check_initialize.cxx)
+file(WRITE ${test_file}
+    "#include <vtkCPProcessor.h>\n"
+    "int main() {\n"
+    "  vtkCPProcessor* p = vtkCPProcessor::New();\n"
+    "  p->Initialize(\"AAA\");\n"
+    "  p->Delete();\n"
+    "  return 0;\n"
+    "}")
+try_compile(CATALYST_HAS_WORKING_DIRECTORY
+    ${CMAKE_CURRENT_BINARY_DIR} ${test_file}
+    LINK_LIBRARIES vtkPVPythonCatalyst
+    CMAKE_FLAGS "-DINCLUDE_DIRECTORIES=${PARAVIEW_INCLUDE_DIRS}"
+    )
+if (CATALYST_HAS_WORKING_DIRECTORY)
+    add_definitions(-DUSE_CATALYST_WORKING_DIRECTORY)
+endif()
+
+
 # Set output library destination to OpenFOAM library directory
 set(LIBRARY_OUTPUT_PATH $ENV{FOAM_LIBBIN}
     CACHE INTERNAL
diff --git a/src/catalyst/areaMesh/catalystFaMesh.C b/src/catalyst/areaMesh/catalystFaMesh.C
index 68c4d464ca4de5e6ac075b1054211d19eea31e24..02f8760df165f137627db3a38afa504b6b464ad7 100644
--- a/src/catalyst/areaMesh/catalystFaMesh.C
+++ b/src/catalyst/areaMesh/catalystFaMesh.C
@@ -56,17 +56,34 @@ bool Foam::functionObjects::catalystFaMesh::readBasics(const dictionary& dict)
         catalystCoprocess::debug = debugLevel;
     }
 
-    fileName outputDir;
-    if (dict.readIfPresent("mkdir", outputDir))
+    if (Pstream::master())
     {
-        outputDir.expand();
-        outputDir.clean();
-        Foam::mkDir(outputDir);
+        fileName dir;
+        if (dict.readIfPresent("mkdir", dir))
+        {
+            dir.expand();
+            dir.clean();
+            Foam::mkDir(dir);
+        }
+    }
+
+    dict.readIfPresent("outputDir", outputDir_);
+    outputDir_.expand();
+    outputDir_.clean();
+    if (Pstream::master())
+    {
+        Foam::mkDir(outputDir_);
     }
 
     dict.lookup("scripts") >> scripts_;         // Python scripts
     catalystCoprocess::expand(scripts_, dict);  // Expand and check availability
 
+    if (adaptor_.valid())
+    {
+        // Run-time modification of pipeline
+        adaptor_().reset(outputDir_, scripts_);
+    }
+
     return true;
 }
 
@@ -106,12 +123,13 @@ Foam::functionObjects::catalystFaMesh::catalystFaMesh
 )
 :
     fvMeshFunctionObject(name, runTime, dict),
+    outputDir_("<case>/insitu"),
+    scripts_(),
+    adaptor_(),
     selectAreas_(),
     selectFields_(),
-    scripts_(),
     meshes_(),
-    backends_(),
-    adaptor_()
+    backends_()
 {
     if (postProcess)
     {
@@ -171,18 +189,12 @@ bool Foam::functionObjects::catalystFaMesh::read(const dictionary& dict)
 
     dict.lookup("fields") >> selectFields_;
-
     Info<< type() << " " << name() << ":" << nl
         <<"    areas   " << flatOutput(selectAreas_) << nl
         <<"    meshes  " << flatOutput(meshes_.sortedToc()) << nl
         <<"    fields  " << flatOutput(selectFields_) << nl
         <<"    scripts " << scripts_ << nl;
 
-    if (adaptor_.valid())
-    {
-        // Run-time modification of pipeline
-        adaptor_().reset(scripts_);
-    }
 
     // Ensure consistency - only retain backends with corresponding mesh region
     backends_.retain(meshes_);
@@ -219,7 +231,7 @@ bool Foam::functionObjects::catalystFaMesh::execute()
 
         if (updateAdaptor && !adaptor_.valid())
         {
             adaptor_.reset(new catalystCoprocess());
-            adaptor_().reset(scripts_);
+            adaptor_().reset(outputDir_, scripts_);
         }
     }
diff --git a/src/catalyst/areaMesh/catalystFaMesh.H b/src/catalyst/areaMesh/catalystFaMesh.H
index 07b5b72e995685c77d4b3c4bcb11dc635c556e43..17dc3f3eac4f5b7a66f95dde48d57989ad019999 100644
--- a/src/catalyst/areaMesh/catalystFaMesh.H
+++ b/src/catalyst/areaMesh/catalystFaMesh.H
@@ -107,24 +107,28 @@ protected:
 
     // Protected data
 
+        //- The output directory
+        fileName outputDir_;
+
+        //- Python scripts for the catalyst pipeline
+        stringList scripts_;
+
+        //- The catalyst coprocess
+        autoPtr<catalystCoprocess> adaptor_;
+
+        //- Requested names of areas to process
         wordRes selectAreas_;
 
         //- Names of fields to process
         wordRes selectFields_;
 
-        //- Python scripts for the catalyst pipeline
-        stringList scripts_;
-
         //- Pointers to the requested mesh regions
         HashTable<const faMesh*> meshes_;
 
         //- Backends for OpenFOAM to VTK translation (with internal caching)
         HashPtrTable<vtk::faMeshAdaptor> backends_;
 
-        //- The catalyst coprocess
-        autoPtr<catalystCoprocess> adaptor_;
-
 
     // Protected Member Functions
diff --git a/src/catalyst/catalystCoprocess.C b/src/catalyst/catalystCoprocess.C
index 120859fe88b49908fba2a68ee83972032ba354ba..967fdfb1c06361f0bf0006577df08300f4ad4691 100644
--- a/src/catalyst/catalystCoprocess.C
+++ b/src/catalyst/catalystCoprocess.C
@@ -222,25 +222,56 @@ void Foam::catalystCoprocess::stop()
 }
 
 
-void Foam::catalystCoprocess::reset()
+void Foam::catalystCoprocess::reset(const fileName& outputDir)
 {
+    #ifdef USE_CATALYST_WORKING_DIRECTORY
     if (coproc_ == nullptr)
     {
         coproc_ = vtkCPProcessor::New();
-        coproc_->Initialize();
+        coproc_->Initialize(outputDir.c_str());
 
         Info<< "Connecting ParaView Catalyst..." << endl;
     }
     else
+    {
+        coproc_->RemoveAllPipelines();
+
+        if (outputDir == coproc_->GetWorkingDirectory())
+        {
+            Info<< "Rebinding ParaView Catalyst..." << endl;
+        }
+        else
+        {
+            // Changed working directory ... redo everything.
+            coproc_->Delete();
+            coproc_ = nullptr;
+
+            reset(outputDir);
+        }
+    }
+    #else
+    if (coproc_ == nullptr)
+    {
+        coproc_ = vtkCPProcessor::New();
+        coproc_->Initialize();
+    }
+    else
     {
         coproc_->RemoveAllPipelines();
 
         Info<< "Rebinding ParaView Catalyst..." << endl;
     }
+    Info<< "    Caution: using current working directory" << nl
+        << "    which may not be the same as the simulation directory" << endl;
+    #endif
 }
 
 
-void Foam::catalystCoprocess::reset(const UList<string>& scripts)
+void Foam::catalystCoprocess::reset
+(
+    const fileName& outputDir,
+    const UList<string>& scripts
+)
 {
-    reset();
+    reset(outputDir);
 
     int nscript = 0;
     for (const auto& script : scripts)
diff --git a/src/catalyst/catalystCoprocess.H b/src/catalyst/catalystCoprocess.H
index 754b79f40d81f319f2d917e0917e19a3f755d93d..3366a4c8d2b6f71db2f123dfcaf29c5c0c74002b 100644
--- a/src/catalyst/catalystCoprocess.H
+++ b/src/catalyst/catalystCoprocess.H
@@ -208,12 +208,18 @@ public:
         //- \return True if the coprocess has been initialized.
         bool good() const;
 
-        //- Reset/initialize pipeline without pipeline scripts.
-        void reset();
+        //- Reset/initialize pipeline with output directory, but without
+        //- pipeline scripts.
+        void reset(const fileName& outputDir);
 
-        //- Reset/initialize pipeline with python scripts.
+        //- Reset/initialize pipeline with output directory and with
+        //- pipeline scripts.
         //  The script names must have already been resolved prior calling this.
-        void reset(const UList<string>& scripts);
+        void reset
+        (
+            const fileName& outputDir,
+            const UList<string>& scripts
+        );
 
         //- Query the coprocess pipelines if they should be executed at this
         //- iteration and possibly which fields they require.
diff --git a/src/catalyst/cloud/catalystCloud.C b/src/catalyst/cloud/catalystCloud.C
index 94934b600c65a39fafc142e29c93bd198b5af9d2..206e769cabad8a85eb0bc138592eaf0e849c5b7a 100644
--- a/src/catalyst/cloud/catalystCloud.C
+++ b/src/catalyst/cloud/catalystCloud.C
@@ -56,17 +56,34 @@ bool Foam::functionObjects::catalystCloud::readBasics(const dictionary& dict)
         catalystCoprocess::debug = debugLevel;
     }
 
-    fileName outputDir;
-    if (dict.readIfPresent("mkdir", outputDir))
+    if (Pstream::master())
     {
-        outputDir.expand();
-        outputDir.clean();
-        Foam::mkDir(outputDir);
+        fileName dir;
+        if (dict.readIfPresent("mkdir", dir))
+        {
+            dir.expand();
+            dir.clean();
+            Foam::mkDir(dir);
+        }
+    }
+
+    dict.readIfPresent("outputDir", outputDir_);
+    outputDir_.expand();
+    outputDir_.clean();
+    if (Pstream::master())
+    {
+        Foam::mkDir(outputDir_);
     }
 
     dict.lookup("scripts") >> scripts_;         // Python scripts
     catalystCoprocess::expand(scripts_, dict);  // Expand and check availability
 
+    if (adaptor_.valid())
+    {
+        // Run-time modification of pipeline
+        adaptor_().reset(outputDir_, scripts_);
+    }
+
     return true;
 }
 
@@ -81,9 +98,11 @@ Foam::functionObjects::catalystCloud::catalystCloud
 )
 :
     fvMeshFunctionObject(name, runTime, dict),
+    outputDir_("<case>/insitu"),
+    scripts_(),
+    adaptor_(),
     selectClouds_(),
-    selectFields_(),
-    adaptor_()
+    selectFields_()
 {
     if (postProcess)
     {
@@ -124,18 +143,11 @@ bool Foam::functionObjects::catalystCloud::read(const dictionary& dict)
 
     selectFields_.clear();
     dict.readIfPresent("fields", selectFields_);
-
     Info<< type() << " " << name() << ":" << nl
         <<"    clouds  " << flatOutput(selectClouds_) << nl
         <<"    fields  " << flatOutput(selectFields_) << nl
         <<"    scripts " << scripts_ << nl;
 
-    if (adaptor_.valid())
-    {
-        // Run-time modification of pipeline
-        adaptor_().reset(scripts_);
-    }
-
     return true;
 }
 
@@ -154,7 +166,7 @@ bool Foam::functionObjects::catalystCloud::execute()
 
     if (!adaptor_.valid())
     {
         adaptor_.reset(new catalystCoprocess());
-        adaptor_().reset(scripts_);
+        adaptor_().reset(outputDir_, scripts_);
     }
 }
diff --git a/src/catalyst/cloud/catalystCloud.H b/src/catalyst/cloud/catalystCloud.H
index fb31f281ca610b8d5baddc88af77402007fb2f2d..1f81166bbdb43983524878ec66a511806ea7b3cc 100644
--- a/src/catalyst/cloud/catalystCloud.H
+++ b/src/catalyst/cloud/catalystCloud.H
@@ -108,11 +108,8 @@ protected:
 
     // Protected data
 
-        //- Requested names of clouds to process
-        wordRes selectClouds_;
-
-        //- Subset of cloud fields to process
-        wordRes selectFields_;
+        //- The output directory
+        fileName outputDir_;
 
         //- Python scripts for the catalyst pipeline
         stringList scripts_;
@@ -121,6 +118,13 @@ protected:
         autoPtr<catalystCoprocess> adaptor_;
 
+        //- Requested names of clouds to process
+        wordRes selectClouds_;
+
+        //- Subset of cloud fields to process
+        wordRes selectFields_;
+
+
     // Protected Member Functions
 
         //- Common boilerplate settings
diff --git a/src/catalyst/volMesh/catalystFvMesh.C b/src/catalyst/volMesh/catalystFvMesh.C
index 17e69908fa7f6ac61a0e75581a6881e7be96de49..22e86889c784882a0728a5289f3f9f70c91e04ec 100644
--- a/src/catalyst/volMesh/catalystFvMesh.C
+++ b/src/catalyst/volMesh/catalystFvMesh.C
@@ -54,17 +54,34 @@ bool Foam::functionObjects::catalystFvMesh::readBasics(const dictionary& dict)
         catalystCoprocess::debug = debugLevel;
     }
 
-    fileName outputDir;
-    if (dict.readIfPresent("mkdir", outputDir))
+    if (Pstream::master())
     {
-        outputDir.expand();
-        outputDir.clean();
-        Foam::mkDir(outputDir);
+        fileName dir;
+        if (dict.readIfPresent("mkdir", dir))
+        {
+            dir.expand();
+            dir.clean();
+            Foam::mkDir(dir);
+        }
+    }
+
+    dict.readIfPresent("outputDir", outputDir_);
+    outputDir_.expand();
+    outputDir_.clean();
+    if (Pstream::master())
+    {
+        Foam::mkDir(outputDir_);
     }
 
     dict.lookup("scripts") >> scripts_;         // Python scripts
     catalystCoprocess::expand(scripts_, dict);  // Expand and check availability
 
+    if (adaptor_.valid())
+    {
+        // Run-time modification of pipeline
+        adaptor_().reset(outputDir_, scripts_);
+    }
+
     return true;
 }
 
@@ -105,12 +122,13 @@ Foam::functionObjects::catalystFvMesh::catalystFvMesh
 :
     functionObject(name),
     time_(runTime),
+    outputDir_("<case>/insitu"),
+    scripts_(),
+    adaptor_(),
     selectRegions_(),
     selectFields_(),
-    scripts_(),
     meshes_(),
-    backends_(),
-    adaptor_()
+    backends_()
 {
     if (postProcess)
     {
@@ -135,25 +153,7 @@ Foam::functionObjects::catalystFvMesh::~catalystFvMesh()
 bool Foam::functionObjects::catalystFvMesh::read(const dictionary& dict)
 {
     functionObject::read(dict);
-
-    // Common settings
-    int debugLevel = 0;
-    if (dict.readIfPresent("debug", debugLevel))
-    {
-        catalystCoprocess::debug = debugLevel;
-    }
-
-    fileName outputDir;
-    if (dict.readIfPresent("mkdir", outputDir))
-    {
-        outputDir.expand();
-        outputDir.clean();
-        Foam::mkDir(outputDir);
-    }
-
-    dict.lookup("scripts") >> scripts_;         // Python scripts
-    catalystCoprocess::expand(scripts_, dict);  // Expand and check availability
-
+    readBasics(dict);
 
     // All possible meshes
     meshes_ = time_.lookupClass<fvMesh>();
@@ -173,19 +173,12 @@ bool Foam::functionObjects::catalystFvMesh::read(const dictionary& dict)
 
     dict.lookup("fields") >> selectFields_;
-
     Info<< type() << " " << name() << ":" << nl
         <<"    regions " << flatOutput(selectRegions_) << nl
         <<"    meshes  " << flatOutput(meshes_.sortedToc()) << nl
         <<"    fields  " << flatOutput(selectFields_) << nl
         <<"    scripts " << scripts_ << nl;
 
-    if (adaptor_.valid())
-    {
-        // Run-time modification of pipeline
-        adaptor_().reset(scripts_);
-    }
-
     // Ensure consistency - only retain backends with corresponding mesh region
     backends_.retain(meshes_);
@@ -221,7 +214,7 @@ bool Foam::functionObjects::catalystFvMesh::execute()
 
         if (updateAdaptor && !adaptor_.valid())
         {
             adaptor_.reset(new catalystCoprocess());
-            adaptor_().reset(scripts_);
+            adaptor_().reset(outputDir_, scripts_);
         }
     }
diff --git a/src/catalyst/volMesh/catalystFvMesh.H b/src/catalyst/volMesh/catalystFvMesh.H
index a5a264bcf1569ecbe8728f4824c43d46922c7f6a..3a59ef7e9ef635b7480110d991550e56209bbc53 100644
--- a/src/catalyst/volMesh/catalystFvMesh.H
+++ b/src/catalyst/volMesh/catalystFvMesh.H
@@ -111,24 +111,28 @@ protected:
         //- Reference to the time database
         const Time& time_;
 
+        //- The output directory
+        fileName outputDir_;
+
+        //- Python scripts for the catalyst pipeline
+        stringList scripts_;
+
+        //- The catalyst coprocess
+        autoPtr<catalystCoprocess> adaptor_;
+
+        //- Requested names of regions to process
         wordRes selectRegions_;
 
         //- Names of fields to process
         wordRes selectFields_;
 
-        //- Python scripts for the catalyst pipeline
-        stringList scripts_;
-
         //- Pointers to the requested mesh regions
         HashTable<const fvMesh*> meshes_;
 
         //- Backends for OpenFOAM to VTK translation (with internal caching)
         HashPtrTable<vtk::fvMeshAdaptor> backends_;
 
-        //- The catalyst coprocess
-        autoPtr<catalystCoprocess> adaptor_;
-
 
     // Protected Member Functions
diff --git a/tutorials/finiteArea/sphereSurfactantFoam/sphereTransport/system/scripts/writeArea.py b/tutorials/finiteArea/sphereSurfactantFoam/sphereTransport/system/scripts/writeArea.py
index b94085ca56626336777916e612f0f2154b89bf4f..1911a9da9fc2d9cd404cbe5cb0fdb9cff5290295 100644
--- a/tutorials/finiteArea/sphereSurfactantFoam/sphereTransport/system/scripts/writeArea.py
+++ b/tutorials/finiteArea/sphereSurfactantFoam/sphereTransport/system/scripts/writeArea.py
@@ -9,7 +9,7 @@ def CreateCoProcessor():
             input1 = coprocessor.CreateProducer(datadescription, 'input')
             writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
 
-            coprocessor.RegisterWriter(writer1, filename='insitu/area_%t.vtm', freq=2)
+            coprocessor.RegisterWriter(writer1, filename='area_%t.vtm', freq=2)
 
         return Pipeline()
diff --git a/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/slice1.py b/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/slice1.py
index 8237977bba693447cdf277cd7381bcee868bbd02..864dba02aebfea515fb6ae4114a70134cc31f386 100644
--- a/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/slice1.py
+++ b/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/slice1.py
@@ -18,14 +18,14 @@ def CreateCoProcessor():
             # register the writer with coprocessor
             # and provide it with information such as the filename to use,
             # how frequently to write the data, etc.
-            coprocessor.RegisterWriter(sliceWriter, filename='insitu/slice_%t.vtm', freq=10)
+            coprocessor.RegisterWriter(sliceWriter, filename='slice_%t.vtm', freq=10)
 
             meshWriter = servermanager.writers.XMLMultiBlockDataWriter(Input=mesh)
 
             # register the writer with coprocessor
             # and provide it with information such as the filename to use,
             # how frequently to write the data, etc.
-            coprocessor.RegisterWriter(meshWriter, filename='insitu/mesh_%t.vtm', freq=100)
+            coprocessor.RegisterWriter(meshWriter, filename='mesh_%t.vtm', freq=100)
 
         return Pipeline()
diff --git a/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writeMesh.py b/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writeMesh.py
index 43533a935f14b459731a657bc8f9401ef7b3eabc..c40f022235937f83e1d9cf8adcf010c245bfe3e3 100644
--- a/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writeMesh.py
+++ b/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writeMesh.py
@@ -9,7 +9,7 @@ def CreateCoProcessor():
             input1 = coprocessor.CreateProducer(datadescription, 'mesh')
             writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
 
-            coprocessor.RegisterWriter(writer1, filename='insitu/mesh_%t.vtm', freq=2)
+            coprocessor.RegisterWriter(writer1, filename='mesh_%t.vtm', freq=2)
 
         return Pipeline()
diff --git a/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writePatches.py b/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writePatches.py
index 92b8c9f374e46b2ebcda93d2f7e27cd15fa1fb13..067e41d6d12993ffcc68e1e7946d4a3159366e33 100644
--- a/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writePatches.py
+++ b/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writePatches.py
@@ -9,7 +9,7 @@ def CreateCoProcessor():
             input1 = coprocessor.CreateProducer(datadescription, 'patches')
             writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
 
-            coprocessor.RegisterWriter(writer1, filename='insitu/patches_%t.vtm', freq=2)
+            coprocessor.RegisterWriter(writer1, filename='patches_%t.vtm', freq=2)
 
         return Pipeline()
diff --git a/tutorials/incompressible/icoFoam/cavity/system/scripts/slice1.py b/tutorials/incompressible/icoFoam/cavity/system/scripts/slice1.py
index 34c06e7a5f0088b21c627c2e67b61e8d010db4a4..39d7d28f6bd40ed6b441fd8db2322349b48c9951 100644
--- a/tutorials/incompressible/icoFoam/cavity/system/scripts/slice1.py
+++ b/tutorials/incompressible/icoFoam/cavity/system/scripts/slice1.py
@@ -18,7 +18,7 @@ def CreateCoProcessor():
             # register the writer with coprocessor
             # and provide it with information such as the filename to use,
             # how frequently to write the data, etc.
-            coprocessor.RegisterWriter(sliceWriter, filename='insitu/slice_%t.vtm', freq=3)
+            coprocessor.RegisterWriter(sliceWriter, filename='slice_%t.vtm', freq=3)
 
             # create a new 'Parallel UnstructuredGrid Writer'
             meshWriter = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
@@ -26,7 +26,7 @@ def CreateCoProcessor():
             # register the writer with coprocessor
             # and provide it with information such as the filename to use,
             # how frequently to write the data, etc.
-            coprocessor.RegisterWriter(meshWriter, filename='insitu/fullgrid_%t.vtm', freq=10)
+            coprocessor.RegisterWriter(meshWriter, filename='fullgrid_%t.vtm', freq=10)
 
         return Pipeline()
diff --git a/tutorials/incompressible/icoFoam/cavity/system/scripts/writeMesh.py b/tutorials/incompressible/icoFoam/cavity/system/scripts/writeMesh.py
index a8f6236b42f8b849a17fd80fa8d2619c3b225daa..576083955b3e07afc416ebc62e7a2d1f653ce11d 100644
--- a/tutorials/incompressible/icoFoam/cavity/system/scripts/writeMesh.py
+++ b/tutorials/incompressible/icoFoam/cavity/system/scripts/writeMesh.py
@@ -9,11 +9,11 @@ def CreateCoProcessor():
             input1 = coprocessor.CreateProducer(datadescription, "mesh")
             writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
             # Register with filename to use, output frequency
-            coprocessor.RegisterWriter(writer1, filename='insitu/mesh_%t.vtm', freq=2)
+            coprocessor.RegisterWriter(writer1, filename='mesh_%t.vtm', freq=2)
 
             # input2 = coprocessor.CreateProducer(datadescription, "patches")
             # writer2 = servermanager.writers.XMLMultiBlockDataWriter(Input=input2)
             #
-            # coprocessor.RegisterWriter(writer2, filename='insitu/patches_%t.vtm', freq=2)
+            # coprocessor.RegisterWriter(writer2, filename='patches_%t.vtm', freq=2)
 
         return Pipeline()
diff --git a/tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeCloud.py b/tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeCloud.py
index 73512ec053dbcd72980165d7ee0221c05dcc10d7..db348ea49c18683d31f030958c1237c52921fdb6 100644
--- a/tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeCloud.py
+++ b/tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeCloud.py
@@ -10,7 +10,7 @@ def CreateCoProcessor():
             writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
 
             # register writer with coprocessor, with filename + output freq
-            coprocessor.RegisterWriter(writer1, filename='insitu/cloud_%t.vtm', freq=10)
+            coprocessor.RegisterWriter(writer1, filename='cloud_%t.vtm', freq=10)
 
         return Pipeline()
diff --git a/tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeMesh.py b/tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeMesh.py
new file mode 100644
index 0000000000000000000000000000000000000000..c40f022235937f83e1d9cf8adcf010c245bfe3e3
--- /dev/null
+++ b/tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeMesh.py
@@ -0,0 +1,70 @@
+from paraview.simple import *
+from paraview import coprocessing
+
+# ----------------------- CoProcessor definition -----------------------
+
+def CreateCoProcessor():
+    def _CreatePipeline(coprocessor, datadescription):
+        class Pipeline:
+            input1 = coprocessor.CreateProducer(datadescription, 'mesh')
+            writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
+
+            coprocessor.RegisterWriter(writer1, filename='mesh_%t.vtm', freq=2)
+
+        return Pipeline()
+
+    class CoProcessor(coprocessing.CoProcessor):
+        def CreatePipeline(self, datadescription):
+            self.Pipeline = _CreatePipeline(self, datadescription)
+
+    coprocessor = CoProcessor()
+    freqs = {'mesh': [10]}
+    coprocessor.SetUpdateFrequencies(freqs)
+    return coprocessor
+
+#--------------------------------------------------------------
+# Global variables that will hold the pipeline for each timestep
+# Creating the CoProcessor object, doesn't actually create the ParaView pipeline.
+# It will be automatically setup when coprocessor.UpdateProducers() is called the
+# first time.
+coprocessor = CreateCoProcessor()
+
+#--------------------------------------------------------------
+# Enable Live-Visualization with ParaView
+coprocessor.EnableLiveVisualization(True)
+
+# ---------------------- Data Selection method ----------------------
+
+def RequestDataDescription(datadescription):
+    'Callback to populate the request for current timestep'
+    global coprocessor
+    if datadescription.GetForceOutput() == True:
+        # We are just going to request all fields and meshes from the simulation
+        # code/adaptor.
+        for i in range(datadescription.GetNumberOfInputDescriptions()):
+            datadescription.GetInputDescription(i).AllFieldsOn()
+            datadescription.GetInputDescription(i).GenerateMeshOn()
+        return
+
+    # setup requests for all inputs based on the requirements of the
+    # pipeline.
+    coprocessor.LoadRequestedData(datadescription)
+
+# ------------------------ Processing method ------------------------
+
+def DoCoProcessing(datadescription):
+    'Callback to do co-processing for current timestep'
+    global coprocessor
+
+    # Update the coprocessor by providing it the newly generated simulation data.
+    # If the pipeline hasn't been setup yet, this will setup the pipeline.
+    coprocessor.UpdateProducers(datadescription)
+
+    # Write output data, if appropriate.
+    coprocessor.WriteData(datadescription);
+
+    # Write image capture (Last arg: rescale lookup table), if appropriate.
+    coprocessor.WriteImages(datadescription, rescale_lookuptable=False)
+
+    # Live Visualization, if enabled.
+    coprocessor.DoLiveVisualization(datadescription, 'localhost', 22222)
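Note (not part of the patch itself): the pipeline scripts above now register writers with plain relative filenames ('mesh_%t.vtm' instead of 'insitu/mesh_%t.vtm') because the function objects resolve an output directory (outputDir, defaulting to "<case>/insitu") and hand it to vtkCPProcessor::Initialize() whenever the installed Catalyst supports a working directory (the CATALYST_HAS_WORKING_DIRECTORY probe in CMakeLists-Common.txt). Without that support, output falls back to the current working directory, as the added Caution message warns. A minimal sketch of a matching case entry follows; only the keywords debug, mkdir, outputDir, scripts and fields are taken from the dictionary lookups in this patch, while the entry name, the type/libs values and the script path are illustrative assumptions.

catalystPipeline
{
    // Hypothetical entry in the functions sub-dictionary of system/controlDict
    type            catalyst;                   // assumed functionObject type name
    libs            ("libcatalystFoam.so");     // assumed library name

    // Working directory handed to the Catalyst coprocessor
    // (default "<case>/insitu", created on the master rank)
    outputDir       "<case>/insitu";

    // Pipeline scripts, resolved via catalystCoprocess::expand()
    scripts         ( "<case>/system/scripts/writeMesh.py" );

    // Field selection read by read()
    fields          ( p U );
}

With such an entry, a writer registered as filename='mesh_%t.vtm' should end up under <case>/insitu/, matching the behaviour of the previously hard-coded 'insitu/' prefix.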