From a03e77a889b720ab19cbcfdec6918cf61e4f6993 Mon Sep 17 00:00:00 2001
From: Mark Olesen <Mark.Olesen@esi-group.com>
Date: Tue, 8 May 2018 13:18:38 +0200
Subject: [PATCH] ENH: use Catalyst Initialize with working directory (issue
 #4)

- reflects the current change being made to the vtkCPProcessor API to
  include chdir (working directory) capabilities.
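
- the Catalyst output/working directory now comes from the optional
  'outputDir' entry (default "<case>/insitu") and is passed to
  vtkCPProcessor::Initialize() when the installed ParaView supports it
  (detected via try_compile); the 'mkdir' entry remains available for
  creating additional directories.

  Illustrative function object entry (a sketch only: the 'type' and
  'libs' values are placeholders, whereas 'outputDir', 'scripts',
  'mkdir' and 'fields' are the keywords actually read by this code):

      catalyst
      {
          type        catalyst::volMesh;      // placeholder type name
          libs        ("libcatalystFoam.so"); // placeholder library

          outputDir   "<case>/insitu";  // handed to Catalyst Initialize()
          scripts     ( "<system>/scripts/slice1.py" );

          fields      ( U p );
      }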
---
 etc/allinputsgridwriter.py                    |  2 +-
 etc/printChannels.py                          | 66 +++++++++++++++++
 etc/writeArea.py                              |  2 +-
 etc/writeCloud.py                             |  2 +-
 etc/writePatches.py                           |  2 +-
 src/catalyst/CMakeLists-Common.txt            | 20 ++++++
 src/catalyst/areaMesh/catalystFaMesh.C        | 42 +++++++----
 src/catalyst/areaMesh/catalystFaMesh.H        | 16 +++--
 src/catalyst/catalystCoprocess.C              | 39 +++++++++--
 src/catalyst/catalystCoprocess.H              | 14 ++--
 src/catalyst/cloud/catalystCloud.C            | 42 +++++++----
 src/catalyst/cloud/catalystCloud.H            | 14 ++--
 src/catalyst/volMesh/catalystFvMesh.C         | 63 ++++++++---------
 src/catalyst/volMesh/catalystFvMesh.H         | 16 +++--
 .../system/scripts/writeArea.py               |  2 +-
 .../system/scripts/slice1.py                  |  4 +-
 .../system/scripts/writeMesh.py               |  2 +-
 .../system/scripts/writePatches.py            |  2 +-
 .../icoFoam/cavity/system/scripts/slice1.py   |  4 +-
 .../cavity/system/scripts/writeMesh.py        |  4 +-
 .../system/scripts/writeCloud.py              |  2 +-
 .../system/scripts/writeMesh.py               | 70 +++++++++++++++++++
 22 files changed, 326 insertions(+), 104 deletions(-)
 create mode 100644 etc/printChannels.py
 create mode 100644 tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeMesh.py

diff --git a/etc/allinputsgridwriter.py b/etc/allinputsgridwriter.py
index 88cdcdc..fcd0c77 100644
--- a/etc/allinputsgridwriter.py
+++ b/etc/allinputsgridwriter.py
@@ -40,7 +40,7 @@ def CreateCoProcessor():
           print("Don't know how to create a writer for a ", grid.GetClassName())
 
         if extension:
-          coprocessor.RegisterWriter(writer, filename='insitu/'+name+'_%t'+extension, freq=outputfrequency)
+          coprocessor.RegisterWriter(writer, filename=name+'_%t'+extension, freq=outputfrequency)
 
     return Pipeline()
 
diff --git a/etc/printChannels.py b/etc/printChannels.py
new file mode 100644
index 0000000..8fcb447
--- /dev/null
+++ b/etc/printChannels.py
@@ -0,0 +1,66 @@
+from paraview.simple import *
+from paraview import coprocessing
+
+# The frequency to output everything
+outputfrequency = 1
+
+# ----------------------- CoProcessor definition -----------------------
+
+def CreateCoProcessor():
+  def _CreatePipeline(coprocessor, datadescription):
+    class Pipeline:
+      for i in range(datadescription.GetNumberOfInputDescriptions()):
+        name = datadescription.GetInputDescriptionName(i)
+        adaptorinput = coprocessor.CreateProducer(datadescription, name)
+        grid = adaptorinput.GetClientSideObject().GetOutputDataObject(0)
+        print "Channel <" + name + "> is a ", grid.GetClassName()
+
+    return Pipeline()
+
+  class CoProcessor(coprocessing.CoProcessor):
+    def CreatePipeline(self, datadescription):
+      self.Pipeline = _CreatePipeline(self, datadescription)
+
+  return CoProcessor()
+
+#--------------------------------------------------------------
+# Global variables that will hold the pipeline for each timestep
+# Creating the CoProcessor object doesn't actually create the ParaView pipeline.
+# It will be automatically set up when coprocessor.UpdateProducers() is called the
+# first time.
+coprocessor = CreateCoProcessor()
+
+#--------------------------------------------------------------
+# Enable Live-Visualization with ParaView
+coprocessor.EnableLiveVisualization(False)
+
+
+# ---------------------- Data Selection method ----------------------
+
+def RequestDataDescription(datadescription):
+    "Callback to populate the request for current timestep"
+    global coprocessor
+    if datadescription.GetForceOutput() == True or datadescription.GetTimeStep() % outputfrequency == 0:
+        # We are just going to request all fields and meshes from the simulation
+        # code/adaptor.
+        for i in range(datadescription.GetNumberOfInputDescriptions()):
+            datadescription.GetInputDescription(i).AllFieldsOn()
+            datadescription.GetInputDescription(i).GenerateMeshOn()
+        return
+
+    # setup requests for all inputs based on the requirements of the
+    # pipeline.
+    coprocessor.LoadRequestedData(datadescription)
+
+# ------------------------ Processing method ------------------------
+
+def DoCoProcessing(datadescription):
+    "Callback to do co-processing for current timestep"
+    global coprocessor
+
+    # Update the coprocessor by providing it the newly generated simulation data.
+    # If the pipeline hasn't been setup yet, this will setup the pipeline.
+    coprocessor.UpdateProducers(datadescription)
+
+    # Write output data, if appropriate.
+    coprocessor.WriteData(datadescription);
diff --git a/etc/writeArea.py b/etc/writeArea.py
index b94085c..1911a9d 100644
--- a/etc/writeArea.py
+++ b/etc/writeArea.py
@@ -9,7 +9,7 @@ def CreateCoProcessor():
       input1 = coprocessor.CreateProducer(datadescription, 'input')
       writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
 
-      coprocessor.RegisterWriter(writer1, filename='insitu/area_%t.vtm', freq=2)
+      coprocessor.RegisterWriter(writer1, filename='area_%t.vtm', freq=2)
 
     return Pipeline()
 
diff --git a/etc/writeCloud.py b/etc/writeCloud.py
index 73512ec..db348ea 100644
--- a/etc/writeCloud.py
+++ b/etc/writeCloud.py
@@ -10,7 +10,7 @@ def CreateCoProcessor():
       writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
 
       # register writer with coprocessor, with filename + output freq
-      coprocessor.RegisterWriter(writer1, filename='insitu/cloud_%t.vtm', freq=10)
+      coprocessor.RegisterWriter(writer1, filename='cloud_%t.vtm', freq=10)
 
     return Pipeline()
 
diff --git a/etc/writePatches.py b/etc/writePatches.py
index 92b8c9f..067e41d 100644
--- a/etc/writePatches.py
+++ b/etc/writePatches.py
@@ -9,7 +9,7 @@ def CreateCoProcessor():
       input1 = coprocessor.CreateProducer(datadescription, 'patches')
       writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
 
-      coprocessor.RegisterWriter(writer1, filename='insitu/patches_%t.vtm', freq=2)
+      coprocessor.RegisterWriter(writer1, filename='patches_%t.vtm', freq=2)
 
     return Pipeline()
 
diff --git a/src/catalyst/CMakeLists-Common.txt b/src/catalyst/CMakeLists-Common.txt
index dc47eb6..4ee2eef 100644
--- a/src/catalyst/CMakeLists-Common.txt
+++ b/src/catalyst/CMakeLists-Common.txt
@@ -35,6 +35,26 @@ set(CMAKE_CXX_FLAGS_RELEASE
     "-O3 -std=c++11 -Wall -Wextra -Wno-unused-parameter -Wnon-virtual-dtor -Wno-overloaded-virtual")
 set(CMAKE_C_FLAGS_RELEASE "-O3 -std=c++11")
 
+# Feature test: does vtkCPProcessor::Initialize() accept a working directory?
+set(test_file ${CMAKE_CURRENT_BINARY_DIR}/check_initialize.cxx)
+file(WRITE ${test_file}
+  "#include <vtkCPProcessor.h>\n"
+  "int main() {\n"
+  "  vtkCPProcessor* p = vtkCPProcessor::New();\n"
+  "  p->Initialize(\"AAA\");\n"
+  "  p->Delete();\n"
+  "  return 0;\n"
+  "}")
+try_compile(CATALYST_HAS_WORKING_DIRECTORY
+  ${CMAKE_CURRENT_BINARY_DIR} ${test_file}
+  LINK_LIBRARIES vtkPVPythonCatalyst
+  CMAKE_FLAGS "-DINCLUDE_DIRECTORIES=${PARAVIEW_INCLUDE_DIRS}"
+  )
+if (CATALYST_HAS_WORKING_DIRECTORY)
+  add_definitions(-DUSE_CATALYST_WORKING_DIRECTORY)
+endif()
+
+
 # Set output library destination to OpenFOAM library directory
 set(LIBRARY_OUTPUT_PATH $ENV{FOAM_LIBBIN}
     CACHE INTERNAL
diff --git a/src/catalyst/areaMesh/catalystFaMesh.C b/src/catalyst/areaMesh/catalystFaMesh.C
index 68c4d46..02f8760 100644
--- a/src/catalyst/areaMesh/catalystFaMesh.C
+++ b/src/catalyst/areaMesh/catalystFaMesh.C
@@ -56,17 +56,34 @@ bool Foam::functionObjects::catalystFaMesh::readBasics(const dictionary& dict)
         catalystCoprocess::debug = debugLevel;
     }
 
-    fileName outputDir;
-    if (dict.readIfPresent("mkdir", outputDir))
+    if (Pstream::master())
     {
-        outputDir.expand();
-        outputDir.clean();
-        Foam::mkDir(outputDir);
+        fileName dir;
+        if (dict.readIfPresent("mkdir", dir))
+        {
+            dir.expand();
+            dir.clean();
+            Foam::mkDir(dir);
+        }
+    }
+
+    dict.readIfPresent("outputDir", outputDir_);
+    outputDir_.expand();
+    outputDir_.clean();
+    if (Pstream::master())
+    {
+        Foam::mkDir(outputDir_);
     }
 
     dict.lookup("scripts") >> scripts_;         // Python scripts
     catalystCoprocess::expand(scripts_, dict);  // Expand and check availability
 
+    if (adaptor_.valid())
+    {
+        // Run-time modification of pipeline
+        adaptor_().reset(outputDir_, scripts_);
+    }
+
     return true;
 }
 
@@ -106,12 +123,13 @@ Foam::functionObjects::catalystFaMesh::catalystFaMesh
 )
 :
     fvMeshFunctionObject(name, runTime, dict),
+    outputDir_("<case>/insitu"),
+    scripts_(),
+    adaptor_(),
     selectAreas_(),
     selectFields_(),
-    scripts_(),
     meshes_(),
-    backends_(),
-    adaptor_()
+    backends_()
 {
     if (postProcess)
     {
@@ -171,18 +189,12 @@ bool Foam::functionObjects::catalystFaMesh::read(const dictionary& dict)
 
     dict.lookup("fields") >> selectFields_;
 
-
     Info<< type() << " " << name() << ":" << nl
         <<"    areas   " << flatOutput(selectAreas_) << nl
         <<"    meshes  " << flatOutput(meshes_.sortedToc()) << nl
         <<"    fields  " << flatOutput(selectFields_) << nl
         <<"    scripts " << scripts_ << nl;
 
-    if (adaptor_.valid())
-    {
-        // Run-time modification of pipeline
-        adaptor_().reset(scripts_);
-    }
 
     // Ensure consistency - only retain backends with corresponding mesh region
     backends_.retain(meshes_);
@@ -219,7 +231,7 @@ bool Foam::functionObjects::catalystFaMesh::execute()
         if (updateAdaptor && !adaptor_.valid())
         {
             adaptor_.reset(new catalystCoprocess());
-            adaptor_().reset(scripts_);
+            adaptor_().reset(outputDir_, scripts_);
         }
     }
 
diff --git a/src/catalyst/areaMesh/catalystFaMesh.H b/src/catalyst/areaMesh/catalystFaMesh.H
index 07b5b72..17dc3f3 100644
--- a/src/catalyst/areaMesh/catalystFaMesh.H
+++ b/src/catalyst/areaMesh/catalystFaMesh.H
@@ -107,24 +107,28 @@ protected:
 
     // Protected data
 
+        //- The output directory
+        fileName outputDir_;
+
+        //- Python scripts for the catalyst pipeline
+        stringList scripts_;
+
+        //- The catalyst coprocess
+        autoPtr<catalystCoprocess> adaptor_;
+
+
         //- Requested names of areas to process
         wordRes selectAreas_;
 
         //- Names of fields to process
         wordRes selectFields_;
 
-        //- Python scripts for the catalyst pipeline
-        stringList scripts_;
-
         //- Pointers to the requested mesh regions
         HashTable<const faMesh*> meshes_;
 
         //- Backends for OpenFOAM to VTK translation (with internal caching)
         HashPtrTable<vtk::faMeshAdaptor> backends_;
 
-        //- The catalyst coprocess
-        autoPtr<catalystCoprocess> adaptor_;
-
 
     // Protected Member Functions
 
diff --git a/src/catalyst/catalystCoprocess.C b/src/catalyst/catalystCoprocess.C
index 120859f..967fdfb 100644
--- a/src/catalyst/catalystCoprocess.C
+++ b/src/catalyst/catalystCoprocess.C
@@ -222,25 +222,56 @@ void Foam::catalystCoprocess::stop()
 }
 
 
-void Foam::catalystCoprocess::reset()
+void Foam::catalystCoprocess::reset(const fileName& outputDir)
 {
+    #ifdef USE_CATALYST_WORKING_DIRECTORY
     if (coproc_ == nullptr)
     {
         coproc_ = vtkCPProcessor::New();
-        coproc_->Initialize();
+        coproc_->Initialize(outputDir.c_str());
         Info<< "Connecting ParaView Catalyst..." << endl;
     }
     else
+    {
+        coproc_->RemoveAllPipelines();
+
+        if (outputDir == coproc_->GetWorkingDirectory())
+        {
+            Info<< "Rebinding ParaView Catalyst..." << endl;
+        }
+        else
+        {
+            // Changed working directory ... redo everything.
+            coproc_->Delete();
+            coproc_ = nullptr;
+
+            reset(outputDir);
+        }
+    }
+    #else
+    if (coproc_ == nullptr)
+    {
+        coproc_ = vtkCPProcessor::New();
+        coproc_->Initialize();
+    }
+    else
     {
         coproc_->RemoveAllPipelines();
         Info<< "Rebinding ParaView Catalyst..." << endl;
     }
+    Info<< "    Caution: using current working directory" << nl
+        << "    which may not be the same as the simulation directory" << endl;
+    #endif
 }
 
 
-void Foam::catalystCoprocess::reset(const UList<string>& scripts)
+void Foam::catalystCoprocess::reset
+(
+    const fileName& outputDir,
+    const UList<string>& scripts
+)
 {
-    reset();
+    reset(outputDir);
 
     int nscript = 0;
     for (const auto& script : scripts)
diff --git a/src/catalyst/catalystCoprocess.H b/src/catalyst/catalystCoprocess.H
index 754b79f..3366a4c 100644
--- a/src/catalyst/catalystCoprocess.H
+++ b/src/catalyst/catalystCoprocess.H
@@ -208,12 +208,18 @@ public:
         //- \return True if the coprocess has been initialized.
         bool good() const;
 
-        //- Reset/initialize pipeline without pipeline scripts.
-        void reset();
+        //- Reset/initialize pipeline with output directory, but without
+        //- pipeline scripts.
+        void reset(const fileName& outputDir);
 
-        //- Reset/initialize pipeline with python scripts.
+        //- Reset/initialize pipeline with output directory and with
+        //- pipeline scripts.
         //  The script names must have already been resolved prior calling this.
-        void reset(const UList<string>& scripts);
+        void reset
+        (
+            const fileName& outputDir,
+            const UList<string>& scripts
+        );
 
         //- Query the coprocess pipelines if they should be executed at this
         //- iteration and possibly which fields they require.
diff --git a/src/catalyst/cloud/catalystCloud.C b/src/catalyst/cloud/catalystCloud.C
index 94934b6..206e769 100644
--- a/src/catalyst/cloud/catalystCloud.C
+++ b/src/catalyst/cloud/catalystCloud.C
@@ -56,17 +56,34 @@ bool Foam::functionObjects::catalystCloud::readBasics(const dictionary& dict)
         catalystCoprocess::debug = debugLevel;
     }
 
-    fileName outputDir;
-    if (dict.readIfPresent("mkdir", outputDir))
+    if (Pstream::master())
     {
-        outputDir.expand();
-        outputDir.clean();
-        Foam::mkDir(outputDir);
+        fileName dir;
+        if (dict.readIfPresent("mkdir", dir))
+        {
+            dir.expand();
+            dir.clean();
+            Foam::mkDir(dir);
+        }
+    }
+
+    dict.readIfPresent("outputDir", outputDir_);
+    outputDir_.expand();
+    outputDir_.clean();
+    if (Pstream::master())
+    {
+        Foam::mkDir(outputDir_);
     }
 
     dict.lookup("scripts") >> scripts_;         // Python scripts
     catalystCoprocess::expand(scripts_, dict);  // Expand and check availability
 
+    if (adaptor_.valid())
+    {
+        // Run-time modification of pipeline
+        adaptor_().reset(outputDir_, scripts_);
+    }
+
     return true;
 }
 
@@ -81,9 +98,11 @@ Foam::functionObjects::catalystCloud::catalystCloud
 )
 :
     fvMeshFunctionObject(name, runTime, dict),
+    outputDir_("<case>/insitu"),
+    scripts_(),
+    adaptor_(),
     selectClouds_(),
-    selectFields_(),
-    adaptor_()
+    selectFields_()
 {
     if (postProcess)
     {
@@ -124,18 +143,11 @@ bool Foam::functionObjects::catalystCloud::read(const dictionary& dict)
     selectFields_.clear();
     dict.readIfPresent("fields", selectFields_);
 
-
     Info<< type() << " " << name() << ":" << nl
         <<"    clouds  " << flatOutput(selectClouds_) << nl
         <<"    fields  " << flatOutput(selectFields_) << nl
         <<"    scripts " << scripts_ << nl;
 
-    if (adaptor_.valid())
-    {
-        // Run-time modification of pipeline
-        adaptor_().reset(scripts_);
-    }
-
     return true;
 }
 
@@ -154,7 +166,7 @@ bool Foam::functionObjects::catalystCloud::execute()
         if (!adaptor_.valid())
         {
             adaptor_.reset(new catalystCoprocess());
-            adaptor_().reset(scripts_);
+            adaptor_().reset(outputDir_, scripts_);
         }
     }
 
diff --git a/src/catalyst/cloud/catalystCloud.H b/src/catalyst/cloud/catalystCloud.H
index fb31f28..1f81166 100644
--- a/src/catalyst/cloud/catalystCloud.H
+++ b/src/catalyst/cloud/catalystCloud.H
@@ -108,11 +108,8 @@ protected:
 
     // Protected data
 
-        //- Requested names of clouds to process
-        wordRes selectClouds_;
-
-        //- Subset of cloud fields to process
-        wordRes selectFields_;
+        //- The output directory
+        fileName outputDir_;
 
         //- Python scripts for the catalyst pipeline
         stringList scripts_;
@@ -121,6 +118,13 @@ protected:
         autoPtr<catalystCoprocess> adaptor_;
 
 
+        //- Requested names of clouds to process
+        wordRes selectClouds_;
+
+        //- Subset of cloud fields to process
+        wordRes selectFields_;
+
+
     // Protected Member Functions
 
         //- Common boilerplate settings
diff --git a/src/catalyst/volMesh/catalystFvMesh.C b/src/catalyst/volMesh/catalystFvMesh.C
index 17e6990..22e8688 100644
--- a/src/catalyst/volMesh/catalystFvMesh.C
+++ b/src/catalyst/volMesh/catalystFvMesh.C
@@ -54,17 +54,34 @@ bool Foam::functionObjects::catalystFvMesh::readBasics(const dictionary& dict)
         catalystCoprocess::debug = debugLevel;
     }
 
-    fileName outputDir;
-    if (dict.readIfPresent("mkdir", outputDir))
+    if (Pstream::master())
     {
-        outputDir.expand();
-        outputDir.clean();
-        Foam::mkDir(outputDir);
+        fileName dir;
+        if (dict.readIfPresent("mkdir", dir))
+        {
+            dir.expand();
+            dir.clean();
+            Foam::mkDir(dir);
+        }
+    }
+
+    dict.readIfPresent("outputDir", outputDir_);
+    outputDir_.expand();
+    outputDir_.clean();
+    if (Pstream::master())
+    {
+        Foam::mkDir(outputDir_);
     }
 
     dict.lookup("scripts") >> scripts_;         // Python scripts
     catalystCoprocess::expand(scripts_, dict);  // Expand and check availability
 
+    if (adaptor_.valid())
+    {
+        // Run-time modification of pipeline
+        adaptor_().reset(outputDir_, scripts_);
+    }
+
     return true;
 }
 
@@ -105,12 +122,13 @@ Foam::functionObjects::catalystFvMesh::catalystFvMesh
 :
     functionObject(name),
     time_(runTime),
+    outputDir_("<case>/insitu"),
+    scripts_(),
+    adaptor_(),
     selectRegions_(),
     selectFields_(),
-    scripts_(),
     meshes_(),
-    backends_(),
-    adaptor_()
+    backends_()
 {
     if (postProcess)
     {
@@ -135,25 +153,7 @@ Foam::functionObjects::catalystFvMesh::~catalystFvMesh()
 bool Foam::functionObjects::catalystFvMesh::read(const dictionary& dict)
 {
     functionObject::read(dict);
-
-    // Common settings
-    int debugLevel = 0;
-    if (dict.readIfPresent("debug", debugLevel))
-    {
-        catalystCoprocess::debug = debugLevel;
-    }
-
-    fileName outputDir;
-    if (dict.readIfPresent("mkdir", outputDir))
-    {
-        outputDir.expand();
-        outputDir.clean();
-        Foam::mkDir(outputDir);
-    }
-
-    dict.lookup("scripts") >> scripts_;         // Python scripts
-    catalystCoprocess::expand(scripts_, dict);  // Expand and check availability
-
+    readBasics(dict);
 
     // All possible meshes
     meshes_ = time_.lookupClass<fvMesh>();
@@ -173,19 +173,12 @@ bool Foam::functionObjects::catalystFvMesh::read(const dictionary& dict)
 
     dict.lookup("fields") >> selectFields_;
 
-
     Info<< type() << " " << name() << ":" << nl
         <<"    regions " << flatOutput(selectRegions_) << nl
         <<"    meshes  " << flatOutput(meshes_.sortedToc()) << nl
         <<"    fields  " << flatOutput(selectFields_) << nl
         <<"    scripts " << scripts_ << nl;
 
-    if (adaptor_.valid())
-    {
-        // Run-time modification of pipeline
-        adaptor_().reset(scripts_);
-    }
-
     // Ensure consistency - only retain backends with corresponding mesh region
     backends_.retain(meshes_);
 
@@ -221,7 +214,7 @@ bool Foam::functionObjects::catalystFvMesh::execute()
         if (updateAdaptor && !adaptor_.valid())
         {
             adaptor_.reset(new catalystCoprocess());
-            adaptor_().reset(scripts_);
+            adaptor_().reset(outputDir_, scripts_);
         }
     }
 
diff --git a/src/catalyst/volMesh/catalystFvMesh.H b/src/catalyst/volMesh/catalystFvMesh.H
index a5a264b..3a59ef7 100644
--- a/src/catalyst/volMesh/catalystFvMesh.H
+++ b/src/catalyst/volMesh/catalystFvMesh.H
@@ -111,24 +111,28 @@ protected:
         //- Reference to the time database
         const Time& time_;
 
+        //- The output directory
+        fileName outputDir_;
+
+        //- Python scripts for the catalyst pipeline
+        stringList scripts_;
+
+        //- The catalyst coprocess
+        autoPtr<catalystCoprocess> adaptor_;
+
+
         //- Requested names of regions to process
         wordRes selectRegions_;
 
         //- Names of fields to process
         wordRes selectFields_;
 
-        //- Python scripts for the catalyst pipeline
-        stringList scripts_;
-
         //- Pointers to the requested mesh regions
         HashTable<const fvMesh*> meshes_;
 
         //- Backends for OpenFOAM to VTK translation (with internal caching)
         HashPtrTable<vtk::fvMeshAdaptor> backends_;
 
-        //- The catalyst coprocess
-        autoPtr<catalystCoprocess> adaptor_;
-
 
     // Protected Member Functions
 
diff --git a/tutorials/finiteArea/sphereSurfactantFoam/sphereTransport/system/scripts/writeArea.py b/tutorials/finiteArea/sphereSurfactantFoam/sphereTransport/system/scripts/writeArea.py
index b94085c..1911a9d 100644
--- a/tutorials/finiteArea/sphereSurfactantFoam/sphereTransport/system/scripts/writeArea.py
+++ b/tutorials/finiteArea/sphereSurfactantFoam/sphereTransport/system/scripts/writeArea.py
@@ -9,7 +9,7 @@ def CreateCoProcessor():
       input1 = coprocessor.CreateProducer(datadescription, 'input')
       writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
 
-      coprocessor.RegisterWriter(writer1, filename='insitu/area_%t.vtm', freq=2)
+      coprocessor.RegisterWriter(writer1, filename='area_%t.vtm', freq=2)
 
     return Pipeline()
 
diff --git a/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/slice1.py b/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/slice1.py
index 8237977..864dba0 100644
--- a/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/slice1.py
+++ b/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/slice1.py
@@ -18,14 +18,14 @@ def CreateCoProcessor():
       # register the writer with coprocessor
       # and provide it with information such as the filename to use,
       # how frequently to write the data, etc.
-      coprocessor.RegisterWriter(sliceWriter, filename='insitu/slice_%t.vtm', freq=10)
+      coprocessor.RegisterWriter(sliceWriter, filename='slice_%t.vtm', freq=10)
 
       meshWriter = servermanager.writers.XMLMultiBlockDataWriter(Input=mesh)
 
       # register the writer with coprocessor
       # and provide it with information such as the filename to use,
       # how frequently to write the data, etc.
-      coprocessor.RegisterWriter(meshWriter, filename='insitu/mesh_%t.vtm', freq=100)
+      coprocessor.RegisterWriter(meshWriter, filename='mesh_%t.vtm', freq=100)
 
     return Pipeline()
 
diff --git a/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writeMesh.py b/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writeMesh.py
index 43533a9..c40f022 100644
--- a/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writeMesh.py
+++ b/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writeMesh.py
@@ -9,7 +9,7 @@ def CreateCoProcessor():
       input1 = coprocessor.CreateProducer(datadescription, 'mesh')
       writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
 
-      coprocessor.RegisterWriter(writer1, filename='insitu/mesh_%t.vtm', freq=2)
+      coprocessor.RegisterWriter(writer1, filename='mesh_%t.vtm', freq=2)
 
     return Pipeline()
 
diff --git a/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writePatches.py b/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writePatches.py
index 92b8c9f..067e41d 100644
--- a/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writePatches.py
+++ b/tutorials/heatTransfer/chtMultiRegionFoam/multiRegionHeater/system/scripts/writePatches.py
@@ -9,7 +9,7 @@ def CreateCoProcessor():
       input1 = coprocessor.CreateProducer(datadescription, 'patches')
       writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
 
-      coprocessor.RegisterWriter(writer1, filename='insitu/patches_%t.vtm', freq=2)
+      coprocessor.RegisterWriter(writer1, filename='patches_%t.vtm', freq=2)
 
     return Pipeline()
 
diff --git a/tutorials/incompressible/icoFoam/cavity/system/scripts/slice1.py b/tutorials/incompressible/icoFoam/cavity/system/scripts/slice1.py
index 34c06e7..39d7d28 100644
--- a/tutorials/incompressible/icoFoam/cavity/system/scripts/slice1.py
+++ b/tutorials/incompressible/icoFoam/cavity/system/scripts/slice1.py
@@ -18,7 +18,7 @@ def CreateCoProcessor():
       # register the writer with coprocessor
       # and provide it with information such as the filename to use,
       # how frequently to write the data, etc.
-      coprocessor.RegisterWriter(sliceWriter, filename='insitu/slice_%t.vtm', freq=3)
+      coprocessor.RegisterWriter(sliceWriter, filename='slice_%t.vtm', freq=3)
 
       # create a new 'Parallel UnstructuredGrid Writer'
       meshWriter = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
@@ -26,7 +26,7 @@ def CreateCoProcessor():
       # register the writer with coprocessor
       # and provide it with information such as the filename to use,
       # how frequently to write the data, etc.
-      coprocessor.RegisterWriter(meshWriter, filename='insitu/fullgrid_%t.vtm', freq=10)
+      coprocessor.RegisterWriter(meshWriter, filename='fullgrid_%t.vtm', freq=10)
 
     return Pipeline()
 
diff --git a/tutorials/incompressible/icoFoam/cavity/system/scripts/writeMesh.py b/tutorials/incompressible/icoFoam/cavity/system/scripts/writeMesh.py
index a8f6236..5760839 100644
--- a/tutorials/incompressible/icoFoam/cavity/system/scripts/writeMesh.py
+++ b/tutorials/incompressible/icoFoam/cavity/system/scripts/writeMesh.py
@@ -9,11 +9,11 @@ def CreateCoProcessor():
       input1 = coprocessor.CreateProducer(datadescription, "mesh")
       writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
       # Register with filename to use, output frequency
-      coprocessor.RegisterWriter(writer1, filename='insitu/mesh_%t.vtm', freq=2)
+      coprocessor.RegisterWriter(writer1, filename='mesh_%t.vtm', freq=2)
 
       # input2 = coprocessor.CreateProducer(datadescription, "patches")
       # writer2 = servermanager.writers.XMLMultiBlockDataWriter(Input=input2)
-      # coprocessor.RegisterWriter(writer2, filename='insitu/patches_%t.vtm', freq=2)
+      # coprocessor.RegisterWriter(writer2, filename='patches_%t.vtm', freq=2)
 
     return Pipeline()
 
diff --git a/tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeCloud.py b/tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeCloud.py
index 73512ec..db348ea 100644
--- a/tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeCloud.py
+++ b/tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeCloud.py
@@ -10,7 +10,7 @@ def CreateCoProcessor():
       writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
 
       # register writer with coprocessor, with filename + output freq
-      coprocessor.RegisterWriter(writer1, filename='insitu/cloud_%t.vtm', freq=10)
+      coprocessor.RegisterWriter(writer1, filename='cloud_%t.vtm', freq=10)
 
     return Pipeline()
 
diff --git a/tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeMesh.py b/tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeMesh.py
new file mode 100644
index 0000000..c40f022
--- /dev/null
+++ b/tutorials/lagrangian/coalChemistryFoam/simplifiedSiwek/system/scripts/writeMesh.py
@@ -0,0 +1,70 @@
+from paraview.simple import *
+from paraview import coprocessing
+
+# ----------------------- CoProcessor definition -----------------------
+
+def CreateCoProcessor():
+  def _CreatePipeline(coprocessor, datadescription):
+    class Pipeline:
+      input1 = coprocessor.CreateProducer(datadescription, 'mesh')
+      writer1 = servermanager.writers.XMLMultiBlockDataWriter(Input=input1)
+
+      coprocessor.RegisterWriter(writer1, filename='mesh_%t.vtm', freq=2)
+
+    return Pipeline()
+
+  class CoProcessor(coprocessing.CoProcessor):
+    def CreatePipeline(self, datadescription):
+      self.Pipeline = _CreatePipeline(self, datadescription)
+
+  coprocessor = CoProcessor()
+  freqs = {'mesh': [10]}
+  coprocessor.SetUpdateFrequencies(freqs)
+  return coprocessor
+
+#--------------------------------------------------------------
+# Global variables that will hold the pipeline for each timestep
+# Creating the CoProcessor object doesn't actually create the ParaView pipeline.
+# It will be automatically set up when coprocessor.UpdateProducers() is called the
+# first time.
+coprocessor = CreateCoProcessor()
+
+#--------------------------------------------------------------
+# Enable Live-Visualization with ParaView
+coprocessor.EnableLiveVisualization(True)
+
+# ---------------------- Data Selection method ----------------------
+
+def RequestDataDescription(datadescription):
+    'Callback to populate the request for current timestep'
+    global coprocessor
+    if datadescription.GetForceOutput() == True:
+        # We are just going to request all fields and meshes from the simulation
+        # code/adaptor.
+        for i in range(datadescription.GetNumberOfInputDescriptions()):
+            datadescription.GetInputDescription(i).AllFieldsOn()
+            datadescription.GetInputDescription(i).GenerateMeshOn()
+        return
+
+    # setup requests for all inputs based on the requirements of the
+    # pipeline.
+    coprocessor.LoadRequestedData(datadescription)
+
+# ------------------------ Processing method ------------------------
+
+def DoCoProcessing(datadescription):
+    'Callback to do co-processing for current timestep'
+    global coprocessor
+
+    # Update the coprocessor by providing it the newly generated simulation data.
+    # If the pipeline hasn't been setup yet, this will setup the pipeline.
+    coprocessor.UpdateProducers(datadescription)
+
+    # Write output data, if appropriate.
+    coprocessor.WriteData(datadescription);
+
+    # Write image capture (Last arg: rescale lookup table), if appropriate.
+    coprocessor.WriteImages(datadescription, rescale_lookuptable=False)
+
+    # Live Visualization, if enabled.
+    coprocessor.DoLiveVisualization(datadescription, 'localhost', 22222)
-- 
GitLab