diff --git a/applications/solvers/combustion/PDRFoam/PDRModels/turbulence/PDRkEpsilon/PDRkEpsilon.H b/applications/solvers/combustion/PDRFoam/PDRModels/turbulence/PDRkEpsilon/PDRkEpsilon.H
index 5e919efcc440848a1fbf9454698022e1ef2bdc98..5cd98cd9d63c5d7e1cdbe18557253e9fe7046ab3 100644
--- a/applications/solvers/combustion/PDRFoam/PDRModels/turbulence/PDRkEpsilon/PDRkEpsilon.H
+++ b/applications/solvers/combustion/PDRFoam/PDRModels/turbulence/PDRkEpsilon/PDRkEpsilon.H
@@ -124,11 +124,6 @@ public:
 
     // Member Functions
 
-        tmp<volScalarField> mut() const
-        {
-            return mut_;
-        }
-
         //- Return the effective diffusivity for k
         tmp<volScalarField> DkEff() const
         {
@@ -147,41 +142,44 @@ public:
             );
         }
 
-        //- Return the effective turbulent thermal diffusivity
-        tmp<volScalarField> alphaEff() const
+        //- Return the turbulence viscosity
+        virtual tmp<volScalarField> mut() const
         {
-            return tmp<volScalarField>
-            (
-                new volScalarField("alphaEff", alphat_ + alpha())
-            );
+            return mut_;
+        }
+
+        //- Return the turbulence thermal diffusivity
+        virtual tmp<volScalarField> alphat() const
+        {
+            return alphat_;
         }
 
         //- Return the turbulence kinetic energy
-        tmp<volScalarField> k() const
+        virtual tmp<volScalarField> k() const
         {
             return k_;
         }
 
         //- Return the turbulence kinetic energy dissipation rate
-        tmp<volScalarField> epsilon() const
+        virtual tmp<volScalarField> epsilon() const
         {
             return epsilon_;
         }
 
         //- Return the Reynolds stress tensor
-        tmp<volSymmTensorField> R() const;
+        virtual tmp<volSymmTensorField> R() const;
 
         //- Return the effective stress tensor including the laminar stress
-        tmp<volSymmTensorField> devRhoReff() const;
+        virtual tmp<volSymmTensorField> devRhoReff() const;
 
         //- Return the source term for the momentum equation
-        tmp<fvVectorMatrix> divDevRhoReff(volVectorField& U) const;
+        virtual tmp<fvVectorMatrix> divDevRhoReff(volVectorField& U) const;
 
         //- Solve the turbulence equations and correct the turbulence viscosity
-        void correct();
+        virtual void correct();
 
         //- Read turbulenceProperties dictionary
-        bool read();
+        virtual bool read();
 };
 
 
diff --git a/applications/solvers/incompressible/simpleFoam/pEqn.H b/applications/solvers/incompressible/simpleFoam/pEqn.H
index dd9cae30a9c3a94d4be9c1bd3d5ab2df2d65f998..207c514d44155ccf7c6687f497fd1d2f8339acac 100644
--- a/applications/solvers/incompressible/simpleFoam/pEqn.H
+++ b/applications/solvers/incompressible/simpleFoam/pEqn.H
@@ -40,4 +40,3 @@
     // Momentum corrector
     U -= fvc::grad(p)/AU;
     U.correctBoundaryConditions();
-
diff --git a/applications/utilities/mesh/generation/snappyHexMesh/snappyHexMeshDict b/applications/utilities/mesh/generation/snappyHexMesh/snappyHexMeshDict
index c7f2d5b6f3ca297dda04026929420315ac798cb4..783469ee7d95e2a6dc24b9e287d2dcecd96e90bc 100644
--- a/applications/utilities/mesh/generation/snappyHexMesh/snappyHexMeshDict
+++ b/applications/utilities/mesh/generation/snappyHexMesh/snappyHexMeshDict
@@ -40,6 +40,10 @@ geometry
     {
         type triSurfaceMesh;
 
+        //tolerance   1E-6;   // optional: non-default tolerance on intersections
+        //maxTreeDepth 10;    // optional: depth of octree. Decrease only in case
+                              // of memory limitations.
+
         // Per region the patchname. If not provided will be <name>_<region>.
         regions
         {
diff --git a/applications/utilities/mesh/manipulation/setSet/setSet.C b/applications/utilities/mesh/manipulation/setSet/setSet.C
index 7833b4ec24d35259934e217eeedaf30d866ad351..cf24a1c4472510382195b7b004c2b1962dc5ac03 100644
--- a/applications/utilities/mesh/manipulation/setSet/setSet.C
+++ b/applications/utilities/mesh/manipulation/setSet/setSet.C
@@ -94,7 +94,7 @@ void backup
 {
     if (fromSet.size())
     {
-        Pout<< "    Backing up " << fromName << " into " << toName << endl;
+        Info<< "    Backing up " << fromName << " into " << toName << endl;
 
         topoSet::New(setType, mesh, toName, fromSet)().write();
     }
@@ -525,7 +525,7 @@ bool doCommand
         {
             topoSet& currentSet = currentSetPtr();
 
-            Pout<< "    Set:" << currentSet.name()
+            Info<< "    Set:" << currentSet.name()
                 << "  Size:" << currentSet.size()
                 << "  Action:" << actionName
                 << endl;
@@ -622,7 +622,7 @@ bool doCommand
                       + ".vtk"
                     );
 
-                    Pout<< "    Writing " << currentSet.name()
+                    Info<< "    Writing " << currentSet.name()
                         << " (size " << currentSet.size() << ") to "
                         << currentSet.instance()/currentSet.local()
                            /currentSet.name()
@@ -634,7 +634,7 @@ bool doCommand
                 }
                 else
                 {
-                    Pout<< "    Writing " << currentSet.name()
+                    Info<< "    Writing " << currentSet.name()
                         << " (size " << currentSet.size() << ") to "
                         << currentSet.instance()/currentSet.local()
                            /currentSet.name() << endl << endl;
@@ -683,7 +683,7 @@ enum commandStatus
 
 void printMesh(const Time& runTime, const polyMesh& mesh)
 {
-    Pout<< "Time:" << runTime.timeName()
+    Info<< "Time:" << runTime.timeName()
         << "  cells:" << mesh.nCells()
         << "  faces:" << mesh.nFaces()
         << "  points:" << mesh.nPoints()
@@ -703,19 +703,19 @@ commandStatus parseType
 {
     if (setType.empty())
     {
-        Pout<< "Type 'help' for usage information" << endl;
+        Info<< "Type 'help' for usage information" << endl;
 
         return INVALID;
     }
     else if (setType == "help")
     {
-        printHelp(Pout);
+        printHelp(Info);
 
         return INVALID;
     }
     else if (setType == "list")
     {
-        printAllSets(mesh, Pout);
+        printAllSets(mesh, Info);
 
         return INVALID;
     }
@@ -726,7 +726,7 @@ commandStatus parseType
 
         label nearestIndex = Time::findClosestTimeIndex(Times, requestedTime);
 
-        Pout<< "Changing time from " << runTime.timeName()
+        Info<< "Changing time from " << runTime.timeName()
             << " to " << Times[nearestIndex].name()
             << endl;
 
@@ -737,24 +737,24 @@ commandStatus parseType
         {
             case polyMesh::UNCHANGED:
             {
-                Pout<< "    mesh not changed." << endl;
+                Info<< "    mesh not changed." << endl;
                 break;
             }
             case polyMesh::POINTS_MOVED:
             {
-                Pout<< "    points moved; topology unchanged." << endl;
+                Info<< "    points moved; topology unchanged." << endl;
                 break;
             }
             case polyMesh::TOPO_CHANGE:
             {
-                Pout<< "    topology changed; patches unchanged." << nl
+                Info<< "    topology changed; patches unchanged." << nl
                     << "    ";
                 printMesh(runTime, mesh);
                 break;
             }
             case polyMesh::TOPO_PATCH_CHANGE:
             {
-                Pout<< "    topology changed and patches changed." << nl
+                Info<< "    topology changed and patches changed." << nl
                     << "    ";
                 printMesh(runTime, mesh);
 
@@ -773,7 +773,7 @@ commandStatus parseType
     }
     else if (setType == "quit")
     {
-        Pout<< "Quitting ..." << endl;
+        Info<< "Quitting ..." << endl;
 
         return QUIT;
     }
@@ -864,7 +864,7 @@ int main(int argc, char *argv[])
     printMesh(runTime, mesh);
 
     // Print current sets
-    printAllSets(mesh, Pout);
+    printAllSets(mesh, Info);
 
 
 
@@ -874,7 +874,7 @@ int main(int argc, char *argv[])
     {
         fileName batchFile(args.option("batch"));
 
-        Pout<< "Reading commands from file " << batchFile << endl;
+        Info<< "Reading commands from file " << batchFile << endl;
 
         // we cannot handle .gz files
         if (!isFile(batchFile, false))
@@ -888,11 +888,11 @@ int main(int argc, char *argv[])
 #if READLINE != 0
     else if (!read_history(historyFile))
     {
-        Pout<< "Successfully read history from " << historyFile << endl;
+        Info<< "Successfully read history from " << historyFile << endl;
     }
 #endif
 
-    Pout<< "Please type 'help', 'quit' or a set command after prompt." << endl;
+    Info<< "Please type 'help', 'quit' or a set command after prompt." << endl;
 
     bool ok = true;
 
@@ -916,7 +916,7 @@ int main(int argc, char *argv[])
         {
             if (!fileStreamPtr->good())
             {
-                Pout<< "End of batch file" << endl;
+                Info<< "End of batch file" << endl;
                 break;
             }
 
@@ -924,7 +924,7 @@ int main(int argc, char *argv[])
 
             if (rawLine.size())
             {
-                Pout<< "Doing:" << rawLine << endl;
+                Info<< "Doing:" << rawLine << endl;
             }
         }
         else
@@ -945,7 +945,7 @@ int main(int argc, char *argv[])
             }
 #           else
             {
-                Pout<< "Command>" << flush;
+                Info<< "Command>" << flush;
                 std::getline(std::cin, rawLine);
             }
 #           endif
@@ -992,7 +992,7 @@ int main(int argc, char *argv[])
         delete fileStreamPtr;
     }
 
-    Pout<< "\nEnd\n" << endl;
+    Info<< "\nEnd\n" << endl;
 
     return 0;
 }
diff --git a/applications/utilities/mesh/manipulation/stitchMesh/stitchMesh.C b/applications/utilities/mesh/manipulation/stitchMesh/stitchMesh.C
index 70927ca8f34e8b6b677bab279668faba69f809b8..553ea07138efbe9203492a52e424801328bbc182 100644
--- a/applications/utilities/mesh/manipulation/stitchMesh/stitchMesh.C
+++ b/applications/utilities/mesh/manipulation/stitchMesh/stitchMesh.C
@@ -96,6 +96,7 @@ void checkPatch(const polyBoundaryMesh& bMesh, const word& name)
 int main(int argc, char *argv[])
 {
     argList::noParallel();
+#   include "addRegionOption.H"
 
     argList::validArgs.append("masterPatch");
     argList::validArgs.append("slavePatch");
@@ -109,7 +110,7 @@ int main(int argc, char *argv[])
 #   include "setRootCase.H"
 #   include "createTime.H"
     runTime.functionObjects().off();
-#   include "createMesh.H"
+#   include "createNamedMesh.H"
     const word oldInstance = mesh.pointsInstance();
 
 
diff --git a/applications/utilities/miscellaneous/foamFormatConvert/foamFormatConvert.C b/applications/utilities/miscellaneous/foamFormatConvert/foamFormatConvert.C
index 7e4363ffb0583d847bbbbf6e150225a9fcff3914..cd51599ca1013e4a4cf1ba7188a9cfc895cda2ee 100644
--- a/applications/utilities/miscellaneous/foamFormatConvert/foamFormatConvert.C
+++ b/applications/utilities/miscellaneous/foamFormatConvert/foamFormatConvert.C
@@ -31,6 +31,14 @@ Description
 
     Mainly used to convert binary mesh/field files to ASCII.
 
+    Problem: any zero-size List written in binary gets written as '0'.
+    When the file is then read back as a dictionary this is interpreted
+    as a label. This is (usually) not a problem for patch fields since
+    these carry a 'uniform' or 'nonuniform' prefix. However, zone contents
+    are labelLists, not labelFields, and these are read incorrectly. For
+    now the workaround is to detect the known keywords in the zones and
+    rewrite the dictionary entries as labelLists.
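+
+    For example, a faceZone with an empty flipMap written in binary is
+    read back with an entry like
+        flipMap         0;
+    which the dictionary parser treats as a label rather than an empty
+    boolList.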
+
 \*---------------------------------------------------------------------------*/
 
 #include "argList.H"
@@ -56,6 +64,82 @@ namespace Foam
 }
 
 
+// Hack to do zones which have Lists in them. See above.
+bool writeZones(const word& name, Time& runTime)
+{
+    IOobject io
+    (
+        name,
+        runTime.timeName(),
+        polyMesh::meshSubDir,
+        runTime,
+        IOobject::MUST_READ,
+        IOobject::NO_WRITE,
+        false
+    );
+
+    bool writeOk = false;
+
+    if (io.headerOk())
+    {
+        Info<< "        Reading " << io.headerClassName()
+            << " : " << name << endl;
+
+        // Switch off type checking (for reading e.g. faceZones as
+        // generic list of dictionaries).
+        const word oldTypeName = IOPtrList<entry>::typeName;
+        const_cast<word&>(IOPtrList<entry>::typeName) = word::null;
+
+        IOPtrList<entry> meshObject(io);
+
+        forAll(meshObject, i)
+        {
+            if (meshObject[i].isDict())
+            {
+                dictionary& d = meshObject[i].dict();
+
+                if (d.found("faceLabels"))
+                {
+                    d.set("faceLabels", labelList(d.lookup("faceLabels")));
+                }
+
+                if (d.found("flipMap"))
+                {
+                    d.set("flipMap", boolList(d.lookup("flipMap")));
+                }
+
+                if (d.found("cellLabels"))
+                {
+                    d.set("cellLabels", labelList(d.lookup("cellLabels")));
+                }
+
+                if (d.found("pointLabels"))
+                {
+                    d.set("pointLabels", labelList(d.lookup("pointLabels")));
+                }
+            }
+        }
+
+        const_cast<word&>(IOPtrList<entry>::typeName) = oldTypeName;
+        // Fake type back to what was in the file
+        const_cast<word&>(meshObject.type()) = io.headerClassName();
+
+        Info<< "        Writing " << name << endl;
+
+        // Force writing as ascii
+        writeOk = meshObject.regIOobject::writeObject
+        (
+            IOstream::ASCII,
+            IOstream::currentVersion,
+            runTime.writeCompression()
+        );
+    }
+
+    return writeOk;
+}
+
+
+
 // Main program:
 
 int main(int argc, char *argv[])
@@ -76,9 +160,19 @@ int main(int argc, char *argv[])
         writeMeshObject<labelIOList>("neighbour", runTime);
         writeMeshObject<faceIOList>("faces", runTime);
         writeMeshObject<pointIOField>("points", runTime);
-        writeMeshObject<IOPtrList<entry> >("cellZones", runTime);
-        writeMeshObject<IOPtrList<entry> >("faceZones", runTime);
-        writeMeshObject<IOPtrList<entry> >("pointZones", runTime);
+        writeMeshObject<labelIOList>("pointProcAddressing", runTime);
+        writeMeshObject<labelIOList>("faceProcAddressing", runTime);
+        writeMeshObject<labelIOList>("cellProcAddressing", runTime);
+        writeMeshObject<labelIOList>("boundaryProcAddressing", runTime);
+
+        if (runTime.writeFormat() == IOstream::ASCII)
+        {
+            // Only do zones when converting from binary to ascii; the other
+            // way round gives problems since we work at the dictionary level.
+            writeZones("cellZones", runTime);
+            writeZones("faceZones", runTime);
+            writeZones("pointZones", runTime);
+        }
 
         // Get list of objects from the database
         IOobjectList objects(runTime, runTime.timeName());
diff --git a/applications/utilities/postProcessing/dataConversion/foamToTecplot360/foamToTecplot360.C b/applications/utilities/postProcessing/dataConversion/foamToTecplot360/foamToTecplot360.C
index 6c1065a489bd03ba9ba0a144eb594ecdaf3efef3..44384b7efe560600e4a7736e13445f07631b426c 100644
--- a/applications/utilities/postProcessing/dataConversion/foamToTecplot360/foamToTecplot360.C
+++ b/applications/utilities/postProcessing/dataConversion/foamToTecplot360/foamToTecplot360.C
@@ -1077,83 +1077,92 @@ int main(int argc, char *argv[])
             {
                 const faceZone& pp = zones[zoneI];
 
-                const indirectPrimitivePatch ipp
-                (
-                    IndirectList<face>(mesh.faces(), pp),
-                    mesh.points()
-                );
-
-                writer.writePolygonalZone
-                (
-                    pp.name(),
-                    strandID++, //1+patchIDs.size()+zoneI,    //strandID,
-                    ipp,
-                    allVarLocation
-                );
-
-                // Write coordinates
-                writer.writeField(ipp.localPoints().component(0)());
-                writer.writeField(ipp.localPoints().component(1)());
-                writer.writeField(ipp.localPoints().component(2)());
-
-                // Write all volfields
-                forAll(vsf, i)
+                if (pp.size() > 0)
                 {
-                    writer.writeField
+                    const indirectPrimitivePatch ipp
                     (
-                        writer.getFaceField
-                        (
-                            linearInterpolate(vsf[i])(),
-                            pp
-                        )()
+                        IndirectList<face>(mesh.faces(), pp),
+                        mesh.points()
                     );
-                }
-                forAll(vvf, i)
-                {
-                    writer.writeField
+
+                    writer.writePolygonalZone
                     (
-                        writer.getFaceField
-                        (
-                            linearInterpolate(vvf[i])(),
-                            pp
-                        )()
+                        pp.name(),
+                        strandID++, //1+patchIDs.size()+zoneI,    //strandID,
+                        ipp,
+                        allVarLocation
                     );
-                }
-                forAll(vSpheretf, i)
-                {
-                    writer.writeField
-                    (
-                        writer.getFaceField
+
+                    // Write coordinates
+                    writer.writeField(ipp.localPoints().component(0)());
+                    writer.writeField(ipp.localPoints().component(1)());
+                    writer.writeField(ipp.localPoints().component(2)());
+
+                    // Write all volfields
+                    forAll(vsf, i)
+                    {
+                        writer.writeField
                         (
-                            linearInterpolate(vSpheretf[i])(),
-                            pp
-                        )()
-                    );
-                }
-                forAll(vSymmtf, i)
-                {
-                    writer.writeField
-                    (
-                        writer.getFaceField
+                            writer.getFaceField
+                            (
+                                linearInterpolate(vsf[i])(),
+                                pp
+                            )()
+                        );
+                    }
+                    forAll(vvf, i)
+                    {
+                        writer.writeField
                         (
-                            linearInterpolate(vSymmtf[i])(),
-                            pp
-                        )()
-                    );
+                            writer.getFaceField
+                            (
+                                linearInterpolate(vvf[i])(),
+                                pp
+                            )()
+                        );
+                    }
+                    forAll(vSpheretf, i)
+                    {
+                        writer.writeField
+                        (
+                            writer.getFaceField
+                            (
+                                linearInterpolate(vSpheretf[i])(),
+                                pp
+                            )()
+                        );
+                    }
+                    forAll(vSymmtf, i)
+                    {
+                        writer.writeField
+                        (
+                            writer.getFaceField
+                            (
+                                linearInterpolate(vSymmtf[i])(),
+                                pp
+                            )()
+                        );
+                    }
+                    forAll(vtf, i)
+                    {
+                        writer.writeField
+                        (
+                            writer.getFaceField
+                            (
+                                linearInterpolate(vtf[i])(),
+                                pp
+                            )()
+                        );
+                    }
+
+                    writer.writeConnectivity(ipp);
                 }
-                forAll(vtf, i)
+                else
                 {
-                    writer.writeField
-                    (
-                        writer.getFaceField
-                        (
-                            linearInterpolate(vtf[i])(),
-                            pp
-                        )()
-                    );
+                    Info<< "    Skipping zero sized faceZone " << zoneI
+                        << "\t" << pp.name()
+                        << nl << endl;
                 }
-
-                writer.writeConnectivity(ipp);
             }
         }
 
diff --git a/etc/apps/paraview3/bashrc b/etc/apps/paraview3/bashrc
index 7209dc0da8367746ca5eb9a2008258a60bbfef8e..bbeb6c0ebdad592e39ca3d8b4bbc413b50f2bd71 100644
--- a/etc/apps/paraview3/bashrc
+++ b/etc/apps/paraview3/bashrc
@@ -68,7 +68,7 @@ fi
 if [ -r $ParaView_DIR ]
 then
     export PATH=$ParaView_DIR/bin:$PATH
-    export PV_PLUGIN_PATH=$FOAM_LIBBIN
+    export PV_PLUGIN_PATH=$FOAM_LIBBIN/paraview
 fi
 
 unset cmake ParaView_PYTHON_DIR
diff --git a/etc/apps/paraview3/cshrc b/etc/apps/paraview3/cshrc
index 9160edf0f89918b2294b404c9bf12395c0ccc213..33c863215ecca5118d4eb1b249264731919bc623 100644
--- a/etc/apps/paraview3/cshrc
+++ b/etc/apps/paraview3/cshrc
@@ -62,7 +62,7 @@ endif
 
 if ( -r $ParaView_INST_DIR ) then
     set path=($ParaView_DIR/bin $path)
-    setenv PV_PLUGIN_PATH $FOAM_LIBBIN
+    setenv PV_PLUGIN_PATH $FOAM_LIBBIN/paraview
 endif
 
 unset cmake paraviewMajor paraviewPython
diff --git a/src/OpenFOAM/db/IOobject/IOobjectReadHeader.C b/src/OpenFOAM/db/IOobject/IOobjectReadHeader.C
index 2816b40d59eb3b42800e10ea2bcdc21a32bf3a0c..2f5bf1e946c5249732432e63f296a9c8ceea9dc2 100644
--- a/src/OpenFOAM/db/IOobject/IOobjectReadHeader.C
+++ b/src/OpenFOAM/db/IOobject/IOobjectReadHeader.C
@@ -91,7 +91,7 @@ bool Foam::IOobject::readHeader(Istream& is)
             << "First token could not be read or is not the keyword 'FoamFile'"
             << nl << nl << "Check header is of the form:" << nl << endl;
 
-        writeHeader(SeriousError);
+        writeHeader(Info);
 
         return false;
     }
diff --git a/src/OpenFOAM/fields/pointPatchFields/pointPatchField/pointConstraint/pointConstraint.H b/src/OpenFOAM/fields/pointPatchFields/pointPatchField/pointConstraint/pointConstraint.H
index 5e28ac1e1439785bfa8d388092f70e52a9df6494..10aa63f2c9f629bb707d1e9ac5faab30b1a362fc 100644
--- a/src/OpenFOAM/fields/pointPatchFields/pointPatchField/pointConstraint/pointConstraint.H
+++ b/src/OpenFOAM/fields/pointPatchFields/pointPatchField/pointConstraint/pointConstraint.H
@@ -77,6 +77,9 @@ public:
         //- Apply and accumulate the effect of the given constraint direction
         inline void applyConstraint(const vector& cd);
 
+        //- Combine constraints
+        inline void combine(const pointConstraint&);
+
         //- Return the accumulated constraint transformation tensor
         inline tensor constraintTransformation() const;
 };
diff --git a/src/OpenFOAM/fields/pointPatchFields/pointPatchField/pointConstraint/pointConstraintI.H b/src/OpenFOAM/fields/pointPatchFields/pointPatchField/pointConstraint/pointConstraintI.H
index a0af05b5afad4c5530bcabd0f8ab02b0496c0acb..c488bf603e57e6e6eb79f6712492b54cf52f8c18 100644
--- a/src/OpenFOAM/fields/pointPatchFields/pointPatchField/pointConstraint/pointConstraintI.H
+++ b/src/OpenFOAM/fields/pointPatchFields/pointPatchField/pointConstraint/pointConstraintI.H
@@ -69,6 +69,47 @@ void Foam::pointConstraint::applyConstraint(const vector& cd)
 }
 
 
+void Foam::pointConstraint::combine(const pointConstraint& pc)
+{
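+    // first() holds the number of constrained directions (0=unconstrained,
+    // 1=constrained to a plane, 2=constrained to a line, 3=fully
+    // constrained); second() holds the plane normal/line direction.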
+    if (first() == 0)
+    {
+        operator=(pc);
+    }
+    else if (first() == 1)
+    {
+        // Save single normal
+        vector n = second();
+        // Apply to the supplied point constraint
+        operator=(pc);
+        applyConstraint(n);
+    }
+    else if (first() == 2)
+    {
+        if (pc.first() == 0)
+        {}
+        else if (pc.first() == 1)
+        {
+            applyConstraint(pc.second());
+        }
+        else if (pc.first() == 2)
+        {
+            // Both constrained to line. Same (+-)direction?
+            if (mag(second() & pc.second()) <= (1.0-1e-3))
+            {
+                // Different directions
+                first() = 3;
+                second() = vector::zero;
+            }
+        }
+        else
+        {
+            first() = 3;
+            second() = vector::zero;
+        }
+    }
+}
+
+
 Foam::tensor Foam::pointConstraint::constraintTransformation() const
 {
     if (first() == 0)
diff --git a/src/OpenFOAM/meshes/pointMesh/pointPatches/derived/coupled/coupledFacePointPatch.H b/src/OpenFOAM/meshes/pointMesh/pointPatches/derived/coupled/coupledFacePointPatch.H
index a77af7af305f5713ee7f6736b29a7e60f7c8c6f7..ec5ae840e4f8a01f0bb377a0c79838ce90717e4e 100644
--- a/src/OpenFOAM/meshes/pointMesh/pointPatches/derived/coupled/coupledFacePointPatch.H
+++ b/src/OpenFOAM/meshes/pointMesh/pointPatches/derived/coupled/coupledFacePointPatch.H
@@ -132,7 +132,7 @@ public:
             //  associated with any faces
             virtual const labelList& loneMeshPoints() const;
 
-            //- Return point unit normals.  Not impelemented.
+            //- Return point unit normals. Not implemented.
             virtual const vectorField& pointNormals() const;
 };
 
diff --git a/src/OpenFOAM/meshes/polyMesh/polyPatches/basic/coupled/coupledPolyPatch.C b/src/OpenFOAM/meshes/polyMesh/polyPatches/basic/coupled/coupledPolyPatch.C
index ee2d966836c27c04e1e2edf97d0e6d13c329076d..8e18f68c771482b8b073b4fdaed7bde8310d415b 100644
--- a/src/OpenFOAM/meshes/polyMesh/polyPatches/basic/coupled/coupledPolyPatch.C
+++ b/src/OpenFOAM/meshes/polyMesh/polyPatches/basic/coupled/coupledPolyPatch.C
@@ -199,13 +199,20 @@ Foam::scalarField Foam::coupledPolyPatch::calcFaceTol
 
         const face& f = faces[faceI];
 
+        // 1. Calculate a typical size of the face: use the maximum distance
+        //    to the face centre.
         scalar maxLenSqr = -GREAT;
+        // 2. As a measure of the truncation error when comparing two
+        //    coordinates, use SMALL * the maximum component.
+        scalar maxCmpt = -GREAT;
 
         forAll(f, fp)
         {
-            maxLenSqr = max(maxLenSqr, magSqr(points[f[fp]] - cc));
+            const point& pt = points[f[fp]];
+            maxLenSqr = max(maxLenSqr, magSqr(pt - cc));
+            maxCmpt = max(maxCmpt, cmptMax(cmptMag(pt)));
         }
-        tols[faceI] = matchTol * Foam::sqrt(maxLenSqr);
+        tols[faceI] = max(SMALL*maxCmpt, matchTol*Foam::sqrt(maxLenSqr));
     }
     return tols;
 }
diff --git a/src/dynamicMesh/meshCut/directions/directions.C b/src/dynamicMesh/meshCut/directions/directions.C
index b50069d606077aab9299408e7c778abae08cfb09..b77d2cefa45a088706e0893a4f03de9ab1ce128d 100644
--- a/src/dynamicMesh/meshCut/directions/directions.C
+++ b/src/dynamicMesh/meshCut/directions/directions.C
@@ -446,19 +446,4 @@ Foam::directions::directions
 }
 
 
-// * * * * * * * * * * * * * * * * Destructor  * * * * * * * * * * * * * * * //
-
-
-// * * * * * * * * * * * * * * * Member Functions  * * * * * * * * * * * * * //
-
-
-// * * * * * * * * * * * * * * * Member Operators  * * * * * * * * * * * * * //
-
-
-// * * * * * * * * * * * * * * * Friend Functions  * * * * * * * * * * * * * //
-
-
-// * * * * * * * * * * * * * * * Friend Operators  * * * * * * * * * * * * * //
-
-
 // ************************************************************************* //
diff --git a/src/dynamicMesh/motionSmoother/polyMeshGeometry/polyMeshGeometry.C b/src/dynamicMesh/motionSmoother/polyMeshGeometry/polyMeshGeometry.C
index f42f2a4ef4130059d8aad52c65824d7e0e8b3a79..f777081d03097dfc72b6307ba8d7306441a1fae7 100644
--- a/src/dynamicMesh/motionSmoother/polyMeshGeometry/polyMeshGeometry.C
+++ b/src/dynamicMesh/motionSmoother/polyMeshGeometry/polyMeshGeometry.C
@@ -824,7 +824,7 @@ bool Foam::polyMeshGeometry::checkFaceSkewness
             // (i.e. treat as if mirror cell on other side)
 
             vector faceNormal = faceAreas[faceI];
-            faceNormal /= mag(faceNormal) + VSMALL;
+            faceNormal /= mag(faceNormal) + ROOTVSMALL;
 
             vector dOwn = faceCentres[faceI] - cellCentres[own[faceI]];
 
@@ -834,7 +834,7 @@ bool Foam::polyMeshGeometry::checkFaceSkewness
 
             scalar skewness =
                 mag(faceCentres[faceI] - faceIntersection)
-                /(2*mag(dWall) + VSMALL);
+                /(2*mag(dWall) + ROOTVSMALL);
 
             // Check if the skewness vector is greater than the PN vector.
             // This does not cause trouble but is a good indication of a poor
diff --git a/src/finiteVolume/fields/fvPatchFields/constraint/cyclic/cyclicFvPatchField.H b/src/finiteVolume/fields/fvPatchFields/constraint/cyclic/cyclicFvPatchField.H
index 062ffa1d86e711d5450e1f3ed49b09516ca99144..5e179da0f970de0864767f7521aacfca024af034 100644
--- a/src/finiteVolume/fields/fvPatchFields/constraint/cyclic/cyclicFvPatchField.H
+++ b/src/finiteVolume/fields/fvPatchFields/constraint/cyclic/cyclicFvPatchField.H
@@ -142,7 +142,7 @@ public:
 
         // Access
 
-            //- Retirn local reference cast into the cyclic patch
+            //- Return local reference cast into the cyclic patch
             const cyclicFvPatch& cyclicPatch() const
             {
                 return cyclicPatch_;
diff --git a/src/finiteVolume/fvMesh/extendedStencil/cellToCell/fullStencils/cellToCellStencil.H b/src/finiteVolume/fvMesh/extendedStencil/cellToCell/fullStencils/cellToCellStencil.H
index 7fa48ccecb174de777cb03c66693317336c7303e..f8b94c8d5f24f0b59b6c8c064a3a41a96653aa9f 100644
--- a/src/finiteVolume/fvMesh/extendedStencil/cellToCell/fullStencils/cellToCellStencil.H
+++ b/src/finiteVolume/fvMesh/extendedStencil/cellToCell/fullStencils/cellToCellStencil.H
@@ -95,7 +95,7 @@ protected:
         class unionEqOp
         {
             public:
-            void operator()( labelList& x, const labelList& y ) const;
+            void operator()(labelList& x, const labelList& y) const;
         };
 
         //- Collect cell neighbours of faces in global numbering
diff --git a/src/lagrangian/intermediate/clouds/derived/BasicReactingMultiphaseCloud/BasicReactingMultiphaseCloud.H b/src/lagrangian/intermediate/clouds/derived/BasicReactingMultiphaseCloud/BasicReactingMultiphaseCloud.H
index f02475a316fea3f5882ead651d7dbb52dc40600e..3a8d69c2ef8913594261d5ce256680f86d665325 100644
--- a/src/lagrangian/intermediate/clouds/derived/BasicReactingMultiphaseCloud/BasicReactingMultiphaseCloud.H
+++ b/src/lagrangian/intermediate/clouds/derived/BasicReactingMultiphaseCloud/BasicReactingMultiphaseCloud.H
@@ -42,13 +42,31 @@ Description
 
 namespace Foam
 {
-    typedef ReactingMultiphaseCloud<BasicReactingParcel<constGasThermoPhysics> >
+    typedef ReactingMultiphaseCloud
+        <
+            BasicReactingMultiphaseParcel
+            <
+                constGasThermoPhysics
+            >
+        >
         constThermoReactingMultiphaseCloud;
 
-    typedef ReactingMultiphaseCloud<BasicReactingParcel<gasThermoPhysics> >
+    typedef ReactingMultiphaseCloud
+        <
+            BasicReactingMultiphaseParcel
+            <
+                gasThermoPhysics
+            >
+        >
         thermoReactingMultiphaseCloud;
 
-    typedef ReactingMultiphaseCloud<BasicReactingParcel<icoPoly8ThermoPhysics> >
+    typedef ReactingMultiphaseCloud
+        <
+            BasicReactingMultiphaseParcel
+            <
+                icoPoly8ThermoPhysics
+            >
+        >
         icoPoly8ThermoReactingMultiphaseCloud;
 }
 
diff --git a/src/lagrangian/intermediate/parcels/Templates/ReactingMultiphaseParcel/ReactingMultiphaseParcel.C b/src/lagrangian/intermediate/parcels/Templates/ReactingMultiphaseParcel/ReactingMultiphaseParcel.C
index 5c3d3d06856b632567259732526b6344f5907513..c1914e5ed50eb5616717a6fa26ba9b30d3b1e784 100644
--- a/src/lagrangian/intermediate/parcels/Templates/ReactingMultiphaseParcel/ReactingMultiphaseParcel.C
+++ b/src/lagrangian/intermediate/parcels/Templates/ReactingMultiphaseParcel/ReactingMultiphaseParcel.C
@@ -401,7 +401,7 @@ void Foam::ReactingMultiphaseParcel<ParcelType>::calc
             td.cloud().hcTrans()[cellI] +=
                 np0
                *dMassGas[i]
-               *td.cloud().mcCarrierThermo().speciesData()[gid].H(T0);
+               *td.cloud().mcCarrierThermo().speciesData()[gid].Hc();
         }
         forAll(YLiquid_, i)
         {
@@ -410,7 +410,7 @@ void Foam::ReactingMultiphaseParcel<ParcelType>::calc
             td.cloud().hcTrans()[cellI] +=
                 np0
                *dMassLiquid[i]
-               *td.cloud().mcCarrierThermo().speciesData()[gid].H(T0);
+               *td.cloud().mcCarrierThermo().speciesData()[gid].Hc();
         }
 /*
         // No mapping between solid components and carrier phase
@@ -421,7 +421,7 @@ void Foam::ReactingMultiphaseParcel<ParcelType>::calc
             td.cloud().hcTrans()[cellI] +=
                 np0
                *dMassSolid[i]
-               *td.cloud().mcCarrierThermo().speciesData()[gid].H(T0);
+               *td.cloud().mcCarrierThermo().speciesData()[gid].Hc();
         }
 */
         forAll(dMassSRCarrier, i)
@@ -430,7 +430,7 @@ void Foam::ReactingMultiphaseParcel<ParcelType>::calc
             td.cloud().hcTrans()[cellI] +=
                 np0
                *dMassSRCarrier[i]
-               *td.cloud().mcCarrierThermo().speciesData()[i].H(T0);
+               *td.cloud().mcCarrierThermo().speciesData()[i].Hc();
         }
 
         // Update momentum transfer
diff --git a/src/lagrangian/intermediate/parcels/Templates/ReactingParcel/ReactingParcel.C b/src/lagrangian/intermediate/parcels/Templates/ReactingParcel/ReactingParcel.C
index 524b4d00d5d103820851e8ee993c1185fa329935..92525ea2b1d96bb4a375acd4d7c888287d1eb6f5 100644
--- a/src/lagrangian/intermediate/parcels/Templates/ReactingParcel/ReactingParcel.C
+++ b/src/lagrangian/intermediate/parcels/Templates/ReactingParcel/ReactingParcel.C
@@ -122,7 +122,7 @@ void Foam::ReactingParcel<ParcelType>::correctSurfaceValues
     }
 
     // Far field carrier  molar fractions
-    scalarField Xinf(Y_.size());
+    scalarField Xinf(td.cloud().mcCarrierThermo().speciesData().size());
 
     forAll(Xinf, i)
     {
diff --git a/src/mesh/autoMesh/autoHexMesh/autoHexMeshDriver/autoLayerDriver.C b/src/mesh/autoMesh/autoHexMesh/autoHexMeshDriver/autoLayerDriver.C
index cc49e48939e1f138451812d8dfcc703b6ce29e4a..acd1673392526df125e58a60a61f72b94ab4c9bc 100644
--- a/src/mesh/autoMesh/autoHexMesh/autoHexMeshDriver/autoLayerDriver.C
+++ b/src/mesh/autoMesh/autoHexMesh/autoHexMeshDriver/autoLayerDriver.C
@@ -2490,13 +2490,13 @@ void Foam::autoLayerDriver::mergePatchFacesUndo
         << "---------------------------" << nl
-        << "    - which are on the same patch" << nl
-        << "    - which make an angle < " << layerParams.featureAngle()
+        << "- which are on the same patch" << nl
+        << "- which make an angle < " << layerParams.featureAngle()
         << " degrees"
         << nl
-        << "      (cos:" << minCos << ')' << nl
-        << "    - as long as the resulting face doesn't become concave"
+        << "  (cos:" << minCos << ')' << nl
+        << "- as long as the resulting face doesn't become concave"
         << " by more than "
         << layerParams.concaveAngle() << " degrees" << nl
-        << "      (0=straight, 180=fully concave)" << nl
+        << "  (0=straight, 180=fully concave)" << nl
         << endl;
 
     label nChanged = mergePatchFacesUndo(minCos, concaveCos, motionDict);
@@ -2546,11 +2551,6 @@ void Foam::autoLayerDriver::addLayers
     );
 
 
-    // Undistorted edge length
-    const scalar edge0Len = meshRefiner_.meshCutter().level0EdgeLength();
-    const labelList& cellLevel = meshRefiner_.meshCutter().cellLevel();
-
-
     // Point-wise extrusion data
     // ~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -2612,6 +2612,10 @@ void Foam::autoLayerDriver::addLayers
         // Disable extrusion on warped faces
         // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
+        // Undistorted edge length
+        const scalar edge0Len = meshRefiner_.meshCutter().level0EdgeLength();
+        const labelList& cellLevel = meshRefiner_.meshCutter().cellLevel();
+
         handleWarpedFaces
         (
             pp,
@@ -2651,6 +2655,10 @@ void Foam::autoLayerDriver::addLayers
     }
 
 
+    // Undistorted edge length
+    const scalar edge0Len = meshRefiner_.meshCutter().level0EdgeLength();
+    const labelList& cellLevel = meshRefiner_.meshCutter().cellLevel();
+
     // Determine (wanted) point-wise layer thickness and expansion ratio
     scalarField thickness(pp().nPoints());
     scalarField minThickness(pp().nPoints());
diff --git a/src/mesh/autoMesh/autoHexMesh/meshRefinement/meshRefinement.C b/src/mesh/autoMesh/autoHexMesh/meshRefinement/meshRefinement.C
index 909cf3e6834c3bba3e2ca80831be81ba4b5893e4..c4ea71f4346f7757813e5e22f67407c423872d98 100644
--- a/src/mesh/autoMesh/autoHexMesh/meshRefinement/meshRefinement.C
+++ b/src/mesh/autoMesh/autoHexMesh/meshRefinement/meshRefinement.C
@@ -54,6 +54,7 @@ License
 #include "Random.H"
 #include "searchableSurfaces.H"
 #include "treeBoundBox.H"
+#include "zeroGradientFvPatchFields.H"
 
 // * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
 
@@ -2128,7 +2129,8 @@ void Foam::meshRefinement::dumpRefinementLevel() const
             false
         ),
         mesh_,
-        dimensionedScalar("zero", dimless, 0)
+        dimensionedScalar("zero", dimless, 0),
+        zeroGradientFvPatchScalarField::typeName
     );
 
     const labelList& cellLevel = meshCutter_.cellLevel();
diff --git a/src/mesh/autoMesh/autoHexMesh/refinementSurfaces/refinementSurfaces.C b/src/mesh/autoMesh/autoHexMesh/refinementSurfaces/refinementSurfaces.C
index c910b4c1b57a72b61bc583952bf793a07c6f86ef..0f81dff2f5e477815eda5188a35852163057cc9e 100644
--- a/src/mesh/autoMesh/autoHexMesh/refinementSurfaces/refinementSurfaces.C
+++ b/src/mesh/autoMesh/autoHexMesh/refinementSurfaces/refinementSurfaces.C
@@ -496,23 +496,63 @@ Foam::labelList Foam::refinementSurfaces::getClosedNamedSurfaces() const
 }
 
 
-// Count number of triangles per surface region
-Foam::labelList Foam::refinementSurfaces::countRegions(const triSurface& s)
-{
-    const geometricSurfacePatchList& regions = s.patches();
-
-    labelList nTris(regions.size(), 0);
-
-    forAll(s, triI)
-    {
-        nTris[s[triI].region()]++;
-    }
-    return nTris;
-}
+// // Count number of triangles per surface region
+// Foam::labelList Foam::refinementSurfaces::countRegions(const triSurface& s)
+// {
+//     const geometricSurfacePatchList& regions = s.patches();
+// 
+//     labelList nTris(regions.size(), 0);
+// 
+//     forAll(s, triI)
+//     {
+//         nTris[s[triI].region()]++;
+//     }
+//     return nTris;
+// }
+
+
+// // Pre-calculate the refinement level for every element
+// void Foam::refinementSurfaces::wantedRefinementLevel
+// (
+//     const shellSurfaces& shells,
+//     const label surfI,
+//     const List<pointIndexHit>& info,    // Indices
+//     const pointField& ctrs,             // Representative coordinate
+//     labelList& minLevelField
+// ) const
+// {
+//     const searchableSurface& geom = allGeometry_[surfaces_[surfI]];
+// 
+//     // Get per element the region
+//     labelList region;
+//     geom.getRegion(info, region);
+// 
+//     // Initialise fields to region wise minLevel
+//     minLevelField.setSize(ctrs.size());
+//     minLevelField = -1;
+// 
+//     forAll(minLevelField, i)
+//     {
+//         if (info[i].hit())
+//         {
+//             minLevelField[i] = minLevel(surfI, region[i]);
+//         }
+//     }
+// 
+//     // Find out if triangle inside shell with higher level
+//     // What level does shell want to refine fc to?
+//     labelList shellLevel;
+//     shells.findHigherLevel(ctrs, minLevelField, shellLevel);
+// 
+//     forAll(minLevelField, i)
+//     {
+//         minLevelField[i] = max(minLevelField[i], shellLevel[i]);
+//     }
+// }
 
 
 // Precalculate the refinement level for every element of the searchable
-// surface. This is currently hardcoded for triSurfaceMesh only.
+// surface.
 void Foam::refinementSurfaces::setMinLevelFields
 (
     const shellSurfaces& shells
@@ -522,58 +562,55 @@ void Foam::refinementSurfaces::setMinLevelFields
     {
         const searchableSurface& geom = allGeometry_[surfaces_[surfI]];
 
-        if (isA<triSurfaceMesh>(geom))
+        // Precalculation only makes sense if there are different regions
+        // (so different refinement levels are possible) and there are some
+        // elements. Ideally there should be 'enough' elements for a fine
+        // enough resolution, but for now just make sure we don't catch e.g.
+        // a searchableBox (size=6).
+        if (geom.regions().size() > 1 && geom.globalSize() > 10)
         {
-            const triSurfaceMesh& triMesh = refCast<const triSurfaceMesh>(geom);
+            // Representative local coordinates
+            const pointField ctrs = geom.coordinates();
 
-            autoPtr<triSurfaceLabelField> minLevelFieldPtr
-            (
-                new triSurfaceLabelField
+            labelList minLevelField(ctrs.size(), -1);
+            {
+                // Get the element index in a roundabout way. The problem is
+                // e.g. a distributed surface, where the local indices differ
+                // from the global ones (needed for the getRegion call).
+                List<pointIndexHit> info;
+                geom.findNearest
                 (
-                    IOobject
-                    (
-                        "minLevel",
-                        triMesh.objectRegistry::time().timeName(),  // instance
-                        "triSurface",                               // local
-                        triMesh,
-                        IOobject::NO_READ,
-                        IOobject::AUTO_WRITE
-                    ),
-                    triMesh,
-                    dimless
-                )
-            );
-            triSurfaceLabelField& minLevelField = minLevelFieldPtr();
+                    ctrs,
+                    scalarField(ctrs.size(), sqr(GREAT)),
+                    info
+                );
 
-            const triSurface& s = static_cast<const triSurface&>(triMesh);
+                // Get per element the region
+                labelList region;
+                geom.getRegion(info, region);
 
-            // Initialise fields to region wise minLevel
-            forAll(s, triI)
-            {
-                minLevelField[triI] = minLevel(surfI, s[triI].region());
+                // From the region get the surface-wise refinement level
+                forAll(minLevelField, i)
+                {
+                    if (info[i].hit())
+                    {
+                        minLevelField[i] = minLevel(surfI, region[i]);
+                    }
+                }
             }
 
             // Find out if triangle inside shell with higher level
-            pointField fc(s.size());
-            forAll(s, triI)
-            {
-                fc[triI] = s[triI].centre(s.points());
-            }
             // What level does shell want to refine fc to?
             labelList shellLevel;
-            shells.findHigherLevel(fc, minLevelField, shellLevel);
+            shells.findHigherLevel(ctrs, minLevelField, shellLevel);
 
-            forAll(minLevelField, triI)
+            forAll(minLevelField, i)
             {
-                minLevelField[triI] = max
-                (
-                    minLevelField[triI],
-                    shellLevel[triI]
-                );
+                minLevelField[i] = max(minLevelField[i], shellLevel[i]);
             }
 
-            // Store field on triMesh
-            minLevelFieldPtr.ptr()->store();
+            // Store minLevelField on surface
+            const_cast<searchableSurface&>(geom).setField(minLevelField);
         }
     }
 }
@@ -601,44 +638,89 @@ void Foam::refinementSurfaces::findHigherIntersection
         return;
     }
 
-    // Work arrays
-    labelList hitMap(identity(start.size()));
-    pointField p0(start);
-    pointField p1(end);
-    List<pointIndexHit> hitInfo(start.size());
-
-    forAll(surfaces_, surfI)
+    if (surfaces_.size() == 1)
     {
+        // Optimisation: single segmented surface. No need to duplicate
+        // point storage.
+
+        label surfI = 0;
+
         const searchableSurface& geom = allGeometry_[surfaces_[surfI]];
 
-        geom.findLineAny(p0, p1, hitInfo);
+        // Do intersection test
+        List<pointIndexHit> intersectionInfo(start.size());
+        geom.findLineAny(start, end, intersectionInfo);
 
+        // See if a cached level field is available
         labelList minLevelField;
-        if (isA<triSurfaceMesh>(geom))
+        geom.getField(intersectionInfo, minLevelField);
+        bool haveLevelField =
+        (
+            returnReduce(minLevelField.size(), sumOp<label>())
+          > 0
+        );
+
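+        // With a single region every element gets the same surface-wise
+        // minLevel, so the level field can be filled in directly even
+        // without a cached field.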
+        if (!haveLevelField && geom.regions().size() == 1)
         {
-            const triSurfaceMesh& triMesh = refCast<const triSurfaceMesh>(geom);
-            triMesh.getField
+            minLevelField = labelList
             (
-                "minLevel",
-                hitInfo,
-                minLevelField
+                intersectionInfo.size(),
+                minLevel(surfI, 0)
             );
+            haveLevelField = true;
         }
 
 
-        // Copy all hits into arguments, continue with misses
-        label newI = 0;
-        forAll(hitInfo, hitI)
+        if (haveLevelField)
+        {
+            forAll(intersectionInfo, i)
+            {
+                if
+                (
+                    intersectionInfo[i].hit()
+                 && minLevelField[i] > currentLevel[i]
+                )
+                {
+                    surfaces[i] = surfI;    // index of surface
+                    surfaceLevel[i] = minLevelField[i];
+                }
+            }
+            return;
+        }
+    }
+
+
+
+    // Work arrays
+    pointField p0(start);
+    pointField p1(end);
+    labelList intersectionToPoint(identity(start.size()));
+    List<pointIndexHit> intersectionInfo(start.size());
+
+    forAll(surfaces_, surfI)
+    {
+        const searchableSurface& geom = allGeometry_[surfaces_[surfI]];
+
+        // Do intersection test
+        geom.findLineAny(p0, p1, intersectionInfo);
+
+        // See if a cached level field is available
+        labelList minLevelField;
+        geom.getField(intersectionInfo, minLevelField);
+
+        // Copy all hits into the arguments; compact the misses in place.
+        label missI = 0;
+        forAll(intersectionInfo, i)
         {
             // Get the minLevel for the point
             label minLocalLevel = -1;
 
-            if (hitInfo[hitI].hit())
+            if (intersectionInfo[i].hit())
             {
                 // Check if minLevelField for this surface.
                 if (minLevelField.size())
                 {
-                    minLocalLevel = minLevelField[hitI];
+                    minLocalLevel = minLevelField[i];
                 }
                 else
                 {
@@ -648,36 +730,35 @@ void Foam::refinementSurfaces::findHigherIntersection
                 }
             }
 
-            label pointI = hitMap[hitI];
+
+            label pointI = intersectionToPoint[i];
 
             if (minLocalLevel > currentLevel[pointI])
             {
+                // Mark point for refinement
                 surfaces[pointI] = surfI;
                 surfaceLevel[pointI] = minLocalLevel;
             }
             else
             {
-                if (hitI != newI)
-                {
-                    hitMap[newI] = hitMap[hitI];
-                    p0[newI] = p0[hitI];
-                    p1[newI] = p1[hitI];
-                }
-                newI++;
+                p0[missI] = start[pointI];
+                p1[missI] = end[pointI];
+                intersectionToPoint[missI] = pointI;
+                missI++;
             }
         }
 
         // All done? Note that this decision should be synchronised
-        if (returnReduce(newI, sumOp<label>()) == 0)
+        if (returnReduce(missI, sumOp<label>()) == 0)
         {
             break;
         }
 
-        // Trim and continue
-        hitMap.setSize(newI);
-        p0.setSize(newI);
-        p1.setSize(newI);
-        hitInfo.setSize(newI);
+        // Trim misses
+        p0.setSize(missI);
+        p1.setSize(missI);
+        intersectionToPoint.setSize(missI);
+        intersectionInfo.setSize(missI);
     }
 }
 
diff --git a/src/mesh/autoMesh/autoHexMesh/refinementSurfaces/refinementSurfaces.H b/src/mesh/autoMesh/autoHexMesh/refinementSurfaces/refinementSurfaces.H
index 86767f57312e470bcf50f183d8a8a73d8e08642d..b1188f1dff79c78754c22f707a00758dda48b1e8 100644
--- a/src/mesh/autoMesh/autoHexMesh/refinementSurfaces/refinementSurfaces.H
+++ b/src/mesh/autoMesh/autoHexMesh/refinementSurfaces/refinementSurfaces.H
@@ -218,8 +218,8 @@ public:
                 const shellSurfaces& shells
             );
 
-            //- Helper: count number of triangles per region
-            static labelList countRegions(const triSurface&);
+            ////- Helper: count number of triangles per region
+            //static labelList countRegions(const triSurface&);
 
 
         // Searching
diff --git a/src/meshTools/indexedOctree/indexedOctree.C b/src/meshTools/indexedOctree/indexedOctree.C
index 128e6bf60022d87775e783f483f33df792d445e4..1d444eff3dae4fe208ff2159ec1d0e5772395979 100644
--- a/src/meshTools/indexedOctree/indexedOctree.C
+++ b/src/meshTools/indexedOctree/indexedOctree.C
@@ -957,6 +957,7 @@ Foam::point Foam::indexedOctree<Type>::pushPointIntoFace
     }
     else if (nFaces == 1)
     {
+        // Point is on a single face
         keepFaceID = faceIndices[0];
     }
     else
@@ -1782,16 +1783,6 @@ Foam::pointIndexHit Foam::indexedOctree<Type>::findLine
     label i = 0;
     for (; i < 100000; i++)
     {
-        if (verbose)
-        {
-            Pout<< "iter:" << i
-                << " at startPoint:" << hitInfo.rawPoint() << endl
-                << "    node:" << nodeI
-                << " octant:" << octant
-                << " bb:" << subBbox(nodeI, octant) << endl;
-        }
-
-
         // Ray-trace to end of current node. Updates point (either on triangle
         // in case of hit or on node bounding box in case of miss)
 
@@ -1808,6 +1799,19 @@ Foam::pointIndexHit Foam::indexedOctree<Type>::findLine
             )
         );
 
+        if (verbose)
+        {
+            Pout<< "iter:" << i
+                << " at current:" << hitInfo.rawPoint()
+                << " (perturbed:" << startPoint << ")" << endl
+                << "    node:" << nodeI
+                << " octant:" << octant
+                << " bb:" << subBbox(nodeI, octant) << endl;
+        }
+
+
+
+
         // Faces of current bounding box current point is on
         direction hitFaceID = 0;
 
@@ -1833,12 +1837,23 @@ Foam::pointIndexHit Foam::indexedOctree<Type>::findLine
             break;
         }
 
-        if (hitFaceID == 0)
+        if (hitFaceID == 0 || hitInfo.rawPoint() == treeEnd)
         {
             // endpoint inside the tree. Return miss.
             break;
         }
 
+        // Create a point on the other side of the face.
+        point perturbedPoint
+        (
+            pushPoint
+            (
+                octantBb,
+                hitFaceID,
+                hitInfo.rawPoint(),
+                false                   // push outside of octantBb
+            )
+        );
 
         if (verbose)
         {
@@ -1848,14 +1863,7 @@ Foam::pointIndexHit Foam::indexedOctree<Type>::findLine
                 << "    node:" << nodeI
                 << " octant:" << octant
                 << " bb:" << subBbox(nodeI, octant) << nl
-                << "    walking to neighbour containing:"
-                <<  pushPoint
-                    (
-                        octantBb,
-                        hitFaceID,
-                        hitInfo.rawPoint(),
-                        false                   // push outside of octantBb
-                    )
+                << "    walking to neighbour containing:" << perturbedPoint
                 << endl;
         }
 
@@ -1866,13 +1874,7 @@ Foam::pointIndexHit Foam::indexedOctree<Type>::findLine
 
         bool ok = walkToNeighbour
         (
-            pushPoint
-            (
-                octantBb,
-                hitFaceID,
-                hitInfo.rawPoint(),
-                false                   // push outside of octantBb
-            ),
+            perturbedPoint,
             hitFaceID,  // face(s) that hitInfo is on
 
             nodeI,
diff --git a/src/meshTools/meshSearch/meshSearch.C b/src/meshTools/meshSearch/meshSearch.C
index eb5ec14d8fcdd903845d7ed5a189ca8fe92e7093..d6346036491503553ff80bfdb879bf0a7b7aed82 100644
--- a/src/meshTools/meshSearch/meshSearch.C
+++ b/src/meshTools/meshSearch/meshSearch.C
@@ -463,7 +463,7 @@ const Foam::indexedOctree<Foam::treeDataFace>& Foam::meshSearch::boundaryTree()
 
         treeBoundBox overallBb(mesh_.points());
         Random rndGen(123456);
-        overallBb.extend(rndGen, 1E-4);
+        overallBb = overallBb.extend(rndGen, 1E-4);
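+        // Note: extend() returns the slightly extended box rather than
+        // modifying it in place, hence the assignment.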
         overallBb.min() -= point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
         overallBb.max() += point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
 
@@ -497,7 +497,7 @@ const Foam::indexedOctree<Foam::treeDataCell>& Foam::meshSearch::cellTree()
 
         treeBoundBox overallBb(mesh_.points());
         Random rndGen(123456);
-        overallBb.extend(rndGen, 1E-4);
+        overallBb = overallBb.extend(rndGen, 1E-4);
         overallBb.min() -= point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
         overallBb.max() += point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
 
@@ -531,7 +531,7 @@ const Foam::indexedOctree<Foam::treeDataPoint>&
 
         treeBoundBox overallBb(mesh_.cellCentres());
         Random rndGen(123456);
-        overallBb.extend(rndGen, 1E-4);
+        overallBb = overallBb.extend(rndGen, 1E-4);
         overallBb.min() -= point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
         overallBb.max() += point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
 
diff --git a/src/meshTools/searchableSurface/distributedTriSurfaceMesh.C b/src/meshTools/searchableSurface/distributedTriSurfaceMesh.C
index 60ad087b70a3d68239124c8a3960f79b3a4b5eec..40ad36bbc3a4e5b185388fbd006c220853d1694b 100644
--- a/src/meshTools/searchableSurface/distributedTriSurfaceMesh.C
+++ b/src/meshTools/searchableSurface/distributedTriSurfaceMesh.C
@@ -912,41 +912,6 @@ Foam::distributedTriSurfaceMesh::independentlyDistributedBbs
 }
 
 
-void Foam::distributedTriSurfaceMesh::calcBounds
-(
-    boundBox& bb,
-    label& nPoints
-) const
-{
-    // Unfortunately nPoints constructs meshPoints() so do compact version
-    // ourselves
-
-    PackedBoolList pointIsUsed(points().size());
-
-    nPoints = 0;
-    bb.min() = point(VGREAT, VGREAT, VGREAT);
-    bb.max() = point(-VGREAT, -VGREAT, -VGREAT);
-
-    const triSurface& s = static_cast<const triSurface&>(*this);
-
-    forAll(s, triI)
-    {
-        const labelledTri& f = s[triI];
-
-        forAll(f, fp)
-        {
-            label pointI = f[fp];
-            if (pointIsUsed.set(pointI, 1u))
-            {
-                bb.min() = ::Foam::min(bb.min(), points()[pointI]);
-                bb.max() = ::Foam::max(bb.max(), points()[pointI]);
-                nPoints++;
-            }
-        }
-    }
-}
-
-
 // Does any part of triangle overlap bb.
 bool Foam::distributedTriSurfaceMesh::overlaps
 (
@@ -1935,20 +1900,7 @@ void Foam::distributedTriSurfaceMesh::getNormal
 {
     if (!Pstream::parRun())
     {
-        normal.setSize(info.size());
-
-        forAll(info, i)
-        {
-            if (info[i].hit())
-            {
-                normal[i] = faceNormals()[info[i].index()];
-            }
-            else
-            {
-                // Set to what?
-                normal[i] = vector::zero;
-            }
-        }
+        triSurfaceMesh::getNormal(info, normal);
         return;
     }
 
@@ -2006,70 +1958,64 @@ void Foam::distributedTriSurfaceMesh::getNormal
 
 void Foam::distributedTriSurfaceMesh::getField
 (
-    const word& fieldName,
     const List<pointIndexHit>& info,
     labelList& values
 ) const
 {
-    const triSurfaceLabelField& fld = lookupObject<triSurfaceLabelField>
-    (
-        fieldName
-    );
-
-
     if (!Pstream::parRun())
     {
-        values.setSize(info.size());
-        forAll(info, i)
-        {
-            if (info[i].hit())
-            {
-                values[i] = fld[info[i].index()];
-            }
-        }
+        triSurfaceMesh::getField(info, values);
         return;
     }
 
+    if (foundObject<triSurfaceLabelField>("values"))
+    {
+        const triSurfaceLabelField& fld = lookupObject<triSurfaceLabelField>
+        (
+            "values"
+        );
 
-    // Get query data (= local index of triangle)
-    // ~~~~~~~~~~~~~~
 
-    labelList triangleIndex(info.size());
-    autoPtr<mapDistribute> mapPtr
-    (
-        calcLocalQueries
+        // Get query data (= local index of triangle)
+        // ~~~~~~~~~~~~~~
+
+        labelList triangleIndex(info.size());
+        autoPtr<mapDistribute> mapPtr
         (
-            info,
-            triangleIndex
-        )
-    );
-    const mapDistribute& map = mapPtr();
+            calcLocalQueries
+            (
+                info,
+                triangleIndex
+            )
+        );
+        const mapDistribute& map = mapPtr();
 
 
-    // Do my tests
-    // ~~~~~~~~~~~
+        // Do my tests
+        // ~~~~~~~~~~~
 
-    values.setSize(triangleIndex.size());
+        values.setSize(triangleIndex.size());
 
-    forAll(triangleIndex, i)
-    {
-        label triI = triangleIndex[i];
-        values[i] = fld[triI];
-    }
+        forAll(triangleIndex, i)
+        {
+            label triI = triangleIndex[i];
+            values[i] = fld[triI];
+        }
 
 
-    // Send back results
-    // ~~~~~~~~~~~~~~~~~
+        // Send back results
+        // ~~~~~~~~~~~~~~~~~
 
-    map.distribute
-    (
-        Pstream::nonBlocking,
-        List<labelPair>(0),
-        info.size(),
-        map.constructMap(),     // what to send
-        map.subMap(),           // what to receive
-        values
-    );
+        map.distribute
+        (
+            Pstream::nonBlocking,
+            List<labelPair>(0),
+            info.size(),
+            map.constructMap(),     // what to send
+            map.subMap(),           // what to receive
+            values
+        );
+    }
 }
 
 
diff --git a/src/meshTools/searchableSurface/distributedTriSurfaceMesh.H b/src/meshTools/searchableSurface/distributedTriSurfaceMesh.H
index dba72d2abf4b1491be861f6e5dc825533d9b1427..577d12e4c3544fad2b73e8b4343d5c2f80682262 100644
--- a/src/meshTools/searchableSurface/distributedTriSurfaceMesh.H
+++ b/src/meshTools/searchableSurface/distributedTriSurfaceMesh.H
@@ -217,9 +217,6 @@ private:
                 const triSurface&
             );
 
-            //- Calculate surface bounding box
-            void calcBounds(boundBox& bb, label& nPoints) const;
-
             //- Does any part of triangle overlap bb.
             static bool overlaps
             (
@@ -418,7 +415,7 @@ public:
             //  Should really be split into a routine to determine decomposition
             //  and one that does actual distribution but determining
             //  decomposition with duplicate triangle merging requires
-            //  same amoun as work as actual distribution.
+            //  same amount of work as the actual distribution.
             virtual void distribute
             (
                 const List<treeBoundBox>&,
@@ -430,14 +427,9 @@ public:
 
         // Other
 
-            //- Specific to triSurfaceMesh: from a set of hits (points and
+            //- WIP. From a set of hits (points and
             //  indices) get the specified field. Misses do not get set.
-            virtual void getField
-            (
-                const word& fieldName,
-                const List<pointIndexHit>&,
-                labelList& values
-            ) const;
+            virtual void getField(const List<pointIndexHit>&, labelList&) const;
 
             //- Subset the part of surface that is overlapping bounds.
             static triSurface overlappingSurface
diff --git a/src/meshTools/searchableSurface/searchableBox.C b/src/meshTools/searchableSurface/searchableBox.C
index a4cc593562a6460ac80f4998d7dc570c4651da71..cb854c3772552b9602a50531321c80a07268beac 100644
--- a/src/meshTools/searchableSurface/searchableBox.C
+++ b/src/meshTools/searchableSurface/searchableBox.C
@@ -229,6 +229,21 @@ const Foam::wordList& Foam::searchableBox::regions() const
 }
 
 
+Foam::pointField Foam::searchableBox::coordinates() const
+{
+    pointField ctrs(6);
+
+    const pointField pts = treeBoundBox::points();
+    const faceList& fcs = treeBoundBox::faces;
+
+    forAll(fcs, i)
+    {
+        ctrs[i] = fcs[i].centre(pts);
+    }
+    return ctrs;
+}
+
+
 Foam::pointIndexHit Foam::searchableBox::findNearest
 (
     const point& sample,
diff --git a/src/meshTools/searchableSurface/searchableBox.H b/src/meshTools/searchableSurface/searchableBox.H
index 9d1f2e8217ef3db841ebf9d1c8b4f5796c34b866..77e0ecd98d8c33b06a2d710eee10f47b1f8de70c 100644
--- a/src/meshTools/searchableSurface/searchableBox.H
+++ b/src/meshTools/searchableSurface/searchableBox.H
@@ -127,6 +127,9 @@ public:
             return 6;
         }
 
+        //- Get representative set of element coordinates
+        //  Usually the element centres (should be of length size()).
+        virtual pointField coordinates() const;
 
         // Single point queries.
 
diff --git a/src/meshTools/searchableSurface/searchableCylinder.C b/src/meshTools/searchableSurface/searchableCylinder.C
index fcaa42194aab42ce1f06f9e199f215548b92c627..0df222ccb0fc929bb882c8c9b1fafa1535097ec2 100644
--- a/src/meshTools/searchableSurface/searchableCylinder.C
+++ b/src/meshTools/searchableSurface/searchableCylinder.C
@@ -40,6 +40,14 @@ addToRunTimeSelectionTable(searchableSurface, searchableCylinder, dict);
 
 // * * * * * * * * * * * * * Private Member Functions  * * * * * * * * * * * //
 
+Foam::pointField Foam::searchableCylinder::coordinates() const
+{
+    pointField ctrs(1, 0.5*(point1_ + point2_));
+
+    return ctrs;
+}
+
+
 Foam::pointIndexHit Foam::searchableCylinder::findNearest
 (
     const point& sample,
diff --git a/src/meshTools/searchableSurface/searchableCylinder.H b/src/meshTools/searchableSurface/searchableCylinder.H
index cae0f058db2863db95ff479381d207c8ad9e57cf..98b4d91e4117eceb782b45a976928820a211c759 100644
--- a/src/meshTools/searchableSurface/searchableCylinder.H
+++ b/src/meshTools/searchableSurface/searchableCylinder.H
@@ -150,6 +150,10 @@ public:
             return 1;
         }
 
+        //- Get representative set of element coordinates
+        //  Usually the element centres (should be of length size()).
+        virtual pointField coordinates() const;
+
 
         // Multiple point queries.
 
diff --git a/src/meshTools/searchableSurface/searchablePlane.H b/src/meshTools/searchableSurface/searchablePlane.H
index 87777e3209bc6763b1e8cd155eb55ce4f3f7c4e1..c0b58cc8ac2d0c086deca4a4dda306756a295623 100644
--- a/src/meshTools/searchableSurface/searchablePlane.H
+++ b/src/meshTools/searchableSurface/searchablePlane.H
@@ -26,7 +26,7 @@ Class
     Foam::searchablePlane
 
 Description
-    Searching on plane. See plane.H
+    Searching on (infinite) plane. See plane.H
 
 SourceFiles
     searchablePlane.C
@@ -122,6 +122,14 @@ public:
             return 1;
         }
 
+        //- Get representative set of element coordinates
+        //  Usually the element centres (should be of length size()).
+        virtual pointField coordinates() const
+        {
+            //notImplemented("searchablePlane::coordinates()")
+            return pointField(1, refPoint());
+        }
+
 
         // Multiple point queries.
 
diff --git a/src/meshTools/searchableSurface/searchablePlate.H b/src/meshTools/searchableSurface/searchablePlate.H
index 7bc900f554bd3dc7a281873cf4b850077d5cf695..b05414bb6cb0e2dd764b732ff0e34f97a59539db 100644
--- a/src/meshTools/searchableSurface/searchablePlate.H
+++ b/src/meshTools/searchableSurface/searchablePlate.H
@@ -145,6 +145,13 @@ public:
             return 1;
         }
 
+        //- Get representative set of element coordinates
+        //  Usually the element centres (should be of length size()).
+        virtual pointField coordinates() const
+        {
+            return pointField(1, origin_);
+        }
+
 
         // Multiple point queries.
 
diff --git a/src/meshTools/searchableSurface/searchableSphere.H b/src/meshTools/searchableSurface/searchableSphere.H
index 297890b9db1b6e4568db5a65ca6d2055fd25777e..fb4aeb22f81c9d3c37afc69da4c549880af66362 100644
--- a/src/meshTools/searchableSurface/searchableSphere.H
+++ b/src/meshTools/searchableSurface/searchableSphere.H
@@ -133,6 +133,13 @@ public:
             return 1;
         }
 
+        //- Get representative set of element coordinates
+        //  Usually the element centres (should be of length size()).
+        virtual pointField coordinates() const
+        {
+            return pointField(1, centre_);
+        }
+
 
         // Multiple point queries.
 
diff --git a/src/meshTools/searchableSurface/searchableSurface.H b/src/meshTools/searchableSurface/searchableSurface.H
index 35b9dc9fb5a1ff9f3ba054492f7130ab2beee4a2..daf8b76daa7131344ef19d86263cda4883c1d6c9 100644
--- a/src/meshTools/searchableSurface/searchableSurface.H
+++ b/src/meshTools/searchableSurface/searchableSurface.H
@@ -190,6 +190,10 @@ public:
             return size();
         }
 
+        //- Get representative set of element coordinates
+        //  Usually the element centres (should be of length size()).
+        virtual pointField coordinates() const = 0;
+
 
         // Single point queries.
 
@@ -319,6 +323,18 @@ public:
             )
             {}
 
+            //- WIP. Store element-wise field.
+            virtual void setField(const labelList& values)
+            {}
+
+            //- WIP. From a set of hits (points and
+            //  indices) get the specified field. Misses do not get set. Return
+            //  empty field if not supported.
+            virtual void getField(const List<pointIndexHit>&, labelList& values)
+            const
+            {
+                values.clear();
+            }
 
 };
 
diff --git a/src/meshTools/searchableSurface/searchableSurfaceCollection.C b/src/meshTools/searchableSurface/searchableSurfaceCollection.C
index ba1dd32988e47a2aa53eb001e1014ad1686f83d2..9279087c1908b2298e61c15b71215646104ced6e 100644
--- a/src/meshTools/searchableSurface/searchableSurfaceCollection.C
+++ b/src/meshTools/searchableSurface/searchableSurfaceCollection.C
@@ -101,7 +101,11 @@ void Foam::searchableSurfaceCollection::findNearest
                     minDistSqr[pointI] = distSqr;
                     nearestInfo[pointI].setPoint(globalPt);
                     nearestInfo[pointI].setHit();
-                    nearestInfo[pointI].setIndex(hitInfo[pointI].index());
+                    nearestInfo[pointI].setIndex
+                    (
+                        hitInfo[pointI].index()
+                      + indexOffset_[surfI]
+                    );
                     nearestSurf[pointI] = surfI;
                 }
             }
@@ -110,6 +114,62 @@ void Foam::searchableSurfaceCollection::findNearest
 }
 
 
+// Sort hits into per-surface bins. Misses are rejected. Maintains map back
+// to position
+void Foam::searchableSurfaceCollection::sortHits
+(
+    const List<pointIndexHit>& info,
+    List<List<pointIndexHit> >& surfInfo,
+    labelListList& infoMap
+) const
+{
+    // Count hits per surface.
+    labelList nHits(subGeom_.size(), 0);
+
+    forAll(info, pointI)
+    {
+        if (info[pointI].hit())
+        {
+            label index = info[pointI].index();
+            label surfI = findLower(indexOffset_, index+1);
+            nHits[surfI]++;
+        }
+    }
+
+    // Per surface the hit
+    surfInfo.setSize(subGeom_.size());
+    // Per surface the original position
+    infoMap.setSize(subGeom_.size());
+
+    forAll(surfInfo, surfI)
+    {
+        surfInfo[surfI].setSize(nHits[surfI]);
+        infoMap[surfI].setSize(nHits[surfI]);
+    }
+    nHits = 0;
+
+    forAll(info, pointI)
+    {
+        if (info[pointI].hit())
+        {
+            label index = info[pointI].index();
+            label surfI = findLower(indexOffset_, index+1);
+
+            // Store for correct surface and adapt indices back to local
+            // ones
+            label localI = nHits[surfI]++;
+            surfInfo[surfI][localI] = pointIndexHit
+            (
+                info[pointI].hit(),
+                info[pointI].rawPoint(),
+                index-indexOffset_[surfI]
+            );
+            infoMap[surfI][localI] = pointI;
+        }
+    }
+}
+
+
 // * * * * * * * * * * * * * * * * Constructors  * * * * * * * * * * * * * * //
 
 Foam::searchableSurfaceCollection::searchableSurfaceCollection
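
The sortHits() helper added above bins hits by owning surface: indexOffset_ holds cumulative start indices (one per sub-surface plus the overall total), and findLower(indexOffset_, index+1) recovers the surface that owns a global element index. A small standalone C++ sketch of that offset-binning step, using std::upper_bound in place of Foam::findLower (illustration only, not the OpenFOAM code):

    #include <algorithm>
    #include <cstdio>
    #include <utility>
    #include <vector>

    // Given cumulative offsets {0, n0, n0+n1, ...} and a global element index,
    // return the owning surface and the index local to that surface.
    std::pair<int, int> toLocal(const std::vector<int>& offsets, int globalIndex)
    {
        // Last offset <= globalIndex (same role as findLower(offsets, index+1))
        const auto it =
            std::upper_bound(offsets.begin(), offsets.end(), globalIndex) - 1;
        const int surfI = int(it - offsets.begin());
        return std::pair<int, int>(surfI, globalIndex - offsets[surfI]);
    }

    int main()
    {
        // Two sub-surfaces of 4 and 6 elements: offsets are {0, 4, 10}
        const std::vector<int> offsets{0, 4, 10};
        for (int i : {0, 3, 4, 9})
        {
            const std::pair<int, int> s = toLocal(offsets, i);
            std::printf("global %d -> surface %d, local %d\n", i, s.first, s.second);
        }
        return 0;
    }
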
@@ -123,11 +183,13 @@ Foam::searchableSurfaceCollection::searchableSurfaceCollection
     scale_(dict.size()),
     transform_(dict.size()),
     subGeom_(dict.size()),
-    mergeSubRegions_(dict.lookup("mergeSubRegions"))
+    mergeSubRegions_(dict.lookup("mergeSubRegions")),
+    indexOffset_(dict.size()+1)
 {
     Info<< "SearchableCollection : " << name() << endl;
 
     label surfI = 0;
+    label startIndex = 0;
     forAllConstIter(dictionary, dict, iter)
     {
         if (dict.isDict(iter().keyword()))
@@ -153,8 +215,24 @@ Foam::searchableSurfaceCollection::searchableSurfaceCollection
             const searchableSurface& s =
                 io.db().lookupObject<searchableSurface>(subGeomName);
 
+            // I don't know yet how to handle the globalSize combined with
+            // regionOffset. Would cause non-consecutive indices locally
+            // if all indices offset by globalSize() of the local region...
+            if (s.size() != s.globalSize())
+            {
+                FatalErrorIn
+                (
+                    "searchableSurfaceCollection::searchableSurfaceCollection"
+                    "(const IOobject&, const dictionary&)"
+                )   << "Cannot use a distributed surface in a collection."
+                    << exit(FatalError);
+            }
+
             subGeom_.set(surfI, &const_cast<searchableSurface&>(s));
 
+            indexOffset_[surfI] = startIndex;
+            startIndex += subGeom_[surfI].size();
+
             Info<< "    instance : " << instance_[surfI] << endl;
             Info<< "    surface  : " << s.name() << endl;
             Info<< "    scale    : " << scale_[surfI] << endl;
@@ -163,10 +241,13 @@ Foam::searchableSurfaceCollection::searchableSurfaceCollection
             surfI++;
         }
     }
+    indexOffset_[surfI] = startIndex;
+
     instance_.setSize(surfI);
     scale_.setSize(surfI);
     transform_.setSize(surfI);
     subGeom_.setSize(surfI);
+    indexOffset_.setSize(surfI+1);
 }
 
 
@@ -212,12 +293,36 @@ const Foam::wordList& Foam::searchableSurfaceCollection::regions() const
 
 Foam::label Foam::searchableSurfaceCollection::size() const
 {
-    label n = 0;
+    return indexOffset_[indexOffset_.size()-1];
+}
+
+
+Foam::pointField Foam::searchableSurfaceCollection::coordinates() const
+{
+    // Get overall size
+    pointField coords(size());
+
+    // Append individual coordinates
+    label coordI = 0;
+
     forAll(subGeom_, surfI)
     {
-        n += subGeom_[surfI].size();
+        const pointField subCoords = subGeom_[surfI].coordinates();
+
+        forAll(subCoords, i)
+        {
+            coords[coordI++] = transform_[surfI].globalPosition
+            (
+                cmptMultiply
+                (
+                    subCoords[i],
+                    scale_[surfI]
+                )
+            );
+        }
     }
-    return n;
+
+    return coords;
 }
 
 
@@ -296,6 +401,11 @@ void Foam::searchableSurfaceCollection::findLine
                 );
                 info[pointI] = hitInfo[pointI];
                 info[pointI].rawPoint() = nearest[pointI];
+                info[pointI].setIndex
+                (
+                    hitInfo[pointI].index()
+                  + indexOffset_[surfI]
+                );
             }
         }
     }
@@ -397,82 +507,42 @@ void Foam::searchableSurfaceCollection::getRegion
     }
     else
     {
-        region.setSize(info.size());
-        region = -1;
+        // Multiple surfaces. Sort by surface.
 
-        // Which region did point come from. Retest for now to see which
-        // surface it originates from - crap solution! Should use global indices
-        // in index inside pointIndexHit to do this better.
+        // Per surface the hit
+        List<List<pointIndexHit> > surfInfo;
+        // Per surface the original position
+        List<List<label> > infoMap;
+        sortHits(info, surfInfo, infoMap);
 
-        pointField samples(info.size());
-        forAll(info, pointI)
-        {
-            if (info[pointI].hit())
-            {
-                samples[pointI] = info[pointI].hitPoint();
-            }
-            else
-            {
-                samples[pointI] = vector::zero;
-            }
-        }
-        //scalarField minDistSqr(info.size(), SMALL);
-        scalarField minDistSqr(info.size(), GREAT);
+        region.setSize(info.size());
+        region = -1;
 
-        labelList nearestSurf;
-        List<pointIndexHit> nearestInfo;
-        findNearest
-        (
-            samples,
-            minDistSqr,
-            nearestInfo,
-            nearestSurf
-        );
+        // Do region tests
 
-        // Check
+        if (mergeSubRegions_)
         {
-            forAll(info, pointI)
+            // Actually no need for surfInfo. Just take region for surface.
+            forAll(infoMap, surfI)
             {
-                if (info[pointI].hit() && nearestSurf[pointI] == -1)
+                const labelList& map = infoMap[surfI];
+                forAll(map, i)
                 {
-                    FatalErrorIn
-                    (
-                        "searchableSurfaceCollection::getRegion(..)"
-                    )   << "pointI:" << pointI
-                        << " sample:" << samples[pointI]
-                        << " nearest:" << nearestInfo[pointI]
-                        << " nearestsurf:" << nearestSurf[pointI]
-                        << abort(FatalError);
+                    region[map[i]] = regionOffset_[surfI];
                 }
             }
         }
-
-        forAll(subGeom_, surfI)
+        else
         {
-            // Collect points from my surface
-            labelList indices(findIndices(nearestSurf, surfI));
-
-            if (mergeSubRegions_)
-            {
-                forAll(indices, i)
-                {
-                    region[indices[i]] = regionOffset_[surfI];
-                }
-            }
-            else
+            forAll(infoMap, surfI)
             {
                 labelList surfRegion;
-                subGeom_[surfI].getRegion
-                (
-                    List<pointIndexHit>
-                    (
-                        UIndirectList<pointIndexHit>(info, indices)
-                    ),
-                    surfRegion
-                );
-                forAll(indices, i)
+                subGeom_[surfI].getRegion(surfInfo[surfI], surfRegion);
+
+                const labelList& map = infoMap[surfI];
+                forAll(map, i)
                 {
-                    region[indices[i]] = regionOffset_[surfI] + surfRegion[i];
+                    region[map[i]] = regionOffset_[surfI] + surfRegion[i];
                 }
             }
         }
@@ -494,52 +564,26 @@ void Foam::searchableSurfaceCollection::getNormal
     }
     else
     {
-        normal.setSize(info.size());
+        // Multiple surfaces. Sort by surface.
 
-        // See above - crap retest to find surface point originates from.
-        pointField samples(info.size());
-        forAll(info, pointI)
-        {
-            if (info[pointI].hit())
-            {
-                samples[pointI] = info[pointI].hitPoint();
-            }
-            else
-            {
-                samples[pointI] = vector::zero;
-            }
-        }
-        //scalarField minDistSqr(info.size(), SMALL);
-        scalarField minDistSqr(info.size(), GREAT);
-
-        labelList nearestSurf;
-        List<pointIndexHit> nearestInfo;
-        findNearest
-        (
-            samples,
-            minDistSqr,
-            nearestInfo,
-            nearestSurf
-        );
+        // Per surface the hit
+        List<List<pointIndexHit> > surfInfo;
+        // Per surface the original position
+        List<List<label> > infoMap;
+        sortHits(info, surfInfo, infoMap);
 
+        normal.setSize(info.size());
 
-        forAll(subGeom_, surfI)
+        // Do normal queries per surface
+        forAll(surfInfo, surfI)
         {
-            // Collect points from my surface
-            labelList indices(findIndices(nearestSurf, surfI));
-
             vectorField surfNormal;
-            subGeom_[surfI].getNormal
-            (
-                List<pointIndexHit>
-                (
-                    UIndirectList<pointIndexHit>(info, indices)
-                ),
-                surfNormal
-            );
-            forAll(indices, i)
+            subGeom_[surfI].getNormal(surfInfo[surfI], surfNormal);
+
+            const labelList& map = infoMap[surfI];
+            forAll(map, i)
             {
-                normal[indices[i]] = surfNormal[i];
+                normal[map[i]] = surfNormal[i];
             }
         }
     }
@@ -561,4 +605,99 @@ void Foam::searchableSurfaceCollection::getVolumeType
 }
 
 
+void Foam::searchableSurfaceCollection::distribute
+(
+    const List<treeBoundBox>& bbs,
+    const bool keepNonLocal,
+    autoPtr<mapDistribute>& faceMap,
+    autoPtr<mapDistribute>& pointMap
+)
+{
+    forAll(subGeom_, surfI)
+    {
+        // Note: Transform the bounding boxes? Something like
+        // pointField bbPoints =
+        // cmptDivide
+        // (
+        //     transform_[surfI].localPosition
+        //     (
+        //         bbs[i].points()
+        //     ),
+        //     scale_[surfI]
+        // );
+        // treeBoundBox newBb(bbPoints);
+
+        // Note: what to do with faceMap, pointMap from multiple surfaces?
+        subGeom_[surfI].distribute
+        (
+            bbs,
+            keepNonLocal,
+            faceMap,
+            pointMap
+        );
+    }
+}
+
+
+void Foam::searchableSurfaceCollection::setField(const labelList& values)
+{
+    forAll(subGeom_, surfI)
+    {
+        subGeom_[surfI].setField
+        (
+            static_cast<const labelList&>
+            (
+                SubList<label>
+                (
+                    values,
+                    subGeom_[surfI].size(),
+                    indexOffset_[surfI]
+                )
+            )
+        );
+    }
+}
+
+
+void Foam::searchableSurfaceCollection::getField
+(
+    const List<pointIndexHit>& info,
+    labelList& values
+) const
+{
+    if (subGeom_.size() == 0)
+    {}
+    else if (subGeom_.size() == 1)
+    {
+        subGeom_[0].getField(info, values);
+    }
+    else
+    {
+        // Multiple surfaces. Sort by surface.
+
+        // Per surface the hit
+        List<List<pointIndexHit> > surfInfo;
+        // Per surface the original position
+        List<List<label> > infoMap;
+        sortHits(info, surfInfo, infoMap);
+
+        values.setSize(info.size());
+        // Note: misses do not get set. Should they be zeroed (values = 0)?
+
+        // Do surface tests
+        forAll(surfInfo, surfI)
+        {
+            labelList surfValues;
+            subGeom_[surfI].getField(surfInfo[surfI], surfValues);
+
+            const labelList& map = infoMap[surfI];
+            forAll(map, i)
+            {
+                values[map[i]] = surfValues[i];
+            }
+        }
+    }
+}
+
+
 // ************************************************************************* //
diff --git a/src/meshTools/searchableSurface/searchableSurfaceCollection.H b/src/meshTools/searchableSurface/searchableSurfaceCollection.H
index 3f7ee84baa4012d2482007fa75434a8d1dbc4ca2..e0de60827831f51f04c4cdb232fe5c18c9659d02 100644
--- a/src/meshTools/searchableSurface/searchableSurfaceCollection.H
+++ b/src/meshTools/searchableSurface/searchableSurfaceCollection.H
@@ -77,6 +77,10 @@ private:
 
             Switch mergeSubRegions_;
 
+            //- Offsets for indices coming from the different surfaces
+            //  (length subGeom_.size()+1, built from each surface's size())
+            labelList indexOffset_;
+
         //- Region names
         mutable wordList regions_;
         //- From individual regions to collection regions
@@ -95,6 +99,15 @@ private:
             labelList& nearestSurf
         ) const;
 
+        //- Sort hits into per-surface bins. Misses are rejected.
+        //  Maintains map back to position
+        void sortHits
+        (
+            const List<pointIndexHit>& info,
+            List<List<pointIndexHit> >& surfInfo,
+            labelListList& infoMap
+        ) const;
+
 
         //- Disallow default bitwise copy construct
         searchableSurfaceCollection(const searchableSurfaceCollection&);
@@ -161,6 +174,10 @@ public:
         //- Range of local indices that can be returned.
         virtual label size() const;
 
+        //- Get representative set of element coordinates
+        //  Usually the element centres (should be of length size()).
+        virtual pointField coordinates() const;
+
 
         // Multiple point queries.
 
@@ -215,6 +232,27 @@ public:
                 List<volumeType>&
             ) const;
 
+        // Other
+
+            //- Set bounds of surface. Bounds currently set as list of
+            //  bounding boxes. The bounds are hints to the surface as to
+            //  the range of queries it can expect. faceMap/pointMap can be
+            //  set if the surface has done any redistribution.
+            virtual void distribute
+            (
+                const List<treeBoundBox>&,
+                const bool keepNonLocal,
+                autoPtr<mapDistribute>& faceMap,
+                autoPtr<mapDistribute>& pointMap
+            );
+
+            //- WIP. Store element-wise field.
+            virtual void setField(const labelList& values);
+
+            //- WIP. From a set of hits (points and
+            //  indices) get the specified field. Misses do not get set. Return
+            //  empty field if not supported.
+            virtual void getField(const List<pointIndexHit>&, labelList&) const;
 
         // regIOobject implementation
 
diff --git a/src/meshTools/searchableSurface/searchableSurfaceWithGaps.H b/src/meshTools/searchableSurface/searchableSurfaceWithGaps.H
index 312c74b1d77b66ce6cb4bff37b6ccbac15068b2e..64db7ee91ceabb0343e6be0d68c6f40814fc96d2 100644
--- a/src/meshTools/searchableSurface/searchableSurfaceWithGaps.H
+++ b/src/meshTools/searchableSurface/searchableSurfaceWithGaps.H
@@ -151,6 +151,13 @@ public:
             return surface().size();
         }
 
+        //- Get representative set of element coordinates
+        //  Usually the element centres (should be of length size()).
+        virtual pointField coordinates() const
+        {
+            return surface().coordinates();
+        }
+
 
         // Multiple point queries.
 
@@ -225,6 +232,41 @@ public:
             }
 
 
+        // Other
+
+            //- Set bounds of surface. Bounds currently set as list of
+            //  bounding boxes. The bounds are hints to the surface as to
+            //  the range of queries it can expect. faceMap/pointMap can be
+            //  set if the surface has done any redistribution.
+            virtual void distribute
+            (
+                const List<treeBoundBox>& bbs,
+                const bool keepNonLocal,
+                autoPtr<mapDistribute>& faceMap,
+                autoPtr<mapDistribute>& pointMap
+            )
+            {
+                subGeom_[0].distribute(bbs, keepNonLocal, faceMap, pointMap);
+            }
+
+            //- WIP. Store element-wise field.
+            virtual void setField(const labelList& values)
+            {
+                subGeom_[0].setField(values);
+            }
+
+            //- WIP. From a set of hits (points and
+            //  indices) get the specified field. Misses do not get set. Return
+            //  empty field if not supported.
+            virtual void getField
+            (
+                const List<pointIndexHit>& info,
+                labelList& values
+            ) const
+            {
+                surface().getField(info, values);
+            }
+
         // regIOobject implementation
 
             bool writeData(Ostream& os) const
diff --git a/src/meshTools/searchableSurface/triSurfaceMesh.C b/src/meshTools/searchableSurface/triSurfaceMesh.C
index 11d687aa8b6f75d86f4b191d89d9f681f8382ea9..08d367e9746252451c5ddd9cb95e3c663235e3b9 100644
--- a/src/meshTools/searchableSurface/triSurfaceMesh.C
+++ b/src/meshTools/searchableSurface/triSurfaceMesh.C
@@ -30,6 +30,7 @@ License
 #include "EdgeMap.H"
 #include "triSurfaceFields.H"
 #include "Time.H"
+#include "PackedBoolList.H"
 
 // * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
 
@@ -281,6 +282,36 @@ void Foam::triSurfaceMesh::getNextIntersections
 }
 
 
+void Foam::triSurfaceMesh::calcBounds(boundBox& bb, label& nPoints) const
+{
+    // Unfortunately nPoints() constructs meshPoints(), so do a compact
+    // version ourselves
+
+    const triSurface& s = static_cast<const triSurface&>(*this);
+
+    PackedBoolList pointIsUsed(points().size());
+
+    nPoints = 0;
+    bb = boundBox::invertedBox;
+
+    forAll(s, triI)
+    {
+        const labelledTri& f = s[triI];
+
+        forAll(f, fp)
+        {
+            label pointI = f[fp];
+            if (pointIsUsed.set(pointI, 1u))
+            {
+                bb.min() = ::Foam::min(bb.min(), points()[pointI]);
+                bb.max() = ::Foam::max(bb.max(), points()[pointI]);
+                nPoints++;
+            }
+        }
+    }
+}
+
+
 // * * * * * * * * * * * * * * * * Constructors  * * * * * * * * * * * * * * //
 
 Foam::triSurfaceMesh::triSurfaceMesh(const IOobject& io, const triSurface& s)
@@ -301,6 +332,7 @@ Foam::triSurfaceMesh::triSurfaceMesh(const IOobject& io, const triSurface& s)
     ),
     triSurface(s),
     tolerance_(indexedOctree<treeDataTriSurface>::perturbTol()),
+    maxTreeDepth_(10),
     surfaceClosed_(-1)
 {}
 
@@ -344,6 +376,7 @@ Foam::triSurfaceMesh::triSurfaceMesh(const IOobject& io)
         )
     ),
     tolerance_(indexedOctree<treeDataTriSurface>::perturbTol()),
+    maxTreeDepth_(10),
     surfaceClosed_(-1)
 {}
 
@@ -390,6 +423,7 @@ Foam::triSurfaceMesh::triSurfaceMesh
         )
     ),
     tolerance_(indexedOctree<treeDataTriSurface>::perturbTol()),
+    maxTreeDepth_(10),
     surfaceClosed_(-1)
 {
     scalar scaleFactor = 0;
@@ -410,6 +444,14 @@ Foam::triSurfaceMesh::triSurfaceMesh
         Info<< searchableSurface::name() << " : using intersection tolerance "
             << tolerance_ << endl;
     }
+
+
+    // Have optional non-standard tree-depth to limit storage.
+    if (dict.readIfPresent("maxTreeDepth", maxTreeDepth_) && maxTreeDepth_ > 0)
+    {
+        Info<< searchableSurface::name() << " : using maximum tree depth "
+            << maxTreeDepth_ << endl;
+    }
 }
 
 
@@ -431,6 +473,17 @@ void Foam::triSurfaceMesh::clearOut()
 
 // * * * * * * * * * * * * * * * Member Functions  * * * * * * * * * * * * * //
 
+Foam::pointField Foam::triSurfaceMesh::coordinates() const
+{
+    // Use copy to calculate face centres so they don't get stored
+    return PrimitivePatch<labelledTri, SubList, const pointField&>
+    (
+        SubList<labelledTri>(*this, triSurface::size()),
+        triSurface::points()
+    ).faceCentres();
+}
+
+
 void Foam::triSurfaceMesh::movePoints(const pointField& newPoints)
 {
     tree_.clear();
@@ -444,15 +497,28 @@ const Foam::indexedOctree<Foam::treeDataTriSurface>&
 {
     if (tree_.empty())
     {
+        // Calculate bb without constructing local point numbering.
+        treeBoundBox bb;
+        label nPoints;
+        calcBounds(bb, nPoints);
+
+        if (nPoints != points().size())
+        {
+            WarningIn("triSurfaceMesh::tree() const")
+                << "Surface " << searchableSurface::name()
+                << " does not have compact point numbering."
+                << " Of " << points().size() << " only " << nPoints
+                << " are used. This might give problems in some routines."
+                << endl;
+        }
+
+
         // Random number generator. Bit dodgy since not exactly random ;-)
         Random rndGen(65431);
 
         // Slightly extended bb. Slightly off-centred just so on symmetric
         // geometry there are less face/edge aligned items.
-        treeBoundBox bb
-        (
-            treeBoundBox(points(), meshPoints()).extend(rndGen, 1E-4)
-        );
+        bb = bb.extend(rndGen, 1E-4);
         bb.min() -= point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
         bb.max() += point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
 
@@ -465,9 +531,9 @@ const Foam::indexedOctree<Foam::treeDataTriSurface>&
             (
                 treeDataTriSurface(*this),
                 bb,
-                10,     // maxLevel
-                10,     // leafsize
-                3.0     // duplicity
+                maxTreeDepth_,  // maxLevel
+                10,             // leafsize
+                3.0             // duplicity
             )
         );
 
@@ -494,15 +560,17 @@ const Foam::indexedOctree<Foam::treeDataEdge>&
           + nInternalEdges()
         );
 
+        treeBoundBox bb;
+        label nPoints;
+        calcBounds(bb, nPoints);
+
         // Random number generator. Bit dodgy since not exactly random ;-)
         Random rndGen(65431);
 
         // Slightly extended bb. Slightly off-centred just so on symmetric
         // geometry there are less face/edge aligned items.
-        treeBoundBox bb
-        (
-            treeBoundBox(points(), meshPoints()).extend(rndGen, 1E-4)
-        );
+
+        bb = bb.extend(rndGen, 1E-4);
         bb.min() -= point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
         bb.max() += point(ROOTVSMALL, ROOTVSMALL, ROOTVSMALL);
 
@@ -517,10 +585,10 @@ const Foam::indexedOctree<Foam::treeDataEdge>&
                     localPoints(),  // points
                     bEdges          // selected edges
                 ),
-                bb,     // bb
-                8,      // maxLevel
-                10,     // leafsize
-                3.0     // duplicity
+                bb,                 // bb
+                maxTreeDepth_,      // maxLevel
+                10,                 // leafsize
+                3.0                 // duplicity
             )
         );
     }
@@ -754,24 +822,53 @@ void Foam::triSurfaceMesh::getNormal
 }
 
 
+void Foam::triSurfaceMesh::setField(const labelList& values)
+{
+    autoPtr<triSurfaceLabelField> fldPtr
+    (
+        new triSurfaceLabelField
+        (
+            IOobject
+            (
+                "values",
+                objectRegistry::time().timeName(),  // instance
+                "triSurface",                       // local
+                *this,
+                IOobject::NO_READ,
+                IOobject::AUTO_WRITE
+            ),
+            *this,
+            dimless,
+            labelField(values)
+        )
+    );
+
+    // Store field on triMesh
+    fldPtr.ptr()->store();
+}
+
+
 void Foam::triSurfaceMesh::getField
 (
-    const word& fieldName,
     const List<pointIndexHit>& info,
     labelList& values
 ) const
 {
-    const triSurfaceLabelField& fld = lookupObject<triSurfaceLabelField>
-    (
-        fieldName
-    );
-
-    values.setSize(info.size());
-    forAll(info, i)
+    if (foundObject<triSurfaceLabelField>("values"))
     {
-        if (info[i].hit())
+        values.setSize(info.size());
+
+        const triSurfaceLabelField& fld = lookupObject<triSurfaceLabelField>
+        (
+            "values"
+        );
+
+        forAll(info, i)
         {
-            values[i] = fld[info[i].index()];
+            if (info[i].hit())
+            {
+                values[i] = fld[info[i].index()];
+            }
         }
     }
 }
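
The reworked setField()/getField() pair above replaces the named-field argument with a fixed convention: setField() registers a triSurfaceLabelField called "values" on the surface, and getField() looks that field up and fills in one value per hit, leaving misses (and the whole output, if no field has been registered) untouched. A plain-C++ sketch of that contract with hypothetical names (not the OpenFOAM classes):

    #include <cstdio>
    #include <vector>

    struct Hit { bool hit; int index; };        // stands in for pointIndexHit

    struct Surface
    {
        bool hasValues = false;
        std::vector<int> values;                // the stored "values" field

        void setField(const std::vector<int>& v)
        {
            values = v;
            hasValues = true;
        }

        // Fill out[i] only for hits; leave misses (and everything, if no field
        // was stored) untouched - the contract of getField() in the patch.
        void getField(const std::vector<Hit>& info, std::vector<int>& out) const
        {
            if (!hasValues) return;
            out.resize(info.size());
            for (std::size_t i = 0; i < info.size(); ++i)
            {
                if (info[i].hit) out[i] = values[info[i].index];
            }
        }
    };

    int main()
    {
        Surface s;
        s.setField({10, 20, 30});
        const std::vector<Hit> hits{{true, 2}, {false, -1}, {true, 0}};
        std::vector<int> out;
        s.getField(hits, out);
        std::printf("%d %d\n", out[0], out[2]);   // 30 10
        return 0;
    }
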
diff --git a/src/meshTools/searchableSurface/triSurfaceMesh.H b/src/meshTools/searchableSurface/triSurfaceMesh.H
index e93b83ecdbb9a04046639c79b666edef979e0c72..dea7c642b896ab21f8670c2e6218b3a491e79c63 100644
--- a/src/meshTools/searchableSurface/triSurfaceMesh.H
+++ b/src/meshTools/searchableSurface/triSurfaceMesh.H
@@ -71,6 +71,9 @@ private:
         //- Optional tolerance to use in searches
         scalar tolerance_;
 
+        //- Optional max tree depth of octree
+        label maxTreeDepth_;
+
         //- Search tree (triangles)
         mutable autoPtr<indexedOctree<treeDataTriSurface> > tree_;
 
@@ -129,6 +132,11 @@ private:
         void operator=(const triSurfaceMesh&);
 
 
+protected:
+
+        //- Calculate the number of used points and their bounding box
+        void calcBounds(boundBox& bb, label& nPoints) const;
+
 public:
 
     //- Runtime type information
@@ -185,6 +193,10 @@ public:
                 return triSurface::size();
             }
 
+            //- Get representative set of element coordinates
+            //  Usually the element centres (should be of length size()).
+            virtual pointField coordinates() const;
+
             virtual void findNearest
             (
                 const pointField& sample,
@@ -236,6 +248,8 @@ public:
                 List<volumeType>&
             ) const;
 
+        // Other
+
             //- Set bounds of surface. Bounds currently set as list of
             //  bounding boxes. The bounds are hints to the surface as for
             //  the range of queries it can expect. faceMap/pointMap can be
@@ -249,16 +263,12 @@ public:
             )
             {}
 
-        // Other
+            //- WIP. Store element-wise field.
+            virtual void setField(const labelList& values);
 
-            //- Specific to triSurfaceMesh: from a set of hits (points and
+            //- WIP. From a set of hits (points and
             //  indices) get the specified field. Misses do not get set.
-            virtual void getField
-            (
-                const word& fieldName,
-                const List<pointIndexHit>&,
-                labelList& values
-            ) const;
+            virtual void getField(const List<pointIndexHit>&, labelList&) const;
 
 
         // regIOobject implementation
diff --git a/src/parallel/decompositionMethods/scotchDecomp/scotchDecomp.C b/src/parallel/decompositionMethods/scotchDecomp/scotchDecomp.C
index 5375fb529bf870cc74b1bc0ad8311c6d51d0a3a2..53931689cbe13c5e3ae0049d6459892b94add984 100644
--- a/src/parallel/decompositionMethods/scotchDecomp/scotchDecomp.C
+++ b/src/parallel/decompositionMethods/scotchDecomp/scotchDecomp.C
@@ -22,40 +22,87 @@ License
     along with OpenFOAM; if not, write to the Free Software Foundation,
     Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 
-    Default strategy:
-        Strat=b
-        {
-            job=t,
-            map=t,
-            poli=S,
-            sep=
-            (
-                m
+    From scotch forum:
+
+    By: Francois PELLEGRINI  RE: Graph mapping 'strategy' string
+    2008-08-22 10:09  Strategy handling in Scotch is a bit tricky. In order
+    not to be confused, you must have a clear view of how they are built.
+    Here are some rules:
+
+    1- Strategies are made up of "methods" which are combined by means of
+    "operators".
+
+    2- A method is of the form "m{param=value,param=value,...}", where "m"
+    is a single character (this is your first error: "f" is a method name,
+    not a parameter name).
+
+    3- There exist different sort of strategies : bipartitioning strategies,
+    mapping strategies, ordering strategies, which cannot be mixed. For
+    instance, you cannot build a bipartitioning strategy and feed it to a
+    mapping method (this is your second error).
+
+    To use the "mapCompute" routine, you must create a mapping strategy, not
+    a bipartitioning one, and so use stratGraphMap() and not
+    stratGraphBipart(). Your mapping strategy should however be based on the
+    "recursive bipartitioning" method ("b"). For instance, a simple (and
+    hence not very efficient) mapping strategy can be :
+
+    "b{sep=f}"
+
+    which computes mappings with the recursive bipartitioning method "b",
+    this latter using the Fiduccia-Mattheyses method "f" to compute its
+    separators.
+
+    If you want an exact partition (see your previous post), try
+    "b{sep=fx}".
+
+    However, these strategies are not the most efficient, as they do not
+    make use of the multi-level framework.
+
+    To use the multi-level framework, try for instance:
+
+    "b{sep=m{vert=100,low=h,asc=f}x}"
+
+    The current default mapping strategy in Scotch can be seen by using the
+    "-vs" option of program gmap. It is, to date:
+
+    b
+    {
+        job=t,
+        map=t,
+        poli=S,
+        sep=
+        (
+            m
+            {
+                asc=b
                 {
-                    asc=b
-                    {
-                        bnd=d{pass=40,dif=1,rem=1}f{move=80,pass=-1,bal=0.005},
-                        org=f{move=80,pass=-1,bal=0.005},width=3
-                    },
-                    low=h{pass=10}f{move=80,pass=-1,bal=0.0005},
-                    type=h,
-                    vert=80,
-                    rat=0.8
-                }
-              | m
+                    bnd=d{pass=40,dif=1,rem=1}f{move=80,pass=-1,bal=0.005},
+                    org=f{move=80,pass=-1,bal=0.005},
+                    width=3
+                },
+                low=h{pass=10}f{move=80,pass=-1,bal=0.0005},
+                type=h,
+                vert=80,
+                rat=0.8
+            }
+          | m
+            {
+                asc=b
                 {
-                    asc=b
-                    {
-                        bnd=d{pass=40,dif=1,rem=1}f{move=80,pass=-1,bal=0.005},
-                        org=f{move=80,pass=-1,bal=0.005},
-                        width=3},
-                        low=h{pass=10}f{move=80,pass=-1,bal=0.0005},
-                        type=h,
-                        vert=80,
-                        rat=0.8
-                }
-            )
-        }
+                    bnd=d{pass=40,dif=1,rem=1}f{move=80,pass=-1,bal=0.005},
+                    org=f{move=80,pass=-1,bal=0.005},
+                    width=3
+                },
+                low=h{pass=10}f{move=80,pass=-1,bal=0.0005},
+                type=h,
+                vert=80,
+                rat=0.8
+            }
+        )
+    }
+
+
 \*---------------------------------------------------------------------------*/
 
 #include "scotchDecomp.H"
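
The forum answer quoted above distinguishes bipartitioning from mapping strategies and suggests "b{sep=m{vert=100,low=h,asc=f}x}" as a simple multi-level mapping strategy. A hedged sketch of how such a string is typically handed to the Scotch C API (assuming libscotch's SCOTCH_stratInit/SCOTCH_stratGraphMap/SCOTCH_stratExit are available; this is not code from the patch):

    // Sketch only: assumes the libscotch headers and library are available.
    #include <cstdint>
    #include <cstdio>

    extern "C"
    {
        #include "scotch.h"
    }

    int main()
    {
        SCOTCH_Strat strat;
        if (SCOTCH_stratInit(&strat))
        {
            std::fprintf(stderr, "SCOTCH_stratInit failed\n");
            return 1;
        }

        // A mapping strategy built on recursive bipartitioning "b", as
        // recommended above - not a bipartitioning strategy.
        if (SCOTCH_stratGraphMap(&strat, "b{sep=m{vert=100,low=h,asc=f}x}"))
        {
            std::fprintf(stderr, "invalid strategy string\n");
            return 1;
        }

        // ... build a SCOTCH_Graph/SCOTCH_Arch and call SCOTCH_graphMap() ...

        SCOTCH_stratExit(&strat);
        return 0;
    }
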
@@ -126,6 +173,51 @@ Foam::label Foam::scotchDecomp::decompose
     List<int>& finalDecomp
 )
 {
+    // Dump graph
+    if (decompositionDict_.found("scotchCoeffs"))
+    {
+        const dictionary& scotchCoeffs =
+            decompositionDict_.subDict("scotchCoeffs");
+
+        if (scotchCoeffs.found("writeGraph"))
+        {
+            Switch writeGraph(scotchCoeffs.lookup("writeGraph"));
+
+            if (writeGraph)
+            {
+                OFstream str(mesh_.time().path() / mesh_.name() + ".grf");
+
+                Info<< "Dumping Scotch graph file to " << str.name() << endl
+                    << "Use this in combination with gpart." << endl;
+
+                label version = 0;
+                str << version << nl;
+                // Number of vertices
+                str << xadj.size()-1 << ' ' << adjncy.size() << nl;
+                // Numbering starts from 0
+                label baseval = 0;
+                // Has weights?
+                label hasEdgeWeights = 0;
+                label hasVertexWeights = 0;
+                label numericflag = 10*hasEdgeWeights+hasVertexWeights;
+                str << baseval << ' ' << numericflag << nl;
+                for (label cellI = 0; cellI < xadj.size()-1; cellI++)
+                {
+                    label start = xadj[cellI];
+                    label end = xadj[cellI+1];
+                    str << end-start;
+
+                    for (label i = start; i < end; i++)
+                    {
+                        str << ' ' << adjncy[i];
+                    }
+                    str << nl;
+                }
+            }
+        }
+    }
+
+
     // Strategy
     // ~~~~~~~~
 
@@ -206,51 +298,6 @@ Foam::label Foam::scotchDecomp::decompose
     check(SCOTCH_graphCheck(&grafdat), "SCOTCH_graphCheck");
 
 
-    // Dump graph
-    if (decompositionDict_.found("scotchCoeffs"))
-    {
-        const dictionary& scotchCoeffs =
-            decompositionDict_.subDict("scotchCoeffs");
-
-        if (scotchCoeffs.found("writeGraph"))
-        {
-            Switch writeGraph(scotchCoeffs.lookup("writeGraph"));
-
-            if (writeGraph)
-            {
-                OFstream str(mesh_.time().path() / mesh_.name() + ".grf");
-
-                Info<< "Dumping Scotch graph file to " << str.name() << endl
-                    << "Use this in combination with gpart." << endl;
-
-                label version = 0;
-                str << version << nl;
-                // Numer of vertices
-                str << xadj.size()-1 << ' ' << adjncy.size() << nl;
-                // Numbering starts from 0
-                label baseval = 0;
-                // Has weights?
-                label hasEdgeWeights = 0;
-                label hasVertexWeights = 0;
-                label numericflag = 10*hasEdgeWeights+hasVertexWeights;
-                str << baseval << ' ' << numericflag << nl;
-                for (label cellI = 0; cellI < xadj.size()-1; cellI++)
-                {
-                    label start = xadj[cellI];
-                    label end = xadj[cellI+1];
-                    str << end-start;
-
-                    for (label i = start; i < end; i++)
-                    {
-                        str << ' ' << adjncy[i];
-                    }
-                    str << nl;
-                }
-            }
-        }
-    }
-
-
     // Architecture
     // ~~~~~~~~~~~~
     // (fully connected network topology since using switch)
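
The writeGraph block moved earlier in this file dumps the connectivity in a plain-text layout: a version line, then "nVertices nArcs", then "baseval numericflag", then one line per vertex giving its degree followed by its neighbours. A tiny standalone C++ sketch that reproduces the same layout for a three-cell chain (illustration of the file layout only):

    // Write a three-vertex chain graph (0-1-2) in the same layout as the
    // writeGraph dump above. Expected output:
    //   0
    //   3 4
    //   0 0
    //   1 1
    //   2 0 2
    //   1 1
    #include <cstdio>
    #include <vector>

    int main()
    {
        const std::vector<int> xadj{0, 1, 3, 4};     // CSR offsets per vertex
        const std::vector<int> adjncy{1, 0, 2, 1};   // neighbours (both arcs)

        std::printf("%d\n", 0);                                    // version
        std::printf("%zu %zu\n", xadj.size() - 1, adjncy.size());  // nVert nArcs
        std::printf("%d %d\n", 0, 0);                // baseval, weight flags
        for (std::size_t v = 0; v + 1 < xadj.size(); ++v)
        {
            std::printf("%d", xadj[v + 1] - xadj[v]);              // degree
            for (int i = xadj[v]; i < xadj[v + 1]; ++i)
            {
                std::printf(" %d", adjncy[i]);
            }
            std::printf("\n");
        }
        return 0;
    }
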
diff --git a/src/postProcessing/functionObjects/field/fieldAverage/controlDict b/src/postProcessing/functionObjects/field/fieldAverage/controlDict
index 0132aa91ddc1efe904f7858375e4d2338c4cd8f6..3e186c89e28703360e1939b53039023ea506c2cf 100644
--- a/src/postProcessing/functionObjects/field/fieldAverage/controlDict
+++ b/src/postProcessing/functionObjects/field/fieldAverage/controlDict
@@ -14,7 +14,7 @@ FoamFile
 }
 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
 
-application     oodles;
+application     XXX;
 
 startFrom       latestTime;
 
@@ -54,6 +54,12 @@ functions
         // Where to load it from (if not already in solver)
         functionObjectLibs ("libfieldAverage.so");
 
+        // Function object enabled flag
+        enabled         true;
+
+        // When to output the average fields
+        outputControl   outputTime;
+
         // Fields to be averaged - runTime modifiable
         fields
         (
diff --git a/src/postProcessing/functionObjects/field/fieldValues/cellSource/cellSource.C b/src/postProcessing/functionObjects/field/fieldValues/cellSource/cellSource.C
index b94b687ba13ca8bad30ccfcd924eb404ee5b67ba..239aa4342d4c11c1dbe118387398b93eeba5c2a4 100644
--- a/src/postProcessing/functionObjects/field/fieldValues/cellSource/cellSource.C
+++ b/src/postProcessing/functionObjects/field/fieldValues/cellSource/cellSource.C
@@ -46,10 +46,13 @@ namespace Foam
         fieldValues::cellSource::sourceTypeNames_;
 
     template<>
-    const char* NamedEnum<fieldValues::cellSource::operationType, 4>::
-        names[] = {"none", "sum", "volAverage", "volIntegrate"};
+    const char* NamedEnum<fieldValues::cellSource::operationType, 5>::
+        names[] =
+        {
+            "none", "sum", "volAverage", "volIntegrate", "weightedAverage"
+        };
 
-    const NamedEnum<fieldValues::cellSource::operationType, 4>
+    const NamedEnum<fieldValues::cellSource::operationType, 5>
         fieldValues::cellSource::operationTypeNames_;
 
 }
@@ -93,7 +96,7 @@ void Foam::fieldValues::cellSource::setCellZoneCells()
 
 // * * * * * * * * * * * * Protected Member Functions  * * * * * * * * * * * //
 
-void Foam::fieldValues::cellSource::initialise()
+void Foam::fieldValues::cellSource::initialise(const dictionary& dict)
 {
     switch (source_)
     {
@@ -104,7 +107,7 @@ void Foam::fieldValues::cellSource::initialise()
         }
         default:
         {
-            FatalErrorIn("cellSource::constructCellAddressing()")
+            FatalErrorIn("cellSource::initialise()")
                 << "Unknown source type. Valid source types are:"
                 << sourceTypeNames_ << nl << exit(FatalError);
         }
@@ -114,6 +117,29 @@ void Foam::fieldValues::cellSource::initialise()
         << "    total cells  = " << cellId_.size() << nl
         << "    total volume = " << sum(filterField(mesh().V()))
         << nl << endl;
+
+    if (operation_ == opWeightedAverage)
+    {
+        dict.lookup("weightField") >> weightFieldName_;
+        if
+        (
+            obr().foundObject<volScalarField>(weightFieldName_)
+        )
+        {
+            Info<< "    weight field = " << weightFieldName_;
+        }
+        else
+        {
+            FatalErrorIn("cellSource::initialise()")
+                << type() << " " << name_ << ": "
+                << sourceTypeNames_[source_] << "(" << sourceName_ << "):"
+                << nl << "    Weight field " << weightFieldName_
+                << " must be a " << volScalarField::typeName
+                << nl << exit(FatalError);
+        }
+    }
+
+    Info<< nl << endl;
 }
 
 
@@ -172,7 +198,7 @@ void Foam::fieldValues::cellSource::read(const dictionary& dict)
     if (active_)
     {
         // no additional info to read
-        initialise();
+        initialise(dict);
     }
 }
 
@@ -183,9 +209,12 @@ void Foam::fieldValues::cellSource::write()
 
     if (active_)
     {
-        outputFilePtr_()
-            << obr_.time().value() << tab
-            << sum(filterField(mesh().V()));
+        if (Pstream::master())
+        {
+            outputFilePtr_()
+                << obr_.time().value() << tab
+                << sum(filterField(mesh().V()));
+        }
 
         forAll(fields_, i)
         {
@@ -196,7 +225,10 @@ void Foam::fieldValues::cellSource::write()
             writeValues<tensor>(fields_[i]);
         }
 
-        outputFilePtr_()<< endl;
+        if (Pstream::master())
+        {
+            outputFilePtr_()<< endl;
+        }
 
         if (log_)
         {
diff --git a/src/postProcessing/functionObjects/field/fieldValues/cellSource/cellSource.H b/src/postProcessing/functionObjects/field/fieldValues/cellSource/cellSource.H
index 89cc9ad0103807883d0d8ab396f19c2c81b4fc79..a8d04f7d9a616ae8c7b6842468d43e7dbdd1ba40 100644
--- a/src/postProcessing/functionObjects/field/fieldValues/cellSource/cellSource.H
+++ b/src/postProcessing/functionObjects/field/fieldValues/cellSource/cellSource.H
@@ -39,7 +39,7 @@ Description
         valueOutput     true;       // Write values at run-time output times?
         source          cellZone;   // Type of cell source
         sourceName      c0;
-        operation       volAverage; // none, sum, volAverage, volIntegrate
+        operation       volAverage;
         fields
         (
             p
@@ -47,6 +47,13 @@ Description
         );
     }
 
+    where operation is one of:
+      - none
+      - sum
+      - volAverage
+      - volIntegrate
+      - weightedAverage
+
 SourceFiles
     cellSource.C
 
@@ -96,11 +103,12 @@ public:
             opNone,
             opSum,
             opVolAverage,
-            opVolIntegrate
+            opVolIntegrate,
+            opWeightedAverage
         };
 
         //- Operation type names
-        static const NamedEnum<operationType, 4> operationTypeNames_;
+        static const NamedEnum<operationType, 5> operationTypeNames_;
 
 
 private:
@@ -127,23 +135,34 @@ protected:
         //- Local list of cell IDs
         labelList cellId_;
 
+        //- Weight field name - only used for opWeightedAverage mode
+        word weightFieldName_;
+
 
     // Protected member functions
 
         //- Initialise, e.g. cell addressing
-        void initialise();
+        void initialise(const dictionary& dict);
+
+        //- Return true if the field name is valid
+        template<class Type>
+        bool validField(const word& fieldName) const;
 
         //- Insert field values into values list
         template<class Type>
-        bool setFieldValues
+        tmp<Field<Type> > setFieldValues
         (
-            const word& fieldName,
-            List<Type>& values
+            const word& fieldName
         ) const;
 
         //- Apply the 'operation' to the values
         template<class Type>
-        Type processValues(const List<Type>& values) const;
+        Type processValues
+        (
+            const Field<Type>& values,
+            const scalarField& V,
+            const scalarField& weightField
+        ) const;
 
         //- Output file header information
         virtual void writeFileHeader();
diff --git a/src/postProcessing/functionObjects/field/fieldValues/cellSource/cellSourceTemplates.C b/src/postProcessing/functionObjects/field/fieldValues/cellSource/cellSourceTemplates.C
index fe495fcca6bb78ba0f0d6ef7214e10775a1bd424..41c5e70d26cc0e6b787c9b55c0d4558cc2b4f246 100644
--- a/src/postProcessing/functionObjects/field/fieldValues/cellSource/cellSourceTemplates.C
+++ b/src/postProcessing/functionObjects/field/fieldValues/cellSource/cellSourceTemplates.C
@@ -27,27 +27,16 @@ License
 #include "cellSource.H"
 #include "volFields.H"
 #include "IOList.H"
-#include "ListListOps.H"
 
 // * * * * * * * * * * * * Protected Member Functions  * * * * * * * * * * * //
 
 template<class Type>
-bool Foam::fieldValues::cellSource::setFieldValues
-(
-    const word& fieldName,
-    List<Type>& values
-) const
+bool Foam::fieldValues::cellSource::validField(const word& fieldName) const
 {
-    values.setSize(cellId_.size(), pTraits<Type>::zero);
-
     typedef GeometricField<Type, fvPatchField, volMesh> vf;
 
     if (obr_.foundObject<vf>(fieldName))
     {
-        const vf& field = obr_.lookupObject<vf>(fieldName);
-
-        values = UIndirectList<Type>(field, cellId_);
-
         return true;
     }
 
@@ -55,10 +44,29 @@ bool Foam::fieldValues::cellSource::setFieldValues
 }
 
 
+template<class Type>
+Foam::tmp<Foam::Field<Type> > Foam::fieldValues::cellSource::setFieldValues
+(
+    const word& fieldName
+) const
+{
+    typedef GeometricField<Type, fvPatchField, volMesh> vf;
+
+    if (obr_.foundObject<vf>(fieldName))
+    {
+        return filterField(obr_.lookupObject<vf>(fieldName));
+    }
+
+    return tmp<Field<Type> >(new Field<Type>(0));
+}
+
+
 template<class Type>
 Type Foam::fieldValues::cellSource::processValues
 (
-    const List<Type>& values
+    const Field<Type>& values,
+    const scalarField& V,
+    const scalarField& weightField
 ) const
 {
     Type result = pTraits<Type>::zero;
@@ -71,13 +79,17 @@ Type Foam::fieldValues::cellSource::processValues
         }
         case opVolAverage:
         {
-            tmp<scalarField> V = filterField(mesh().V());
-            result = sum(values*V())/sum(V());
+            result = sum(values*V)/sum(V);
             break;
         }
         case opVolIntegrate:
         {
-            result = sum(values*filterField(mesh().V()));
+            result = sum(values*V);
+            break;
+        }
+        case opWeightedAverage:
+        {
+            result = sum(values*weightField)/sum(weightField);
             break;
         }
         default:
@@ -95,25 +107,20 @@ Type Foam::fieldValues::cellSource::processValues
 template<class Type>
 bool Foam::fieldValues::cellSource::writeValues(const word& fieldName)
 {
-    List<List<Type> > allValues(Pstream::nProcs());
-
-    bool validField =
-        setFieldValues<Type>(fieldName, allValues[Pstream::myProcNo()]);
+    const bool ok = validField<Type>(fieldName);
 
-    if (validField)
+    if (ok)
     {
-        Pstream::gatherList(allValues);
+        Field<Type> values = combineFields(setFieldValues<Type>(fieldName));
+
+        scalarField V = combineFields(filterField(mesh().V()));
+
+        scalarField weightField =
+            combineFields(setFieldValues<scalar>(weightFieldName_));
 
         if (Pstream::master())
         {
-            List<Type> values =
-                ListListOps::combine<List<Type> >
-                (
-                    allValues,
-                    accessOp<List<Type> >()
-                );
-
-            Type result = processValues(values);
+            Type result = processValues(values, V, weightField);
 
             if (valueOutput_)
             {
@@ -144,7 +151,7 @@ bool Foam::fieldValues::cellSource::writeValues(const word& fieldName)
         }
     }
 
-    return validField;
+    return ok;
 }
 
 
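The new weightedAverage operation for cellSource reads its weight field through the same setFieldValues<scalar> path as the sampled fields. A minimal configuration sketch follows; the keyword names (source, sourceName, operation, weightField) follow the usual fieldValue conventions and are assumptions, not taken verbatim from this patch:

    weightedAverageT
    {
        type            cellSource;
        functionObjectLibs ("libfieldFunctionObjects.so");
        log             true;
        valueOutput     false;
        source          cellZone;         // cell selection method
        sourceName      heatedCells;      // hypothetical cellZone name
        operation       weightedAverage;  // new opWeightedAverage mode
        weightField     rho;              // scalar weight, looked up by name
        fields          ( T );
    }
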
diff --git a/src/postProcessing/functionObjects/field/fieldValues/faceSource/faceSource.C b/src/postProcessing/functionObjects/field/fieldValues/faceSource/faceSource.C
index b609f4310ab9c711d44b19dbf48bb67419146e0e..e84fe07402fabd8e73589b497893eb87d7ba67b2 100644
--- a/src/postProcessing/functionObjects/field/fieldValues/faceSource/faceSource.C
+++ b/src/postProcessing/functionObjects/field/fieldValues/faceSource/faceSource.C
@@ -220,7 +220,7 @@ void Foam::fieldValues::faceSource::initialise(const dictionary& dict)
         }
         default:
         {
-            FatalErrorIn("faceSource::constructFaceAddressing()")
+            FatalErrorIn("faceSource::initiliase()")
                 << type() << " " << name_ << ": "
                 << sourceTypeNames_[source_] << "(" << sourceName_ << "):"
                 << nl << "    Unknown source type. Valid source types are:"
@@ -245,7 +245,7 @@ void Foam::fieldValues::faceSource::initialise(const dictionary& dict)
         }
         else
         {
-            FatalErrorIn("faceSource::constructFaceAddressing()")
+            FatalErrorIn("faceSource::initialise()")
                 << type() << " " << name_ << ": "
                 << sourceTypeNames_[source_] << "(" << sourceName_ << "):"
                 << nl << "    Weight field " << weightFieldName_
@@ -326,9 +326,12 @@ void Foam::fieldValues::faceSource::write()
 
     if (active_)
     {
-        outputFilePtr_()
-            << obr_.time().value() << tab
-            << sum(filterField(mesh().magSf()));
+        if (Pstream::master())
+        {
+            outputFilePtr_()
+                << obr_.time().value() << tab
+                << sum(filterField(mesh().magSf()));
+        }
 
         forAll(fields_, i)
         {
@@ -339,7 +342,10 @@ void Foam::fieldValues::faceSource::write()
             writeValues<tensor>(fields_[i]);
         }
 
-        outputFilePtr_()<< endl;
+        if (Pstream::master())
+        {
+            outputFilePtr_()<< endl;
+        }
 
         if (log_)
         {
@@ -350,4 +356,3 @@ void Foam::fieldValues::faceSource::write()
 
 
 // ************************************************************************* //
-
diff --git a/src/postProcessing/functionObjects/field/fieldValues/faceSource/faceSource.H b/src/postProcessing/functionObjects/field/fieldValues/faceSource/faceSource.H
index e8e2634b12855625003e8d27454aa16ccd0a836d..8ef65d870b717b31423ec919d89cfa61f515ee04 100644
--- a/src/postProcessing/functionObjects/field/fieldValues/faceSource/faceSource.H
+++ b/src/postProcessing/functionObjects/field/fieldValues/faceSource/faceSource.H
@@ -153,17 +153,22 @@ protected:
         //- Initialise, e.g. face addressing
         void initialise(const dictionary& dict);
 
-        //- Insert field values into values list
+        //- Return true if the field name is valid
         template<class Type>
-        bool setFieldValues
-        (
-            const word& fieldName,
-            List<Type>& values
-        ) const;
+        bool validField(const word& fieldName) const;
+
+        //- Return field values by looking up field name
+        template<class Type>
+        tmp<Field<Type> > setFieldValues(const word& fieldName) const;
 
         //- Apply the 'operation' to the values
         template<class Type>
-        Type processValues(const List<Type>& values) const;
+        Type processValues
+        (
+            const Field<Type>& values,
+            const scalarField& magSf,
+            const scalarField& weightField
+        ) const;
 
         //- Output file header information
         virtual void writeFileHeader();
diff --git a/src/postProcessing/functionObjects/field/fieldValues/faceSource/faceSourceTemplates.C b/src/postProcessing/functionObjects/field/fieldValues/faceSource/faceSourceTemplates.C
index d7609573a72cb18d4b7cbeb6c0cee65e67459654..a257384a5060f30ab7aa98e5a7f81aa81fb161f2 100644
--- a/src/postProcessing/functionObjects/field/fieldValues/faceSource/faceSourceTemplates.C
+++ b/src/postProcessing/functionObjects/field/fieldValues/faceSource/faceSourceTemplates.C
@@ -33,81 +33,52 @@ License
 // * * * * * * * * * * * * Protected Member Functions  * * * * * * * * * * * //
 
 template<class Type>
-bool Foam::fieldValues::faceSource::setFieldValues
-(
-    const word& fieldName,
-    List<Type>& values
-) const
+bool Foam::fieldValues::faceSource::validField(const word& fieldName) const
 {
-    values.setSize(faceId_.size(), pTraits<Type>::zero);
-
     typedef GeometricField<Type, fvsPatchField, surfaceMesh> sf;
     typedef GeometricField<Type, fvPatchField, volMesh> vf;
 
     if (obr_.foundObject<sf>(fieldName))
     {
-        const sf& field = obr_.lookupObject<sf>(fieldName);
-
-        forAll(values, i)
-        {
-            label faceI = faceId_[i];
-            label patchI = facePatchId_[i];
-            if (patchI >= 0)
-            {
-                values[i] = field.boundaryField()[patchI][faceI];
-            }
-            else
-            {
-                values[i] = field[faceI];
-            }
-
-            values[i] *= flipMap_[i];
-        }
-
         return true;
     }
     else if (obr_.foundObject<vf>(fieldName))
     {
-        const vf& field = obr_.lookupObject<vf>(fieldName);
+        return true;
+    }
 
-        forAll(values, i)
-        {
-            label faceI = faceId_[i];
-            label patchI = facePatchId_[i];
-            if (patchI >= 0)
-            {
-                values[i] = field.boundaryField()[patchI][faceI];
-            }
-            else
-            {
-                FatalErrorIn
-                (
-                    "fieldValues::faceSource::setFieldValues"
-                    "("
-                        "const word&, "
-                        "List<Type>&"
-                    ") const"
-                )   << type() << " " << name_ << ": "
-                    << sourceTypeNames_[source_] << "(" << sourceName_ << "):"
-                    << nl
-                    << "    Unable to process internal faces for volume field "
-                    << fieldName << nl << abort(FatalError);
-            }
+    return false;
+}
 
-            values[i] *= flipMap_[i];
-        }
 
-        return true;
+template<class Type>
+Foam::tmp<Foam::Field<Type> > Foam::fieldValues::faceSource::setFieldValues
+(
+    const word& fieldName
+) const
+{
+    typedef GeometricField<Type, fvsPatchField, surfaceMesh> sf;
+    typedef GeometricField<Type, fvPatchField, volMesh> vf;
+
+    if (obr_.foundObject<sf>(fieldName))
+    {
+        return filterField(obr_.lookupObject<sf>(fieldName));
+    }
+    else if (obr_.foundObject<vf>(fieldName))
+    {
+        return filterField(obr_.lookupObject<vf>(fieldName));
     }
 
-    return false;
+    return tmp<Field<Type> >(new Field<Type>(0));
 }
 
 
 template<class Type>
 Type Foam::fieldValues::faceSource::processValues
 (
-    const List<Type>& values
+    const Field<Type>& values,
+    const scalarField& magSf,
+    const scalarField& weightField
 ) const
 {
     Type result = pTraits<Type>::zero;
@@ -120,54 +91,17 @@ Type Foam::fieldValues::faceSource::processValues
         }
         case opAreaAverage:
         {
-            tmp<scalarField> magSf = filterField(mesh().magSf());
-            result = sum(values*magSf())/sum(magSf());
+            result = sum(values*magSf)/sum(magSf);
             break;
         }
         case opAreaIntegrate:
         {
-            result = sum(values*filterField(mesh().magSf()));
+            result = sum(values*magSf);
             break;
         }
         case opWeightedAverage:
         {
-            if (mesh().foundObject<volScalarField>(weightFieldName_))
-            {
-                tmp<scalarField> wField =
-                    filterField
-                    (
-                        mesh().lookupObject<volScalarField>(weightFieldName_)
-                    );
-               result = sum(values*wField())/sum(wField());
-            }
-            else if (mesh().foundObject<surfaceScalarField>(weightFieldName_))
-            {
-                tmp<scalarField> wField =
-                    filterField
-                    (
-                        mesh().lookupObject<surfaceScalarField>
-                        (
-                            weightFieldName_
-                        )
-                    );
-               result = sum(values*wField())/sum(wField());
-            }
-            else
-            {
-                FatalErrorIn
-                (
-                    "fieldValues::faceSource::processValues"
-                    "("
-                        "List<Type>&"
-                    ") const"
-                )   << type() << " " << name_ << ": "
-                    << sourceTypeNames_[source_] << "(" << sourceName_ << "):"
-                    << nl
-                    << "    Weight field " << weightFieldName_
-                    << " must be either a " << volScalarField::typeName
-                    << " or " << surfaceScalarField::typeName << nl
-                    << abort(FatalError);
-            }
+            result = sum(values*weightField)/sum(weightField);
             break;
         }
         default:
@@ -185,25 +119,20 @@ Type Foam::fieldValues::faceSource::processValues
 template<class Type>
 bool Foam::fieldValues::faceSource::writeValues(const word& fieldName)
 {
-    List<List<Type> > allValues(Pstream::nProcs());
+    const bool ok = validField<Type>(fieldName);
 
-    bool validField =
-        setFieldValues<Type>(fieldName, allValues[Pstream::myProcNo()]);
-
-    if (validField)
+    if (ok)
     {
-        Pstream::gatherList(allValues);
+        Field<Type> values = combineFields(setFieldValues<Type>(fieldName));
+
+        scalarField magSf = combineFields(filterField(mesh().magSf()));
+
+        scalarField weightField =
+            combineFields(setFieldValues<scalar>(weightFieldName_));
 
         if (Pstream::master())
         {
-            List<Type> values =
-                ListListOps::combine<List<Type> >
-                (
-                    allValues,
-                    accessOp<List<Type> >()
-                );
-
-            Type result = processValues(values);
+            Type result = processValues(values, magSf, weightField);
 
             if (valueOutput_)
             {
@@ -222,7 +151,6 @@ bool Foam::fieldValues::faceSource::writeValues(const word& fieldName)
                 ).write();
             }
 
-
             outputFilePtr_()<< tab << result;
 
             if (log_)
@@ -234,7 +162,7 @@ bool Foam::fieldValues::faceSource::writeValues(const word& fieldName)
         }
     }
 
-    return validField;
+    return ok;
 }
 
 
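For faceSource the weight field is likewise gathered with setFieldValues<scalar>, which resolves either a surface or a volume scalar field, so a flux such as phi can weight the average directly. A hedged sketch along the same lines (keyword names assumed rather than confirmed by this patch):

    fluxWeightedT
    {
        type            faceSource;
        functionObjectLibs ("libfieldFunctionObjects.so");
        log             true;
        valueOutput     false;
        source          faceZone;         // face selection method
        sourceName      outletZone;       // hypothetical faceZone name
        operation       weightedAverage;  // sum(values*weight)/sum(weight)
        weightField     phi;              // surface or volume scalar field
        fields          ( T );
    }
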
diff --git a/src/postProcessing/functionObjects/field/fieldValues/fieldValue/fieldValue.H b/src/postProcessing/functionObjects/field/fieldValues/fieldValue/fieldValue.H
index 1b96bb8e41c6171c50ab7b6ee60f093e6e98b9f7..4aa6f41bd7adff7342e75c50de49d3894558ad05 100644
--- a/src/postProcessing/functionObjects/field/fieldValues/fieldValue/fieldValue.H
+++ b/src/postProcessing/functionObjects/field/fieldValues/fieldValue/fieldValue.H
@@ -164,6 +164,13 @@ public:
 
             //- Execute the at the final time-loop, currently does nothing
             virtual void end();
+
+            //- Combine fields from all processor domains into a single field
+            template<class Type>
+            tmp<Field<Type> > combineFields
+            (
+                const tmp<Field<Type> >& field
+            ) const;
 };
 
 
@@ -177,6 +184,12 @@ public:
 
 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
 
+#ifdef NoRepository
+    #include "fieldValueTemplates.C"
+#endif
+
+// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
+
 #endif
 
 // ************************************************************************* //
diff --git a/src/postProcessing/functionObjects/field/fieldValues/fieldValue/fieldValueTemplates.C b/src/postProcessing/functionObjects/field/fieldValues/fieldValue/fieldValueTemplates.C
new file mode 100644
index 0000000000000000000000000000000000000000..23c700134f9c84a22e1391121b60adc7fc007032
--- /dev/null
+++ b/src/postProcessing/functionObjects/field/fieldValues/fieldValue/fieldValueTemplates.C
@@ -0,0 +1,66 @@
+/*---------------------------------------------------------------------------*\
+  =========                 |
+  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
+   \\    /   O peration     |
+    \\  /    A nd           | Copyright (C) 2009-2009 OpenCFD Ltd.
+     \\/     M anipulation  |
+-------------------------------------------------------------------------------
+License
+    This file is part of OpenFOAM.
+
+    OpenFOAM is free software; you can redistribute it and/or modify it
+    under the terms of the GNU General Public License as published by the
+    Free Software Foundation; either version 2 of the License, or (at your
+    option) any later version.
+
+    OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
+    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+    for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with OpenFOAM; if not, write to the Free Software Foundation,
+    Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+\*---------------------------------------------------------------------------*/
+
+#include "fieldValue.H"
+#include "ListListOps.H"
+#include "Pstream.H"
+
+// * * * * * * * * * * * * * * * Member Functions  * * * * * * * * * * * * * //
+
+template<class Type>
+Foam::tmp<Foam::Field<Type> > Foam::fieldValue::combineFields
+(
+    const tmp<Field<Type> >& field
+) const
+{
+    List<Field<Type> > allValues(Pstream::nProcs());
+
+    allValues[Pstream::myProcNo()] = field();
+
+    Pstream::gatherList(allValues);
+
+    if (Pstream::master())
+    {
+        return tmp<Field<Type> >
+        (
+            new Field<Type>
+            (
+                ListListOps::combine<Field<Type> >
+                (
+                    allValues,
+                    accessOp<Field<Type> >()
+                )
+            )
+        );
+    }
+    else
+    {
+        return field();
+    }
+}
+
+
+// ************************************************************************* //
diff --git a/src/triSurface/triSurface/stitchTriangles.C b/src/triSurface/triSurface/stitchTriangles.C
index 7a441ac6795fe268c4518873aada3872217cb1fb..70fe304808db80dda8daa104053634487b27bb73 100644
--- a/src/triSurface/triSurface/stitchTriangles.C
+++ b/src/triSurface/triSurface/stitchTriangles.C
@@ -26,6 +26,7 @@ License
 
 #include "triSurface.H"
 #include "mergePoints.H"
+#include "PackedBoolList.H"
 
 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
 
@@ -46,46 +47,44 @@ bool triSurface::stitchTriangles
     pointField newPoints;
     bool hasMerged = mergePoints(rawPoints, tol, verbose, pointMap, newPoints);
 
+    pointField& ps = storedPoints();
 
+    // Set the coordinates to the merged ones
+    ps.transfer(newPoints);
 
     if (hasMerged)
     {
         if (verbose)
         {
             Pout<< "stitchTriangles : Merged from " << rawPoints.size()
-                << " points down to " << newPoints.size() << endl;
+                << " points down to " << ps.size() << endl;
         }
 
-        pointField& ps = storedPoints();
-
-        // Set the coordinates to the merged ones
-        ps = newPoints;
-
         // Reset the triangle point labels to the unique points array
         label newTriangleI = 0;
         forAll(*this, i)
         {
-            label newA = pointMap[operator[](i)[0]];
-            label newB = pointMap[operator[](i)[1]];
-            label newC = pointMap[operator[](i)[2]];
-
-            if ((newA != newB) && (newA != newC) && (newB != newC))
+            const labelledTri& tri = operator[](i);
+            labelledTri newTri
+            (
+                pointMap[tri[0]],
+                pointMap[tri[1]],
+                pointMap[tri[2]],
+                tri.region()
+            );
+
+            if
+            (
+                (newTri[0] != newTri[1])
+             && (newTri[0] != newTri[2])
+             && (newTri[1] != newTri[2])
+            )
             {
-                operator[](newTriangleI)[0] = newA;
-                operator[](newTriangleI)[1] = newB;
-                operator[](newTriangleI)[2] = newC;
-                operator[](newTriangleI).region() = operator[](i).region();
-                newTriangleI++;
+                operator[](newTriangleI++) = newTri;
             }
             else if (verbose)
             {
                 Pout<< "stitchTriangles : "
-                    << "Removing triangle " << i << " with non-unique vertices."
-                    << endl
-                    << "    vertices   :" << newA << ' ' << newB << ' ' << newC
-                    << endl
-                    << "    coordinates:" << ps[newA] << ' ' << ps[newB]
-                    << ' ' << ps[newC]  << endl;
+                    << "Removing triangle " << i
+                    << " with non-unique vertices." << endl
+                    << "    vertices   :" << newTri << endl
+                    << "    coordinates:" << newTri.points(ps)
+                    << endl;
             }
         }
 
@@ -98,6 +97,58 @@ bool triSurface::stitchTriangles
                     << " triangles" << endl;
             }
             setSize(newTriangleI);
+
+            // And possibly compact out any unused points (since used only
+            // by triangles that have just been deleted)
+            // Done in two passes to save memory (pointField)
+
+            // 1. Detect only
+            PackedBoolList pointIsUsed(ps.size());
+
+            label nPoints = 0;
+
+            forAll(*this, i)
+            {
+                const labelledTri& tri = operator[](i);
+
+                forAll(tri, fp)
+                {
+                    label pointI = tri[fp];
+                    if (pointIsUsed.set(pointI, 1))
+                    {
+                        nPoints++;
+                    }
+                }
+            }
+
+            if (nPoints != ps.size())
+            {
+                // 2. Compact.
+                pointMap.setSize(ps.size());
+                label newPointI = 0;
+                forAll(pointIsUsed, pointI)
+                {
+                    if (pointIsUsed[pointI])
+                    {
+                        ps[newPointI] = ps[pointI];
+                        pointMap[pointI] = newPointI++;
+                    }
+                }
+                ps.setSize(newPointI);
+
+                newTriangleI = 0;
+                forAll(*this, i)
+                {
+                    const labelledTri& tri = operator[](i);
+                    operator[](newTriangleI++) = labelledTri
+                    (
+                        pointMap[tri[0]],
+                        pointMap[tri[1]],
+                        pointMap[tri[2]],
+                        tri.region()
+                    );
+                }
+            }
         }
     }
 
diff --git a/src/turbulenceModels/compressible/LES/GenEddyVisc/GenEddyVisc.H b/src/turbulenceModels/compressible/LES/GenEddyVisc/GenEddyVisc.H
index 440ec71c5c9b40bb88176ee3f11aeba2af77a208..7f2d3f1f852a68269ef82f6378039a798457b395 100644
--- a/src/turbulenceModels/compressible/LES/GenEddyVisc/GenEddyVisc.H
+++ b/src/turbulenceModels/compressible/LES/GenEddyVisc/GenEddyVisc.H
@@ -126,15 +126,6 @@ public:
             return alphaSgs_;
         }
 
-        //- Return thermal diffusivity
-        virtual tmp<volScalarField> alphaEff() const
-        {
-            return tmp<volScalarField>
-            (
-                new volScalarField("alphaEff", alphaSgs_ + alpha())
-            );
-        }
-
         //- Return the sub-grid stress tensor.
         virtual tmp<volSymmTensorField> B() const;
 
diff --git a/src/turbulenceModels/compressible/LES/GenSGSStress/GenSGSStress.H b/src/turbulenceModels/compressible/LES/GenSGSStress/GenSGSStress.H
index ff71c80b798a20792b90ff3e568197c80e059ce9..e65495225ccf62faa198a1bd17d0dad842e30d03 100644
--- a/src/turbulenceModels/compressible/LES/GenSGSStress/GenSGSStress.H
+++ b/src/turbulenceModels/compressible/LES/GenSGSStress/GenSGSStress.H
@@ -127,15 +127,6 @@ public:
             return alphaSgs_;
         }
 
-        //- Return thermal conductivity
-        virtual tmp<volScalarField> alphaEff() const
-        {
-            return tmp<volScalarField>
-            (
-                new volScalarField("alphaEff", alphaSgs_ + alpha())
-            );
-        }
-
         //- Return the sub-grid stress tensor
         virtual tmp<volSymmTensorField> B() const
         {
diff --git a/src/turbulenceModels/compressible/LES/LESModel/LESModel.H b/src/turbulenceModels/compressible/LES/LESModel/LESModel.H
index cf019ea9a8aca9b93a709e94507a74c08a9ef6c1..2cdde6c61eda86c94183ae323944ef04866bdb89 100644
--- a/src/turbulenceModels/compressible/LES/LESModel/LESModel.H
+++ b/src/turbulenceModels/compressible/LES/LESModel/LESModel.H
@@ -189,12 +189,6 @@ public:
             }
 
 
-        //- Return the SGS turbulent kinetic energy.
-        virtual tmp<volScalarField> k() const = 0;
-
-        //- Return the SGS turbulent dissipation.
-        virtual tmp<volScalarField> epsilon() const = 0;
-
         //- Return the SGS turbulent viscosity
         virtual tmp<volScalarField> muSgs() const = 0;
 
@@ -210,8 +204,22 @@ public:
         //- Return the SGS turbulent thermal diffusivity
         virtual tmp<volScalarField> alphaSgs() const = 0;
 
-        //- Return the SGS thermal conductivity.
-        virtual tmp<volScalarField> alphaEff() const = 0;
+        //- Return the effective thermal diffusivity
+        virtual tmp<volScalarField> alphaEff() const
+        {
+            return tmp<volScalarField>
+            (
+                new volScalarField("alphaEff", alphaSgs() + alpha())
+            );
+        }
+
+        //- Return the effective turbulence thermal diffusivity for a patch
+        virtual tmp<scalarField> alphaEff(const label patchI) const
+        {
+            return
+                alphaSgs()().boundaryField()[patchI]
+              + alpha().boundaryField()[patchI];
+        }
 
         //- Return the sub-grid stress tensor.
         virtual tmp<volSymmTensorField> B() const = 0;
@@ -261,13 +269,13 @@ public:
         //- Correct Eddy-Viscosity and related properties.
         //  This calls correct(const tmp<volTensorField>& gradU) by supplying
         //  gradU calculated locally.
-        void correct();
+        virtual void correct();
 
         //- Correct Eddy-Viscosity and related properties
         virtual void correct(const tmp<volTensorField>& gradU);
 
         //- Read LESProperties dictionary
-        virtual bool read() = 0;
+        virtual bool read();
 };
 
 
diff --git a/src/turbulenceModels/compressible/LES/SpalartAllmaras/SpalartAllmaras.H b/src/turbulenceModels/compressible/LES/SpalartAllmaras/SpalartAllmaras.H
index e6239ec91bc623240c124f128a8697110a4b6a8c..29d53267a2e41150562bc20a6e0af6817ad900d7 100644
--- a/src/turbulenceModels/compressible/LES/SpalartAllmaras/SpalartAllmaras.H
+++ b/src/turbulenceModels/compressible/LES/SpalartAllmaras/SpalartAllmaras.H
@@ -149,15 +149,6 @@ public:
             return alphaSgs_;
         }
 
-        //- Return thermal conductivity
-        virtual tmp<volScalarField> alphaEff() const
-        {
-            return tmp<volScalarField>
-            (
-                new volScalarField("alphaEff", alphaSgs_ + alpha())
-            );
-        }
-
         //- Return the sub-grid stress tensor.
         virtual tmp<volSymmTensorField> B() const;
 
diff --git a/src/turbulenceModels/compressible/RAS/LRR/LRR.H b/src/turbulenceModels/compressible/RAS/LRR/LRR.H
index 445faebd39e2534e5c6dec9a2123382120f548bc..9ce0eef7bbecc5b164eb64c0f25ace8c95d29324 100644
--- a/src/turbulenceModels/compressible/RAS/LRR/LRR.H
+++ b/src/turbulenceModels/compressible/RAS/LRR/LRR.H
@@ -153,13 +153,10 @@ public:
             return mut_;
         }
 
-        //- Return the effective turbulent thermal diffusivity
-        virtual tmp<volScalarField> alphaEff() const
+        //- Return the turbulence thermal diffusivity
+        virtual tmp<volScalarField> alphat() const
         {
-            return tmp<volScalarField>
-            (
-                new volScalarField("alphaEff", alphat_ + alpha())
-            );
+            return alphat_;
         }
 
         //- Return the turbulence kinetic energy
diff --git a/src/turbulenceModels/compressible/RAS/LaunderGibsonRSTM/LaunderGibsonRSTM.H b/src/turbulenceModels/compressible/RAS/LaunderGibsonRSTM/LaunderGibsonRSTM.H
index 00db2c304f9d9338575f2de850098349a894ce02..e8c5a331b26a41735e8b8afc19e447d9e6d1a773 100644
--- a/src/turbulenceModels/compressible/RAS/LaunderGibsonRSTM/LaunderGibsonRSTM.H
+++ b/src/turbulenceModels/compressible/RAS/LaunderGibsonRSTM/LaunderGibsonRSTM.H
@@ -162,13 +162,10 @@ public:
             return mut_;
         }
 
-        //- Return the effective turbulent thermal diffusivity
-        virtual tmp<volScalarField> alphaEff() const
+        //- Return the turbulence thermal diffusivity
+        virtual tmp<volScalarField> alphat() const
         {
-            return tmp<volScalarField>
-            (
-                new volScalarField("alphaEff", alphat_ + alpha())
-            );
+            return alphat_;
         }
 
         //- Return the turbulence kinetic energy
diff --git a/src/turbulenceModels/compressible/RAS/LaunderSharmaKE/LaunderSharmaKE.H b/src/turbulenceModels/compressible/RAS/LaunderSharmaKE/LaunderSharmaKE.H
index b7c972a662c05b473d8b8eb2f80b4e7125593e4f..f9c878607d04a2796e899720bd2946422828f277 100644
--- a/src/turbulenceModels/compressible/RAS/LaunderSharmaKE/LaunderSharmaKE.H
+++ b/src/turbulenceModels/compressible/RAS/LaunderSharmaKE/LaunderSharmaKE.H
@@ -146,13 +146,10 @@ public:
             return mut_;
         }
 
-        //- Return the effective turbulent thermal diffusivity
-        virtual tmp<volScalarField> alphaEff() const
+        //- Return the turbulence thermal diffusivity
+        virtual tmp<volScalarField> alphat() const
         {
-            return tmp<volScalarField>
-            (
-                new volScalarField("alphaEff", alphat_ + alpha())
-            );
+            return alphat_;
         }
 
         //- Return the turbulence kinetic energy
diff --git a/src/turbulenceModels/compressible/RAS/RASModel/RASModel.H b/src/turbulenceModels/compressible/RAS/RASModel/RASModel.H
index cc189f24c80275ba3e7159dde0416339ee1e7ee1..883fa0825a32e106dadd870c853f38ec3319a061 100644
--- a/src/turbulenceModels/compressible/RAS/RASModel/RASModel.H
+++ b/src/turbulenceModels/compressible/RAS/RASModel/RASModel.H
@@ -265,9 +265,6 @@ public:
             }
 
 
-        //- Return the turbulence viscosity
-        virtual tmp<volScalarField> mut() const = 0;
-
         //- Return the effective viscosity
         virtual tmp<volScalarField> muEff() const
         {
@@ -278,22 +275,21 @@ public:
         }
 
         //- Return the effective turbulent thermal diffusivity
-        virtual tmp<volScalarField> alphaEff() const = 0;
-
-        //- Return the turbulence kinetic energy
-        virtual tmp<volScalarField> k() const = 0;
-
-        //- Return the turbulence kinetic energy dissipation rate
-        virtual tmp<volScalarField> epsilon() const = 0;
-
-        //- Return the Reynolds stress tensor
-        virtual tmp<volSymmTensorField> R() const = 0;
-
-        //- Return the effective stress tensor including the laminar stress
-        virtual tmp<volSymmTensorField> devRhoReff() const = 0;
+        virtual tmp<volScalarField> alphaEff() const
+        {
+            return tmp<volScalarField>
+            (
+                new volScalarField("alphaEff", alphat() + alpha())
+            );
+        }
 
-        //- Return the source term for the momentum equation
-        virtual tmp<fvVectorMatrix> divDevRhoReff(volVectorField& U) const = 0;
+        //- Return the effective turbulent thermal diffusivity for a patch
+        virtual tmp<scalarField> alphaEff(const label patchI) const
+        {
+            return
+                alphat()().boundaryField()[patchI]
+              + alpha().boundaryField()[patchI];
+        }
 
         //- Return yPlus for the given patch
         virtual tmp<scalarField> yPlus
@@ -303,10 +299,10 @@ public:
         ) const;
 
         //- Solve the turbulence equations and correct the turbulence viscosity
-        virtual void correct() = 0;
+        virtual void correct();
 
         //- Read RASProperties dictionary
-        virtual bool read() = 0;
+        virtual bool read();
 };
 
 
diff --git a/src/turbulenceModels/compressible/RAS/RNGkEpsilon/RNGkEpsilon.H b/src/turbulenceModels/compressible/RAS/RNGkEpsilon/RNGkEpsilon.H
index 1e4301adcd222a32d1dabb714db0e7ece32c8bc6..9d9601eee78caed08225e4dfc9e01b884da6c3e6 100644
--- a/src/turbulenceModels/compressible/RAS/RNGkEpsilon/RNGkEpsilon.H
+++ b/src/turbulenceModels/compressible/RAS/RNGkEpsilon/RNGkEpsilon.H
@@ -142,13 +142,10 @@ public:
             return mut_;
         }
 
-        //- Return the effective turbulent thermal diffusivity
-        virtual tmp<volScalarField> alphaEff() const
+        //- Return the turbulence thermal diffusivity
+        virtual tmp<volScalarField> alphat() const
         {
-            return tmp<volScalarField>
-            (
-                new volScalarField("alphaEff", alphat_ + alpha())
-            );
+            return alphat_;
         }
 
         //- Return the turbulence kinetic energy
diff --git a/src/turbulenceModels/compressible/RAS/SpalartAllmaras/SpalartAllmaras.H b/src/turbulenceModels/compressible/RAS/SpalartAllmaras/SpalartAllmaras.H
index 94e736b9f8a13812727cdf9b974dcf205b6c9697..fdb0c675ee09002db9ada976c9c4b51938612aa4 100644
--- a/src/turbulenceModels/compressible/RAS/SpalartAllmaras/SpalartAllmaras.H
+++ b/src/turbulenceModels/compressible/RAS/SpalartAllmaras/SpalartAllmaras.H
@@ -178,13 +178,10 @@ public:
             return mut_;
         }
 
-        //- Return the effective turbulent thermal diffusivity
-        virtual tmp<volScalarField> alphaEff() const
+        //- Return the turbulence thermal diffusivity
+        virtual tmp<volScalarField> alphat() const
         {
-            return tmp<volScalarField>
-            (
-                new volScalarField("alphaEff", alphat_ + alpha())
-            );
+            return alphat_;
         }
 
         //- Return the turbulence kinetic energy
diff --git a/src/turbulenceModels/compressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.C b/src/turbulenceModels/compressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.C
index 2f54baf7c25ff80c45be84bbb326731628916be4..e8e6234c32b0ae6d035ae9aea3db301d89cb6726 100644
--- a/src/turbulenceModels/compressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.C
+++ b/src/turbulenceModels/compressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.C
@@ -37,6 +37,22 @@ namespace Foam
 namespace compressible
 {
 
+// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
+
+template<>
+const char*
+NamedEnum<turbulentHeatFluxTemperatureFvPatchScalarField::heatSourceType, 2>::
+names[] =
+    {
+        "power",
+        "flux"
+    };
+
+const
+NamedEnum<turbulentHeatFluxTemperatureFvPatchScalarField::heatSourceType, 2>
+    turbulentHeatFluxTemperatureFvPatchScalarField::heatSourceTypeNames_;
+
+
 // * * * * * * * * * * * * * * * * Constructors  * * * * * * * * * * * * * * //
 
 turbulentHeatFluxTemperatureFvPatchScalarField::
@@ -47,8 +63,8 @@ turbulentHeatFluxTemperatureFvPatchScalarField
 )
 :
     fixedGradientFvPatchScalarField(p, iF),
-    q_(p.size(), 0.0),
-    rhoName_("rho")
+    heatSource_(hsPower),
+    q_(p.size(), 0.0)
 {}
 
 
@@ -62,8 +78,8 @@ turbulentHeatFluxTemperatureFvPatchScalarField
 )
 :
     fixedGradientFvPatchScalarField(ptf, p, iF, mapper),
-    q_(ptf.q_, mapper),
-    rhoName_(ptf.rhoName_)
+    heatSource_(ptf.heatSource_),
+    q_(ptf.q_, mapper)
 {}
 
 
@@ -76,8 +92,8 @@ turbulentHeatFluxTemperatureFvPatchScalarField
 )
 :
     fixedGradientFvPatchScalarField(p, iF),
-    q_("q", dict, p.size()),
-    rhoName_(dict.lookupOrDefault<word>("rho", "rho"))
+    heatSource_(heatSourceTypeNames_.read(dict.lookup("heatSource"))),
+    q_("q", dict, p.size())
 {
     fvPatchField<scalar>::operator=(patchInternalField());
     gradient() = 0.0;
@@ -91,8 +107,8 @@ turbulentHeatFluxTemperatureFvPatchScalarField
 )
 :
     fixedGradientFvPatchScalarField(thftpsf),
-    q_(thftpsf.q_),
-    rhoName_(thftpsf.rhoName_)
+    heatSource_(thftpsf.heatSource_),
+    q_(thftpsf.q_)
 {}
 
 
@@ -104,8 +120,8 @@ turbulentHeatFluxTemperatureFvPatchScalarField
 )
 :
     fixedGradientFvPatchScalarField(thftpsf, iF),
-    q_(thftpsf.q_),
-    rhoName_(thftpsf.rhoName_)
+    heatSource_(thftpsf.heatSource_),
+    q_(thftpsf.q_)
 {}
 
 
@@ -150,22 +166,39 @@ void turbulentHeatFluxTemperatureFvPatchScalarField::updateCoeffs()
 
     const RASModel& rasModel = db().lookupObject<RASModel>("RASProperties");
 
-    const scalarField alphaEffp = rasModel.alphaEff()().boundaryField()[patchI];
+    const scalarField alphaEffp = rasModel.alphaEff(patchI);
 
-    const basicThermo& thermo =
-        db().lookupObject<basicThermo>("thermophysicalProperties");
-
-//    const scalarField& Tp = thermo.T().boundaryField()[patchI];
     const scalarField& Tp = *this;
 
-    const scalarField Cpp = thermo.Cp(Tp, patchI);
-
-    const scalarField& rhop =
-        patch().lookupPatchField<volScalarField, scalar>(rhoName_);
-
-    const scalar Ap = gSum(patch().magSf());
+    const scalarField Cpp = rasModel.thermo().Cp(Tp, patchI);
 
-    gradient() = q_/(Ap*rhop*Cpp*alphaEffp);
+    switch (heatSource_)
+    {
+        case hsPower:
+        {
+            const scalar Ap = gSum(patch().magSf());
+            gradient() = q_/(Ap*Cpp*alphaEffp);
+            break;
+        }
+        case hsFlux:
+        {
+            gradient() = q_/(Cpp*alphaEffp);
+            break;
+        }
+        default:
+        {
+            FatalErrorIn
+            (
+                "turbulentHeatFluxTemperatureFvPatchScalarField::"
+                "updateCoeffs()"
+            )   << "Unknown heat source type. Valid types are: "
+                << heatSourceTypeNames_ << nl << exit(FatalError);
+        }
+    }
 
     fixedGradientFvPatchScalarField::updateCoeffs();
 }
@@ -177,8 +210,9 @@ void turbulentHeatFluxTemperatureFvPatchScalarField::write
 ) const
 {
     fvPatchScalarField::write(os);
+    os.writeKeyword("heatSource") << heatSourceTypeNames_[heatSource_]
+        << token::END_STATEMENT << nl;
     q_.writeEntry("q", os);
-    os.writeKeyword("rho") << rhoName_ << token::END_STATEMENT << nl;
     gradient().writeEntry("gradient", os);
     writeEntry("value", os);
 }
diff --git a/src/turbulenceModels/compressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.H b/src/turbulenceModels/compressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.H
index f7581102df9f8d328715b7b295548f2e8d051326..7d9e5b7a40a772b6f2f36e932ed34988e62e66d2 100644
--- a/src/turbulenceModels/compressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.H
+++ b/src/turbulenceModels/compressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.H
@@ -26,7 +26,20 @@ Class
     Foam::turbulentHeatFluxTemperatureFvPatchScalarField
 
 Description
-    Fixed heat flux boundary condition for temperature.
+    Fixed heat flux boundary condition for temperature, applied as a fixed
+    temperature gradient. The heat source is specified either as an absolute
+    power [W] or as a flux [W/m2].
+
+    Example usage:
+
+        hotWall
+        {
+            type            compressible::turbulentHeatFluxTemperature;
+            heatSource      flux;        // power [W]; flux [W/m2]
+            q               uniform 10;  // heat power or flux
+            value           uniform 300; // initial temperature value
+        }
+
 
 SourceFiles
     turbulentHeatFluxTemperatureFvPatchScalarField.C
@@ -38,6 +51,7 @@ SourceFiles
 
 #include "fvPatchFields.H"
 #include "fixedGradientFvPatchFields.H"
+#include "NamedEnum.H"
 
 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
 
@@ -47,20 +61,37 @@ namespace compressible
 {
 
 /*---------------------------------------------------------------------------*\
-     Class turbulentHeatFluxTemperatureFvPatchScalarField Declaration
+      Class turbulentHeatFluxTemperatureFvPatchScalarField Declaration
 \*---------------------------------------------------------------------------*/
 
 class turbulentHeatFluxTemperatureFvPatchScalarField
 :
     public fixedGradientFvPatchScalarField
 {
-// Private data
+public:
+
+    // Data types
+
+        //- Enumeration listing the possible heat source input modes
+        enum heatSourceType
+        {
+            hsPower,
+            hsFlux
+        };
+
+
+private:
+
+    // Private data
+
+        //- Heat source type names
+        static const NamedEnum<heatSourceType, 2> heatSourceTypeNames_;
 
-    //- Heat flux [W]
-    scalarField q_;
+        //- Heat source type
+        heatSourceType heatSource_;
 
-    //- Name of density field
-    word rhoName_;
+        //- Heat power [W] or flux [W/m2]
+        scalarField q_;
 
 
 public:
diff --git a/src/turbulenceModels/compressible/RAS/derivedFvPatchFields/wallFunctions/alphatWallFunctions/alphatJayatillekeWallFunction/alphatJayatillekeWallFunctionFvPatchScalarField.C b/src/turbulenceModels/compressible/RAS/derivedFvPatchFields/wallFunctions/alphatWallFunctions/alphatJayatillekeWallFunction/alphatJayatillekeWallFunctionFvPatchScalarField.C
index 5c73b8b146f80447c58e1142000b931d6f839d58..cab530edacbe1e4afa342021d5badf64a4ca554c 100644
--- a/src/turbulenceModels/compressible/RAS/derivedFvPatchFields/wallFunctions/alphatWallFunctions/alphatJayatillekeWallFunction/alphatJayatillekeWallFunctionFvPatchScalarField.C
+++ b/src/turbulenceModels/compressible/RAS/derivedFvPatchFields/wallFunctions/alphatWallFunctions/alphatJayatillekeWallFunction/alphatJayatillekeWallFunctionFvPatchScalarField.C
@@ -222,7 +222,7 @@ void alphatJayatillekeWallFunctionFvPatchScalarField::updateCoeffs()
 
     const scalarField& rhow = rasModel.rho().boundaryField()[patchI];
     const fvPatchScalarField& hw =
-        patch().lookupPatchField<volScalarField, scalar>("h");
+        rasModel.thermo().h().boundaryField()[patchI];
 
     // Heat flux [W/m2] - lagging alphatw
     const scalarField qDot = (alphaw + alphatw)*hw.snGrad();
diff --git a/src/turbulenceModels/compressible/RAS/kEpsilon/kEpsilon.H b/src/turbulenceModels/compressible/RAS/kEpsilon/kEpsilon.H
index 865120d8c6c9c320351bcda9b80c655755d48884..11d8f75a058c20bdb21fefec6dbb48cceea45e8f 100644
--- a/src/turbulenceModels/compressible/RAS/kEpsilon/kEpsilon.H
+++ b/src/turbulenceModels/compressible/RAS/kEpsilon/kEpsilon.H
@@ -138,13 +138,10 @@ public:
             return mut_;
         }
 
-        //- Return the effective turbulent thermal diffusivity
-        virtual tmp<volScalarField> alphaEff() const
+        //- Return the turbulence thermal diffusivity
+        virtual tmp<volScalarField> alphat() const
         {
-            return tmp<volScalarField>
-            (
-                new volScalarField("alphaEff", alphat_ + alpha())
-            );
+            return alphat_;
         }
 
         //- Return the turbulence kinetic energy
diff --git a/src/turbulenceModels/compressible/RAS/kOmegaSST/kOmegaSST.H b/src/turbulenceModels/compressible/RAS/kOmegaSST/kOmegaSST.H
index a563557f764cb24dfdba768fe6afe2061f8e1cc4..9806d32493b7ccdf91270fcdd0498aac74a69dc4 100644
--- a/src/turbulenceModels/compressible/RAS/kOmegaSST/kOmegaSST.H
+++ b/src/turbulenceModels/compressible/RAS/kOmegaSST/kOmegaSST.H
@@ -222,13 +222,10 @@ public:
             return mut_;
         }
 
-        //- Return the effective turbulent thermal diffusivity
-        virtual tmp<volScalarField> alphaEff() const
+        //- Return the turbulence thermal diffusivity
+        virtual tmp<volScalarField> alphat() const
         {
-            return tmp<volScalarField>
-            (
-                new volScalarField("alphaEff", alphat_ + alpha())
-            );
+            return alphat_;
         }
 
         //- Return the turbulence kinetic energy
diff --git a/src/turbulenceModels/compressible/RAS/laminar/laminar.C b/src/turbulenceModels/compressible/RAS/laminar/laminar.C
index 415dedee72b21dfb14b39157875dbb71ba9cc983..cc5666eb6cbc0cbe54051a9f4996bf0354bd8d3b 100644
--- a/src/turbulenceModels/compressible/RAS/laminar/laminar.C
+++ b/src/turbulenceModels/compressible/RAS/laminar/laminar.C
@@ -78,6 +78,27 @@ tmp<volScalarField> laminar::mut() const
 }
 
 
+tmp<volScalarField> laminar::alphat() const
+{
+    return tmp<volScalarField>
+    (
+        new volScalarField
+        (
+            IOobject
+            (
+                "alphat",
+                runTime_.timeName(),
+                mesh_,
+                IOobject::NO_READ,
+                IOobject::NO_WRITE
+            ),
+            mesh_,
+            dimensionedScalar("alphat", alpha().dimensions(), 0.0)
+        )
+    );
+}
+
+
 tmp<volScalarField> laminar::k() const
 {
     return tmp<volScalarField>
diff --git a/src/turbulenceModels/compressible/RAS/laminar/laminar.H b/src/turbulenceModels/compressible/RAS/laminar/laminar.H
index af1f5e2bb2c379c3ef7936b6033cb23a17893e2b..506799ec983c01e9ba45a4de8d1687cd1ed6e3f0 100644
--- a/src/turbulenceModels/compressible/RAS/laminar/laminar.H
+++ b/src/turbulenceModels/compressible/RAS/laminar/laminar.H
@@ -89,6 +89,9 @@ public:
             return tmp<volScalarField>(new volScalarField("muEff", mu()));
         }
 
+        //- Return the turbulence thermal diffusivity, i.e. 0 for laminar flow
+        virtual tmp<volScalarField> alphat() const;
+
         //- Return the effective turbulent thermal diffusivity,
         //  i.e. the laminar thermal diffusivity
         virtual tmp<volScalarField> alphaEff() const
diff --git a/src/turbulenceModels/compressible/RAS/realizableKE/realizableKE.H b/src/turbulenceModels/compressible/RAS/realizableKE/realizableKE.H
index 222ee733a3210374de39e8145036981fe9213c86..1b7d51a9a97a2ea4e0d32a4fb76516fa632536a1 100644
--- a/src/turbulenceModels/compressible/RAS/realizableKE/realizableKE.H
+++ b/src/turbulenceModels/compressible/RAS/realizableKE/realizableKE.H
@@ -159,13 +159,10 @@ public:
             return mut_;
         }
 
-        //- Return the effective turbulent thermal diffusivity
-        virtual tmp<volScalarField> alphaEff() const
+        //- Return the turbulence thermal diffusivity
+        virtual tmp<volScalarField> alphat() const
         {
-            return tmp<volScalarField>
-            (
-                new volScalarField("alphaEff", alphat_ + alpha())
-            );
+            return alphat_;
         }
 
         //- Return the turbulence kinetic energy
diff --git a/src/turbulenceModels/compressible/turbulenceModel/laminar/laminar.C b/src/turbulenceModels/compressible/turbulenceModel/laminar/laminar.C
index 4a391c3db5b1d2f4d509a898ab4308e5d2a6cc31..87d450d12d9477e1edb74c351ca9d1bcac01b17d 100644
--- a/src/turbulenceModels/compressible/turbulenceModel/laminar/laminar.C
+++ b/src/turbulenceModels/compressible/turbulenceModel/laminar/laminar.C
@@ -96,6 +96,27 @@ tmp<volScalarField> laminar::mut() const
 }
 
 
+tmp<volScalarField> laminar::alphat() const
+{
+    return tmp<volScalarField>
+    (
+        new volScalarField
+        (
+            IOobject
+            (
+                "alphat",
+                runTime_.timeName(),
+                mesh_,
+                IOobject::NO_READ,
+                IOobject::NO_WRITE
+            ),
+            mesh_,
+            dimensionedScalar("alphat", alpha().dimensions(), 0.0)
+        )
+    );
+}
+
+
 tmp<volScalarField> laminar::k() const
 {
     return tmp<volScalarField>
diff --git a/src/turbulenceModels/compressible/turbulenceModel/laminar/laminar.H b/src/turbulenceModels/compressible/turbulenceModel/laminar/laminar.H
index d4379bfb2e28ca939e7f5df7ee9229990ae5c5ee..7645e67379417f3cc0efc1b8a8c3bee1a86fa9b2 100644
--- a/src/turbulenceModels/compressible/turbulenceModel/laminar/laminar.H
+++ b/src/turbulenceModels/compressible/turbulenceModel/laminar/laminar.H
@@ -99,6 +99,9 @@ public:
             return tmp<volScalarField>(new volScalarField("muEff", mu()));
         }
 
+        //- Return the turbulence thermal diffusivity, i.e. 0 for laminar flow
+        virtual tmp<volScalarField> alphat() const;
+
         //- Return the effective turbulent thermal diffusivity,
         //  i.e. the laminar thermal diffusivity
         virtual tmp<volScalarField> alphaEff() const
@@ -106,6 +109,13 @@ public:
             return tmp<volScalarField>(new volScalarField("alphaEff", alpha()));
         }
 
+        //- Return the effective turbulent thermal diffusivity for a patch,
+        //  i.e. the laminar thermal diffusivity
+        virtual tmp<scalarField> alphaEff(const label patchI) const
+        {
+            return alpha().boundaryField()[patchI];
+        }
+
         //- Return the turbulence kinetic energy, i.e. 0 for laminar flow
         virtual tmp<volScalarField> k() const;
 
diff --git a/src/turbulenceModels/compressible/turbulenceModel/turbulenceModel.H b/src/turbulenceModels/compressible/turbulenceModel/turbulenceModel.H
index 97638e5e49cfb054a21812fd7fa94ef06c96ccb9..9a84c01bb832ee9fb6d873dd0ee0be835ed7e5e3 100644
--- a/src/turbulenceModels/compressible/turbulenceModel/turbulenceModel.H
+++ b/src/turbulenceModels/compressible/turbulenceModel/turbulenceModel.H
@@ -192,9 +192,15 @@ public:
         //- Return the effective viscosity
         virtual tmp<volScalarField> muEff() const = 0;
 
-        //- Return the effective turbulent thermal diffusivity
+        //- Return the turbulence thermal diffusivity
+        virtual tmp<volScalarField> alphat() const = 0;
+
+        //- Return the effective turbulence thermal diffusivity
         virtual tmp<volScalarField> alphaEff() const = 0;
 
+        //- Return the effective turbulence thermal diffusivity for a patch
+        virtual tmp<scalarField> alphaEff(const label patchI) const = 0;
+
         //- Return the turbulence kinetic energy
         virtual tmp<volScalarField> k() const = 0;
 
diff --git a/src/turbulenceModels/incompressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.C b/src/turbulenceModels/incompressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.C
index 3ca461698afff2844b2c481b9fb99dce7789b9fc..76639637bab63140c65f999dbe7e9d7aeea73608 100644
--- a/src/turbulenceModels/incompressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.C
+++ b/src/turbulenceModels/incompressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.C
@@ -36,6 +36,22 @@ namespace Foam
 namespace incompressible
 {
 
+// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
+
+template<>
+const char*
+NamedEnum<turbulentHeatFluxTemperatureFvPatchScalarField::heatSourceType, 2>::
+names[] =
+    {
+        "power",
+        "flux"
+    };
+
+const
+NamedEnum<turbulentHeatFluxTemperatureFvPatchScalarField::heatSourceType, 2>
+    turbulentHeatFluxTemperatureFvPatchScalarField::heatSourceTypeNames_;
+
+
 // * * * * * * * * * * * * * * * * Constructors  * * * * * * * * * * * * * * //
 
 turbulentHeatFluxTemperatureFvPatchScalarField::
@@ -46,6 +62,7 @@ turbulentHeatFluxTemperatureFvPatchScalarField
 )
 :
     fixedGradientFvPatchScalarField(p, iF),
+    heatSource_(hsPower),
     q_(p.size(), 0.0),
     alphaEffName_("undefinedAlphaEff"),
     CpName_("undefinedCp")
@@ -62,6 +79,7 @@ turbulentHeatFluxTemperatureFvPatchScalarField
 )
 :
     fixedGradientFvPatchScalarField(ptf, p, iF, mapper),
+    heatSource_(ptf.heatSource_),
     q_(ptf.q_, mapper),
     alphaEffName_(ptf.alphaEffName_),
     CpName_(ptf.CpName_)
@@ -77,6 +95,7 @@ turbulentHeatFluxTemperatureFvPatchScalarField
 )
 :
     fixedGradientFvPatchScalarField(p, iF),
+    heatSource_(heatSourceTypeNames_.read(dict.lookup("heatSource"))),
     q_("q", dict, p.size()),
     alphaEffName_(dict.lookup("alphaEff")),
     CpName_(dict.lookup("Cp"))
@@ -93,6 +112,7 @@ turbulentHeatFluxTemperatureFvPatchScalarField
 )
 :
     fixedGradientFvPatchScalarField(thftpsf),
+    heatSource_(thftpsf.heatSource_),
     q_(thftpsf.q_),
     alphaEffName_(thftpsf.alphaEffName_),
     CpName_(thftpsf.CpName_)
@@ -107,6 +127,7 @@ turbulentHeatFluxTemperatureFvPatchScalarField
 )
 :
     fixedGradientFvPatchScalarField(thftpsf, iF),
+    heatSource_(thftpsf.heatSource_),
     q_(thftpsf.q_),
     alphaEffName_(thftpsf.alphaEffName_),
     CpName_(thftpsf.CpName_)
@@ -156,7 +177,33 @@ void turbulentHeatFluxTemperatureFvPatchScalarField::updateCoeffs()
     const scalarField& Cpp =
         patch().lookupPatchField<volScalarField, scalar>(CpName_);
 
-    gradient() = q_/(Cpp*alphaEffp);
+    switch (heatSource_)
+    {
+        case hsPower:
+        {
+            const scalar Ap = gSum(patch().magSf());
+            gradient() = q_/(Ap*Cpp*alphaEffp);
+            break;
+        }
+        case hsFlux:
+        {
+            gradient() = q_/(Cpp*alphaEffp);
+            break;
+        }
+        default:
+        {
+            FatalErrorIn
+            (
+                "turbulentHeatFluxTemperatureFvPatchScalarField::"
+                "updateCoeffs()"
+            )   << "Unknown heat source type. Valid types are: "
+                << heatSourceTypeNames_ << nl << exit(FatalError);
+        }
+    }
 
     fixedGradientFvPatchScalarField::updateCoeffs();
 }
@@ -165,6 +212,8 @@ void turbulentHeatFluxTemperatureFvPatchScalarField::updateCoeffs()
 void turbulentHeatFluxTemperatureFvPatchScalarField::write(Ostream& os) const
 {
     fixedGradientFvPatchScalarField::write(os);
+    os.writeKeyword("heatSource") << heatSourceTypeNames_[heatSource_]
+        << token::END_STATEMENT << nl;
     q_.writeEntry("q", os);
     os.writeKeyword("alphaEff") << alphaEffName_ << token::END_STATEMENT << nl;
     os.writeKeyword("Cp") << CpName_ << token::END_STATEMENT << nl;
diff --git a/src/turbulenceModels/incompressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.H b/src/turbulenceModels/incompressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.H
index 848c9ca403ffbf4e1e703cfee37628efaaf762e5..1ba9a7725396ce1d3db7c54b63cd22505113b724 100644
--- a/src/turbulenceModels/incompressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.H
+++ b/src/turbulenceModels/incompressible/RAS/derivedFvPatchFields/turbulentHeatFluxTemperature/turbulentHeatFluxTemperatureFvPatchScalarField.H
@@ -26,7 +26,22 @@ Class
     Foam::turbulentHeatFluxTemperatureFvPatchScalarField
 
 Description
-    Fixed heat flux boundary condition for temperature.
+    Fixed heat flux boundary condition for temperature, applied as a fixed
+    temperature gradient. The heat source is specified either as an absolute
+    power [W] or as a flux [W/m2].
+
+    Example usage:
+
+        hotWall
+        {
+            type            turbulentHeatFluxTemperature;
+            heatSource      flux;        // power [W]; flux [W/m2]
+            q               uniform 10;  // heat power or flux
+            alphaEff        alphaEff;    // alphaEff field name;
+                                         // alphaEff in [kg/m/s]
+            Cp              Cp;          // Cp field name; Cp in [J/kg/K]
+            value           uniform 300; // initial temperature value
+        }
 
 SourceFiles
     turbulentHeatFluxTemperatureFvPatchScalarField.C
@@ -38,6 +53,7 @@ SourceFiles
 
 #include "fvPatchFields.H"
 #include "fixedGradientFvPatchFields.H"
+#include "NamedEnum.H"
 
 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
 
@@ -54,16 +70,37 @@ class turbulentHeatFluxTemperatureFvPatchScalarField
 :
     public fixedGradientFvPatchScalarField
 {
-// Private data
+public:
+
+    // Data types
+
+        //- Enumeration listing the possible heat source input modes
+        enum heatSourceType
+        {
+            hsPower,
+            hsFlux
+        };
+
+
+private:
+
+    // Private data
+
+        //- Heat source type names
+        static const NamedEnum<heatSourceType, 2> heatSourceTypeNames_;
+
+        //- Heat source type
+        heatSourceType heatSource_;
 
-    //- Heat flux [W/m2]
-    scalarField q_;
+        //- Heat power [W] or flux [W/m2]
+        //  NOTE: to be divided by density, rho, if used in kinematic form
+        scalarField q_;
 
-    //- Name of effective thermal diffusivity field
-    word alphaEffName_;
+        //- Name of effective thermal diffusivity field
+        word alphaEffName_;
 
-    //- Name of specific heat capacity field
-    word CpName_;
+        //- Name of specific heat capacity field
+        word CpName_;
 
 
 public:
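
The usage example in the header above shows the flux mode; with heatSource set to power, the same entries apply but q is interpreted as the total power [W] and is divided by the summed patch area in updateCoeffs(). A sketch, assuming an otherwise identical set-up:

    heatedPlate
    {
        type            turbulentHeatFluxTemperature;
        heatSource      power;        // q is the total power [W]
        q               uniform 100;  // 100 W spread over the patch area
        alphaEff        alphaEff;     // alphaEff field name
        Cp              Cp;           // Cp field name
        value           uniform 300;  // initial temperature value
    }
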
diff --git a/tutorials/heatTransfer/buoyantBoussinesqSimpleFoam/hotRoom/0/T b/tutorials/heatTransfer/buoyantBoussinesqSimpleFoam/hotRoom/0/T
index 76b8877c7e780d42f65a1520dcaba50fa64795d8..18610a300b685b169a045233b2775105e8767351 100644
--- a/tutorials/heatTransfer/buoyantBoussinesqSimpleFoam/hotRoom/0/T
+++ b/tutorials/heatTransfer/buoyantBoussinesqSimpleFoam/hotRoom/0/T
@@ -23,411 +23,7 @@ boundaryField
     floor
     {
         type            fixedValue;
-        value           nonuniform List<scalar> 
-400
-(
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-600
-600
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-600
-600
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-)
-;
+        value           uniform 300;
     }
     ceiling
     {
diff --git a/tutorials/heatTransfer/buoyantBoussinesqSimpleFoam/hotRoom/0/T.org b/tutorials/heatTransfer/buoyantBoussinesqSimpleFoam/hotRoom/0/T.org
index 76b8877c7e780d42f65a1520dcaba50fa64795d8..18610a300b685b169a045233b2775105e8767351 100644
--- a/tutorials/heatTransfer/buoyantBoussinesqSimpleFoam/hotRoom/0/T.org
+++ b/tutorials/heatTransfer/buoyantBoussinesqSimpleFoam/hotRoom/0/T.org
@@ -23,411 +23,7 @@ boundaryField
     floor
     {
         type            fixedValue;
-        value           nonuniform List<scalar> 
-400
-(
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-600
-600
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-600
-600
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-300
-)
-;
+        value           uniform 300;
     }
     ceiling
     {
diff --git a/tutorials/lagrangian/porousExplicitSourceReactingParcelFoam/verticalChannel/0/omega b/tutorials/lagrangian/porousExplicitSourceReactingParcelFoam/verticalChannel/0/omega
index f9b551e72ba2b71d778ef4f51db0e50aa213ae31..0538e96fb771270fe8c72901c66df0f48c21cafd 100644
--- a/tutorials/lagrangian/porousExplicitSourceReactingParcelFoam/verticalChannel/0/omega
+++ b/tutorials/lagrangian/porousExplicitSourceReactingParcelFoam/verticalChannel/0/omega
@@ -41,7 +41,7 @@ boundaryField
         type            compressible::turbulentMixingLengthFrequencyInlet;
         mixingLength    0.007;
         k               k;
-        value           uniform 4.5-3;
+        value           uniform 4.5e-3;
     }
     outlet
     {