From 6e40e2ed06a8eb8ae961d7ee33d052ad55ab2e46 Mon Sep 17 00:00:00 2001
From: Henry <Henry>
Date: Mon, 13 Jun 2011 11:31:17 +0100
Subject: [PATCH] Moved doc/changes to OpenFOAM-nonRelease

---
 doc/changes/dynamicCode.org      | 243 -------------------------------
 doc/changes/inotify.txt          | 110 --------------
 doc/changes/pointAndEdgeSync.txt |  41 ------
 doc/changes/splitCyclic.txt      | 128 ----------------
 doc/changes/staticLinkage.txt    |  35 -----
 5 files changed, 557 deletions(-)
 delete mode 100644 doc/changes/dynamicCode.org
 delete mode 100644 doc/changes/inotify.txt
 delete mode 100644 doc/changes/pointAndEdgeSync.txt
 delete mode 100644 doc/changes/splitCyclic.txt
 delete mode 100644 doc/changes/staticLinkage.txt

diff --git a/doc/changes/dynamicCode.org b/doc/changes/dynamicCode.org
deleted file mode 100644
index a6d9dbbe8e1..00000000000
--- a/doc/changes/dynamicCode.org
+++ /dev/null
@@ -1,243 +0,0 @@
-# -*- mode: org; -*-
-#
-#+TITLE: =dynamicCode=: Dynamic code compilation
-#+AUTHOR: OpenCFD Ltd.
-#+DATE: TBA
-#+LINK: http://www.openfoam.com
-#+OPTIONS: author:nil ^:{}
-# Copyright (c) 2011 OpenCFD Ltd.
-
-* Dictionary preprocessing directive: =#codeStream=
-  This is a dictionary preprocessing directive (=functionEntry=) which takes a
-  snippet of OpenFOAM C++ code that is compiled and executed to provide the
-  actual dictionary entry. The snippet is provided as several sections of C++
-  code which are inserted into a template:
-  - =code= section: the actual body of the code. It is called with the
-    arguments =Ostream& os, const dictionary& dict= and the C++ code can do a
-    =dict.lookup= to find current dictionary values.
-  - optional =codeInclude= section: any #include statements to include
-    OpenFOAM files.
-  - optional =codeOptions= section: any extra compilation flags to be added to
-    =EXE_INC= in =Make/options=. These are usually =-I= include-directory
-    options.
-  - optional =codeLibs= section: any extra compilation flags to be added to
-    =LIB_LIBS= in =Make/options=.
-
-  To ease inputting multi-line code there is the =#{ #}= syntax. Anything in
-  between these two delimiters becomes a string with all newlines, quotes etc.
-  preserved.
-
-  Example: look up dictionary entries and do some calculation
-  #+BEGIN_SRC c++
-    startTime       0;
-    endTime         100;
-    ..
-    writeInterval   #codeStream
-    {
-        code
-        #{
-            scalar start = readScalar(dict.lookup("startTime"));
-            scalar end = readScalar(dict.lookup("endTime"));
-            label nDumps = 5;
-            os << ((end-start)/nDumps);
-        #};
-    };
-  #+END_SRC
-
-* Implementation
-  - the =#codeStream= entry reads the dictionary following it, extracts the
-    =code=, =codeInclude=, =codeOptions= and =codeLibs= sections (these are
-    just strings) and calculates the SHA1 checksum of the contents.
-  - it copies a template file
-    =(etc/codeTemplates/dynamicCode/codeStreamTemplate.C)= or
-    =($FOAM_CODE_TEMPLATES/codeStreamTemplate.C)=, substituting all
-    occurrences of =code=, =codeInclude=, =codeOptions= and =codeLibs=.
-  - it writes the library source files to =dynamicCode/<SHA1>= and compiles
-    them using =wmake libso=.
-  - the resulting library is generated under
-    =dynamicCode/platforms/$WM_OPTIONS/lib= and is loaded (=dlopen=, =dlsym=)
-    and the function executed (a sketch of this pattern follows below).
-  - the function will have written its output into the =Ostream=, which is
-    then used to construct the entry that replaces the whole =#codeStream=
-    section.
-  - using the SHA1 means that the same code is only compiled and loaded once.
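-
-  As an aside, the load-and-execute step follows the standard POSIX
-  =dlopen=/=dlsym= pattern. The sketch below is purely illustrative - the
-  library and symbol names are placeholders, not the actual OpenFOAM
-  internals - and only shows the general mechanism:
-  #+BEGIN_SRC c++
-    // Minimal sketch of the dlopen/dlsym pattern; not OpenFOAM source code.
-    #include <dlfcn.h>
-    #include <cstdio>
-
-    // The generated entry point would really take (Ostream&, const dictionary&)
-    typedef void (*snippetFunction)();
-
-    int main()
-    {
-        // library name encodes the SHA1 of the code sections
-        void* handle = dlopen("libcodeStream_<SHA1>.so", RTLD_NOW | RTLD_GLOBAL);
-        if (!handle)
-        {
-            std::fprintf(stderr, "%s\n", dlerror());
-            return 1;
-        }
-
-        // look up the generated function and execute it
-        void* sym = dlsym(handle, "codeStream_<SHA1>");
-        if (sym)
-        {
-            reinterpret_cast<snippetFunction>(sym)();
-        }
-
-        dlclose(handle);
-        return 0;
-    }
-  #+END_SRC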
-
-* Boundary condition: =codedFixedValue=
-  This uses the same framework as codeStream to have an in-line specialised
-  =fixedValueFvPatchField=.
-  #+BEGIN_SRC c++
-    outlet
-    {
-        type            codedFixedValue;
-        value           uniform 0;
-        redirectType    ramp;
-
-        code
-        #{
-            operator==(min(10, 0.1*this->db().time().value()));
-        #};
-    }
-  #+END_SRC
-  By default it always includes =fvCFD.H= and adds the =finiteVolume= library
-  to the include search path and the linked libraries. Any other libraries
-  need to be added using the =codeInclude=, =codeLibs= and =codeOptions=
-  sections or provided through the =libs= entry in =system/controlDict=.
-
-  A special form is where the code is not supplied in-line but instead comes
-  from the =codeDict= dictionary in the =system= directory. It should contain
-  a =ramp= entry:
-  #+BEGIN_SRC c++
-    ramp
-    {
-        code
-        #{
-            operator==(min(10, 0.1*this->db().time().value()));
-        #};
-    }
-  #+END_SRC
-  The advantage of this indirect form is that it supports =runTimeModifiable=,
-  so any change to the code is picked up on the next iteration.
-
-* Function object: =coded=
-  This uses the same framework as codeStream to have an in-line specialised
-  =functionObject=.
-  #+BEGIN_SRC c++
-    functions
-    {
-        pAverage
-        {
-            functionObjectLibs ("libutilityFunctionObjects.so");
-            type            coded;
-            redirectType    average;
-            outputControl   outputTime;
-            code
-            #{
-                const volScalarField& p =
-                    mesh().lookupObject<volScalarField>("p");
-                Info<< "p avg:" << average(p) << endl;
-            #};
-        }
-    }
-  #+END_SRC
-  This dynamic code framework uses the following entries (see the sketch
-  below for an example using several of them):
-  + =codeData=: declaration (in the .H file) of local (null-constructible) data
-  + =codeInclude=: (.C file) usual include section
-  + =codeRead=: (.C file) executed upon dictionary read
-  + =codeExecute=: (.C file) executed upon functionObject execute
-  + =codeEnd=: (.C file) executed upon functionObject end
-  + =code=: (.C file) executed upon functionObject write. This is the usual
-    place for a simple functionObject.
-  + =codeLibs=, =codeOptions=: as above
-
-  =coded= by default always includes =fvCFD.H= and adds the =finiteVolume=
-  library to the include search path and the linked libraries. Any other
-  libraries need to be added explicitly (see the =codeInclude=, =codeLibs= and
-  =codeOptions= sections) or provided through the =libs= entry in
-  =system/controlDict=.
-
-  =coded= is an =OutputFilter= type =functionObject= so it supports the usual
-  entries:
-  + =region=: non-default region
-  + =enabled=: enable/disable
-  + =outputControl=: =timeStep= or =outputTime=
-  + =outputInterval=: in case of =timeStep=
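-
-  As an illustration of the =codeData=/=codeRead=/=codeExecute= entries, here
-  is a minimal sketch of a =coded= functionObject that tracks the running
-  maximum of =p= (assuming the entries behave as described above; the object
-  name =maxP= and the field name are only examples):
-  #+BEGIN_SRC c++
-    maxP
-    {
-        functionObjectLibs ("libutilityFunctionObjects.so");
-        type            coded;
-        redirectType    maxP;
-
-        codeData
-        #{
-            scalar maxP_;           // member data, default-constructible
-        #};
-
-        codeRead
-        #{
-            maxP_ = -GREAT;         // (re)initialise on dictionary read
-        #};
-
-        codeExecute
-        #{
-            const volScalarField& p =
-                mesh().lookupObject<volScalarField>("p");
-            maxP_ = max(maxP_, max(p).value());
-        #};
-
-        code                        // executed on write
-        #{
-            Info<< "max(p) so far: " << maxP_ << endl;
-        #};
-    }
-  #+END_SRC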
-
-* Security
-  Allowing the case to execute C++ code does introduce security risks. A
-  third-party case might have a =#codeStream{#code system("rm -rf .");};=
-  hidden somewhere in a dictionary. =#codeStream= is therefore not enabled by
-  default; it has to be enabled in the system-wide =controlDict=:
-  #+BEGIN_SRC c++
-    InfoSwitches
-    {
-        // Allow case-supplied C++ code (#codeStream, codedFixedValue)
-        allowSystemOperations 1;
-    }
-  #+END_SRC
-
-* Field manipulation
-  Fields are read in as =IOdictionary= so they can be cast to provide access
-  to the mesh:
-  #+BEGIN_SRC c++
-    internalField #codeStream
-    {
-        codeInclude
-        #{
-            #include "fvCFD.H"
-        #};
-
-        code
-        #{
-            const IOdictionary& d = dynamicCast<const IOdictionary>(dict);
-            const fvMesh& mesh = refCast<const fvMesh>(d.db());
-            scalarField fld(mesh.nCells(), 12.34);
-            fld.writeEntry("", os);
-        #};
-
-        codeOptions
-        #{
-            -I$(LIB_SRC)/finiteVolume/lnInclude
-        #};
-
-        codeLibs
-        #{
-            -lfiniteVolume
-        #};
-    };
-  #+END_SRC
-
-  Note: the above field initialisation has the problem that the boundary
-  conditions are not evaluated, so e.g. processor boundaries will not hold the
-  opposite cell value.
-
-* Pitfalls
-  The syntax of =#codeStream= can be quite hard to get right. These are some
-  common pitfalls:
-  + the =code= string has to be a valid set of C++ statements so it has to end
-    in a ';'
-  + the C++ code, upon execution, has to print a valid dictionary entry. In
-    the above example it prints 'uniform 12.34;'. Note the ';' at the end. It
-    is advised to use =writeEntry= as above to handle this and also
-    e.g. binary streams (=codeStream= inherits the stream type from the
-    dictionary)
-  + the =code=, =codeInclude=, =codeOptions= and =codeLibs= entries are just
-    like any other dictionary string entry so there has to be a ';' after the
-    string
-  + the =#codeStream= entry (itself a dictionary) has to end in a ';'
-
-* Exceptions
-  There are unfortunately some exceptions to the above field manipulation.
-  The following applications read the field as a dictionary, not as an
-  =IOdictionary=:
-  - =foamFormatConvert=
-  - =changeDictionary=
-  - =foamUpgradeCyclics=
-  These applications will usually switch off all '#' processing, which just
-  preserves the entries as strings (including all formatting).
-  =changeDictionary= has the =-enableFunctionEntries= option for cases where
-  one does want to evaluate any preprocessing in the changeDictionaryDict.
-
-* Other
-  - paraFoam: paraview currently does not export symbols on loaded libraries
-    (more specifically: it does not add 'RTLD_GLOBAL' to the dlopen flags) so
-    one has to add the additional libraries used (libfiniteVolume,
-    lib..) either to the =codeLibs= linkage section (preferred) or to the
-    'libs' entry in system/controlDict to prevent an error of the form
-
-      --> FOAM FATAL IO ERROR:
-      Failed loading library "libcodeStream_3cd388ceb070a2f8b0ae61782adbc21c5687ce6f.so"
-
-    By default =#codeStream= links in =libOpenFOAM=; =codedFixedValue= and the
-    =coded= functionObject link in both =libOpenFOAM= and =libfiniteVolume=.
-
-  - parallel running has not been tested much. What about distributed-data
-    (i.e. non-=NFS=) parallel runs?
-
-  - codedFixedValue could be extended to provide local data, however in terms
-    of complexity this is not really worthwhile.
-
-  - all templates come from (in order of preference)
-    - =FOAM_TEMPLATE_DIR=
-    - =~/.OpenFOAM/dev/codeTemplates/dynamicCode=
-    - =etc/codeTemplates/dynamicCode=
-
-  - any generated C++ code will display line numbers relative to the original
-    dictionary (using the '#line' directive) to ease finding compilation
-    errors.
diff --git a/doc/changes/inotify.txt b/doc/changes/inotify.txt
deleted file mode 100644
index 74e2fc7bb4a..00000000000
--- a/doc/changes/inotify.txt
+++ /dev/null
@@ -1,110 +0,0 @@
-http://www.openfoam.com
-Copyright (c) 2011 OpenCFD Ltd.
-
-Cleanup of automatic regIOobject rereading.
-
-- all files (usually only IOdictionary) that need to be monitored
-should be registered using MUST_READ_IF_MODIFIED. MUST_READ should
-be used for objects that do not need to be re-read (e.g. fields).
-In the old system it would actually monitor e.g. 0/U and constant/polyMesh
-files.
-I've temporarily added a warning in IOdictionary if constructed with MUST_READ,
-and the same for IOList, IOField and IOMap if constructed with
-MUST_READ_IF_MODIFIED (or is rereading supported?). Please let me know if
-something does not work or you see the warning
-  "Dictionary constructed with IOobject::MUST_READ instead of IOobject::MUST_READ_IF_MODIFIED." << nl
-
-- any monitored and modified file will get reloaded from the exact path
-that was monitored. In the old system it could do a re-search through all
-times.
-
-- all reductions to synchronise status on different processors are done with
-a single reduction instead of one reduction per registered object. This could
-be quite a gain on large numbers of processors.
-
-- all file monitoring is done by an instance of 'fileMonitor' in the Time
-class. The fileMonitor class can be found in OSspecific. It uses either
-timestamps as before or the (Linux-specific) 'inotify' system framework
-(available only if compiled with -DFOAM_USE_INOTIFY).
-
-- the monitoring can be done in one of four modes as set by
-  OptimisationSwitches::fileModificationChecking
-  (see the controlDict sketch at the end of this file):
-
-  - timeStamp : old behaviour: all nodes check the timestamp
-  - inotify : using inotify instead of timestamps
-  - timeStampMaster, inotifyMaster : only the master node checks the file;
-    only the master node reads it and distributes it to the slaves. This
-    makes runTimeModifiable possible on distributed runs (see below).
-
-- distributed running:
-  - set fileModificationChecking to e.g. timeStampMaster
-  - decompose a case, e.g. cavity
-  - copy system and constant to processor0/
-  - put all the processor* directories on the wanted nodes inside
-    the case directory. E.g.
-    - on the master have /tmp/cavity/processor0
-    - on slaveN have /tmp/cavity/processorN
-    - so to reiterate:
-      - there is no need for cavity/constant or cavity/system; all the
-        dictionaries are only in processor0/constant or processor0/system
-      - the slave processor directories have no system directory and their
-        constant directory only contains the mesh.
-  - start the job in distributed mode by specifying the slave roots
-    (so one fewer than the number of processors) with
-    the -roots command-line option:
-
-      mpirun -np 4 icoFoam -roots '("/tmp" "/tmp" "/tmp")' -parallel
-
-  - the alternative to the -roots option is to have a
-    cavity/system/decomposeParDict on the master with (see the sketch below)
-      distributed yes;
-      roots ("/tmp" "/tmp" "/tmp");
-
-  - as a convenience for cases where the slave roots are identical,
-    a single root entry is interpreted as being the same for all slaves.
-    With the -roots command-line option, this can take one of two forms:
-
-      mpirun -np 4 icoFoam -roots '("/tmp")' -parallel
-
-    or simply
-
-      mpirun -np 4 icoFoam -roots '"/tmp"' -parallel
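-
-For example, a minimal system/decomposeParDict for the distributed cavity run
-above might look as follows (a sketch only; the decomposition method and
-coefficients are illustrative):
-
-  numberOfSubdomains 4;
-
-  method          simple;
-
-  simpleCoeffs
-  {
-      n           (2 2 1);
-      delta       0.001;
-  }
-
-  distributed     yes;
-
-  roots           ("/tmp" "/tmp" "/tmp");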
-
-Details:
-- timeStampMaster, inotifyMaster : this works only for IOdictionaries that
-are MUST_READ_IF_MODIFIED. It means that slaves read exactly the same
-dictionary as the master, so it cannot be used for dictionaries that contain
-e.g. mesh-specific information.
-
-- note: even if the file does not exist (e.g. with timeStampMaster) it
-will still register a local file with the fileMonitor. This is so that the
-fileMonitor stays synchronised. So take care when reading/creating a
-non-parallel dictionary.
-
-- inotify is a monitoring framework used to monitor changes in
-lots of files (e.g. used in desktop search engines like Beagle). You specify
-files to monitor and then get notified of any changes to these files.
-It does not need timestamps, so there is no need for fileModificationSkew
-to allow for time differences. (There can still temporarily be a difference
-in modified status between different processors due to NFS lagging.) The big
-problem is that it does not work over NFS3 (not sure about NFS4).
-
-- fileMonitor stores two hashtables per file so there is a small overhead
-adding and removing files from monitoring.
-
-- if runTimeModifiable is false at the start of the run no files will get
-monitored; however, if runTimeModifiable gets set to false during the run the
-files will still get monitored (though never reloaded). This is only a
-hypothetical problem in that the kernel still stores events for the monitored
-files. However inotify is very efficient; e.g. it gets used to track changes
-on file systems for desktop search engines.
-
-- in the old system one could call modified() on any object and get
-an up-to-date state. In the new system it will return the state from
-the last runTime++ (which, if it triggered any re-reads, will have reset the
-state anyway).
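-
-For reference, the mode is selected in the OptimisationSwitches section of the
-global etc/controlDict; a minimal sketch (the skew value shown is only
-illustrative):
-
-  OptimisationSwitches
-  {
-      // timeStamp | inotify | timeStampMaster | inotifyMaster
-      fileModificationChecking timeStampMaster;
-
-      // seconds of slack when comparing timestamps (timeStamp modes only)
-      fileModificationSkew 10;
-  }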
diff --git a/doc/changes/pointAndEdgeSync.txt b/doc/changes/pointAndEdgeSync.txt
deleted file mode 100644
index 7dd385acce1..00000000000
--- a/doc/changes/pointAndEdgeSync.txt
+++ /dev/null
@@ -1,41 +0,0 @@
-http://www.openfoam.com
-Copyright (c) 2011 OpenCFD Ltd.
-
-Some background on the new structure to synchronise point and edge data. The
-building blocks:
-
-- globalIndex : globally consecutive numbering of (untransformed) data. It
-consists of a single label which starts at 0 on processor 0 and is numbered
-consecutively on consecutive processors. The globalIndex class contains
-functions to convert to/from processor and local index.
-
-- globalIndexAndTransform : all the transformations in a mesh. Because the
-transformations (cyclics, processorCyclics) need to fill space there can
-be only three independent transforms. This class contains functions to encode
-local index, processor and transformation into a labelPair.
-
-- mapDistribute : constructed from local and remote data; it works out a
-compact storage scheme, returns the corresponding indices into the local
-storage and calculates a schedule to get the local and remote data into that
-storage. The wanted data is
-  - untransformed: labelList(List) in globalIndex numbering
-  - transformed: labelPairList(List) in globalIndexAndTransform
-See also mapDistribute.H
-
-- globalMeshData : works out and stores a mapDistribute to get hold
-of coupled point or edge data:
-  - globalPointSlavesMap() : the map to get all coupled point data
-    into a compact form
-  - globalPointSlaves : per point (on the patch of coupled faces) the
-    indices into the compact data corresponding to untransformed connected
-    points
-  - globalPointTransformedSlaves : same but for transformed connected
-    points
-See e.g. syncPointData which applies a reduction operator to data on
-coupled points. Note that it needs to know whether the data is a position
-(since it might need the separation() vector).
-
-These structures get used in e.g.
-- syncTools : general synchronisation on points, edges and faces. The point
-and edge synchronisation are thin wrappers around the globalMeshData
-functionality.
-- volPointInterpolation : uses a mix of globalMeshData (for synchronising
-collocated points) and patch-wise handling (for separated points).
diff --git a/doc/changes/splitCyclic.txt b/doc/changes/splitCyclic.txt
deleted file mode 100644
index 99f42b31949..00000000000
--- a/doc/changes/splitCyclic.txt
+++ /dev/null
@@ -1,128 +0,0 @@
-http://www.openfoam.com
-Copyright (c) 2011 OpenCFD Ltd.
-
-Short overview of the changes to have cyclics split into two halves.
-
-Cyclics
--------
-The two cyclic halves are now split like processor patches. There should be no
-difference in running.
-
-Advantages:
-- decomposed cyclics can now be handled properly; a decomposed cyclic just
-needs to preserve which cyclic patch it originates from.
-- we can now construct a table of global transformations and handle
-points/edges/cells with transformations.
-- face ordering after topological changes becomes much easier since we
-now preserve which half the face comes from.
-- cyclic handling becomes more consistent with processor handling and can
-quite often be handled by the same code.
-- transformation tensors now reduce to a single entry.
-
-The disadvantages:
-- a patch-wise loop might now need to store data to pass to the neighbour half
-since it is no longer handled in a single patch.
-- decomposed cyclics now require overlapping communications so will
-only work in 'nonBlocking' mode or 'blocking' (= buffered) mode but not
-in 'scheduled' mode. The underlying message-passing library
-needs to support overlapping communications with message tags.
-- it is quite a code change and there might be some oversights.
-- once converted (see foamUpgradeCyclics below) cases are not backwards
-compatible with previous versions.
-
-
-blockMesh
----------
-blockMeshDict now allows patch definition using the construct-from-dictionary
-constructor. This helps when defining patches that require additional input,
-e.g. directMapped and now cyclic:
-
-boundary
-(
-    sides2_half0
-    {
-        type            cyclic;
-        neighbourPatch  sides2_half1;
-        faces           ((2 4 5 3));
-    }
-
-The syntax is, like the polyMesh/boundary file, a list of dictionaries with
-one additional entry 'faces' for the block faces. The example above shows the
-new required entry 'neighbourPatch' for a cyclic.
-
-blockMesh still reads the old format. For a cyclic it will automatically
-introduce two patches for the halves, with names xxx_half0 and xxx_half1.
-
-
-foamUpgradeCyclics
-------------------
-This is a tool which reads the polyMesh/boundary file and any vol/surface/point
-fields and converts them.
-It will check whether anything needs to be converted, back up the current file
-to .old and split any cyclic patchFields into two entries (see the
-polyMesh/boundary sketch after the Mesh converters section below).
-
-
-Mesh converters
----------------
-Most mesh formats use cyclics in a single patch (i.e. the old way).
-The converters have been adapted to use the patch type 'oldCyclic' instead of
-'cyclic'. oldCyclic uses the 1.7.x automatic ordering but writes 'type cyclic',
-so foamUpgradeCyclics can be run afterwards to upgrade.
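-
-As an illustration of what the upgrade produces, a sketch of a split cyclic
-patch pair in polyMesh/boundary (patch names, face counts and start faces are
-purely illustrative):
-
-  sides1_half0
-  {
-      type            cyclic;
-      nFaces          50;
-      startFace       900;
-      neighbourPatch  sides1_half1;
-  }
-  sides1_half1
-  {
-      type            cyclic;
-      nFaces          50;
-      startFace       950;
-      neighbourPatch  sides1_half0;
-  }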
-
-
-decomposePar
-------------
-Decomposes cyclics into processorCyclic:
-
-    procBoundary0to1throughsides1_half0
-    {
-        type            processorCyclic;
-        nFaces          1000;
-        startFace       91350;
-        myProcNo        0;
-        neighbProcNo    1;
-        referPatch      sides1_half0;
-    }
-
-These have an additional 'referPatch' entry which gives the (cyclic) patch
-to use for any transformation.
-
-
-Details
--------
-- the cyclic patch dictionary has an entry neighbourPatch. The
-patch has new member functions:
-
-    //- Get neighbouring patchID
-    label neighbPatchID() const
-
-    //- Get neighbouring patch
-    const cyclicPolyPatch& neighbPatch()
-
-    //- Am I the owner half
-    bool owner()
-
-The cyclic still has forward() and reverse() transformations (with
-the reverse() equal to the neighbPatch().forward()).
-
-There is no transformLocalFace anymore; the ordering is the same for
-both halves.
-
-
-- 'pure' processor patches are now always coincident; they (should) have no
-transformation. As said above, cyclics are decomposed into a derived
-type 'processorCyclic'.
-
-
-- processor patches use overlapping communication with a different message
-tag. This maps straight through onto the MPI message tag. Each processor
-'interface' (processorPolyPatch, processorFvPatch, etc.) has a 'tag()'
-to use for communication.
-
-
-- when constructing a GeometricField from a dictionary it will explicitly
-check for missing entries for cyclic patches and exit with an error message
-advising to run foamUpgradeCyclics. (1.7.x will check if you are trying
-to run a case which has split cyclics.)
diff --git a/doc/changes/staticLinkage.txt b/doc/changes/staticLinkage.txt
deleted file mode 100644
index 916a5107ed0..00000000000
--- a/doc/changes/staticLinkage.txt
+++ /dev/null
@@ -1,35 +0,0 @@
-http://www.openfoam.com
-Copyright (c) 2011 OpenCFD Ltd.
-
-Static linkage:
-
-- compile the libraries as static objects:
-    cd src; ./Allwmake libo
-
-- in the desired application (e.g. interFoam) adapt Make/options
-  to link all indirect and direct dependencies as .o files:
-
-    sinclude $(GENERAL_RULES)/mplib$(WM_MPLIB)
-    sinclude $(RULES)/mplib$(WM_MPLIB)
-
-    EXE_LIBS = \
-        -lz $(PLIBS) \
-        $(FOAM_LIBBIN)/$(FOAM_MPI)/libPstream.o \
-        $(FOAM_LIBBIN)/libOSspecific.o \
-        $(FOAM_LIBBIN)/libtwoPhaseInterfaceProperties.o \
-        $(FOAM_LIBBIN)/libinterfaceProperties.o \
-        $(FOAM_LIBBIN)/libincompressibleTransportModels.o \
-        $(FOAM_LIBBIN)/libincompressibleTurbulenceModel.o \
-        $(FOAM_LIBBIN)/libincompressibleRASModels.o \
-        $(FOAM_LIBBIN)/libincompressibleLESModels.o \
-        $(FOAM_LIBBIN)/libLESdeltas.o \
-        $(FOAM_LIBBIN)/libLESfilters.o \
-        $(FOAM_LIBBIN)/libfiniteVolume.o \
-        $(FOAM_LIBBIN)/libmeshTools.o \
-        $(FOAM_LIBBIN)/libtriSurface.o \
-        $(FOAM_LIBBIN)/libfileFormats.o \
-        $(FOAM_LIBBIN)/libOpenFOAM.o
-
-- in Make/files add
-    SEXE = $(FOAM_USER_APPBIN)/interFoam-static
-
-- wmake exe
-
-- check with ldd
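-
-For reference, the resulting Make/files might then look like this minimal
-sketch (assuming the SEXE entry takes the place of the usual EXE entry; the
-source file name is that of the standard interFoam application):
-
-    interFoam.C
-
-    SEXE = $(FOAM_USER_APPBIN)/interFoam-static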