diff --git a/Manuals/assigning_nodal_values_to_function_at_boundary_values.py b/Manuals/assigning_nodal_values_to_function_at_boundary_values.py
new file mode 100755
index 0000000000000000000000000000000000000000..453802f0a8cbb583e8357d2bc593ad0d8ff08489
--- /dev/null
+++ b/Manuals/assigning_nodal_values_to_function_at_boundary_values.py
@@ -0,0 +1,173 @@
+#!/usr/bin/python3
+# I have been struggling with the following: given a list of node indices
+#
+# nodes = [1, 10, 22, ..., 32]
+#
+# of nodes at the boundary and an associated list of values
+#
+# node_values = [10.2, 1.2, 0.1, ..., -0.2]
+#
+# how do I assign these values to a function defined on the mesh, in order to use it
+# as a boundary condition instead of an analytical expression or a constant?
+# (A sketch of the boundary condition usage is at the end of this script.)
+#
+# After searching thoroughly I managed to find a solution that works for low-order
+# basis functions, for both scalars and vectors. I put my solution here to see
+# whether this is the way to do it and how it can be extended to higher-order
+# basis functions. Also, in case it helps someone:
+
+import numpy
+from time import time
+from dolfin import *
+
+def dofs_to_verts(mesh,boundaryFunction,boundaryValue):
+    """
+    Compute the mapping from vertices to degrees of freedom only at the boundary.
+    This saves time, since we do not have to loop over all the cells of the domain.
+    If the for loop were replaced by a single C++ function call, it would be much faster.
+
+    INPUTS ::
+        mesh :: The mesh where we want to compute the mapping between vertices
+                and degrees of freedom.
+
+        boundaryFunction :: A MeshFunction which assigns integer values to
+                            edges of the mesh.
+
+        boundaryValue :: An integer value which we use to identify the boundary
+                         edges we want to use. The MeshFunction should have
+                         this value assigned to some of the edges.
+    """
+
+    # get the connectivity of the mesh
+    mesh.init(1)    # make sure the edges are created
+    mesh.init(1, 2) # compute the edge-to-cell connectivity
+    mesh_topology = mesh.topology() # get the mesh topology
+    conn12 = mesh_topology(1,2) # get the cells to which an edge belongs
+
+    # get the number of edges
+    n_edges = boundaryFunction.size()
+
+    # get the indices of the boundary edges, as given by the boundaryFunction
+    boundaryEdges = numpy.arange(n_edges)
+    boundaryEdges = boundaryEdges[boundaryFunction.array() == boundaryValue]
+
+    # Define the function spaces associated to the mesh
+    # for now only CG of order 1 works for sure
+    V = FunctionSpace(mesh, "CG", 1)
+    Vv = VectorFunctionSpace(mesh, "CG", 1, dim=2)
+
+    # Allocate memory for the arrays that store the mapping from
+    # vertex number to degree of freedom number for both scalar and vector
+    # valued functions (note that for vector valued functions we need two indices,
+    # since the x component and the y component have different dof indices)
+    vert_to_dofs = numpy.zeros(mesh.num_vertices(), dtype=numpy.uintp)
+    vectordofs_to_vert = numpy.zeros([2,mesh.num_vertices()], dtype=numpy.uintp)
+
+    # Get the degree of freedom maps
+    dm = V.dofmap()
+    dms = [Vv.sub(i).dofmap() for i in range(2)] # the i=0 refers to x component
+                                                 # the i=1 refers to y component
+
+    # get the cells of the mesh
+    mesh_cells = mesh.cells()
+
+    # Since we are only interested in the boundary vertices we just loop over
+    # the boundary edges as given by the MeshFunction and stored in boundaryEdges
+    # each time we loop we get the numbering of the degrees of freedom of the
+    # vertices of the cell that contains the boundary edge and we construct
+    # the mapping from vertex number to degree of freedom number
+    for edge in boundaryEdges:
+        # get the cell index to which the edge belongs
+        cell_ind = conn12(edge)[0]
+        # get the vertices of the cell
+        vert_inds = mesh_cells[cell_ind]
+
+        # add the degree of freedom numbering to the vertices
+        vert_to_dofs[vert_inds] = numpy.array(dm.cell_dofs(cell_ind),dtype=numpy.uintp)
+
+        # now we do the same but for vector valued quantities
+        # since, in this case, our vector has two components (x and y)
+        # we have to iterate over the two sub dof maps
+        for i, dms_i in enumerate(dms):
+            vectordofs_to_vert[i,vert_inds] = numpy.array(dms_i.cell_dofs(cell_ind),dtype=numpy.uintp)
+
+    # Return vertex to dof mapping
+    return vert_to_dofs, vectordofs_to_vert
+
+# The following is an example of how to assign values at the nodes to both a
+# scalar and a vector valued function
+
+# create a unit square mesh (any other mesh works)
+mesh = UnitSquareMesh(20,20)
+
+# --------------------------------------------------------------------------
+# this part is optional; if you already know the indices of the vertices at
+# the boundary, just skip it
+
+# define the boundary mesh
+# (note: newer dolfin versions require a type argument, e.g. BoundaryMesh(mesh, "exterior"))
+boundaryMesh = BoundaryMesh(mesh)
+
+# if you don't already have the indices of the boundary nodes,
+# get them from the vertex map of the boundary mesh
+# (in newer dolfin versions this map is available as boundaryMesh.entity_map(0))
+nodes = boundaryMesh.vertex_map().array()
+
+# --------------------------------------------------------------------------
+
+# --------------------------------------------------------------------------
+# This part is just to generate a boundary function which assigns a value
+# boundary_value to the boundary edges
+
+boundary_value = 10
+# get the global indices of the boundary edges
+edge_map = boundaryMesh.cell_map().array()
+# get the number of edges at the boundary
+n_boundaryEdges = edge_map.size
+
+# define the mesh function that marks the boundary edges
+# ('size_t' matches the numpy.uintp dtype of the values assigned below)
+boundaryFunction = MeshFunction('size_t',mesh,1)
+n_edges = boundaryFunction.array().size # the total number of edges
+
+# allocate memory for the array of boundary values
+boundary_values = numpy.zeros(n_edges,dtype='uintp')
+# assign boundary_value to the entries belonging to boundary edges
+boundary_values[edge_map] = boundary_value
+# assign the boundary_values to the boundary function
+boundaryFunction.set_values(boundary_values)
+
+# --------------------------------------------------------------------------
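+
+# Alternative sketch: dolfin's DomainBoundary subdomain marks all exterior facets
+# directly, so the manual construction above could also be written as follows
+# (boundaryFunction_alt is only a name used for this illustration):
+boundaryFunction_alt = MeshFunction('size_t', mesh, 1)
+boundaryFunction_alt.set_all(0)
+DomainBoundary().mark(boundaryFunction_alt, boundary_value)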
+
+# generate some random values to assign to the scalar function at the boundary nodes
+node_values_scalar = numpy.random.rand(nodes.size)
+# generate some random values to assign to the vector function at the boundary nodes
+node_values_vector = numpy.random.rand(2,nodes.size)
+
+start = time()
+# get the mapping of vertices at the boundary to degrees of freedom
+vert_to_dofs, vectordofs_to_vert = dofs_to_verts(mesh,boundaryFunction,boundary_value)
+
+print("It took :: %f s" % (time()-start))
+
+# assign vertex values at the boundary for the scalar function
+
+# generate the function space
+V = FunctionSpace(mesh,'Lagrange',1)
+# generate the scalar function
+myFunction = Function(V)
+# get the vector containing the data values at the degrees of freedom
+myFunction_vector = myFunction.vector()
+# use the mapping from vertices (nodes) to degrees of freedom to assign the
+# vertex values of the scalar function
+myFunction_vector[vert_to_dofs[nodes]] = node_values_scalar
+
+plot(myFunction)
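+
+# A minimal alternative sketch: for Lagrange (CG) degree 1 spaces, dolfin already
+# provides vertex_to_dof_map, which maps vertex indices directly to dof indices,
+# so the hand-written mapping above is not needed in that special case.
+# Higher-order spaces have dofs that are not attached to vertices, so a purely
+# vertex based map is not sufficient there.
+v2d = vertex_to_dof_map(V)
+myFunction_vector[v2d[nodes]] = node_values_scalar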
+
+# assign vertex values at the boundary for the vector function
+
+# generate the function space
+Vv = VectorFunctionSpace(mesh, "Lagrange", 1, dim=2)
+# generate the vector function
+myVectorFunction = Function(Vv)
+# get the vector containing the data values at the degrees of freedom
+myVectorFunction_vector = myVectorFunction.vector()
+# use the mapping from vertices (nodes) to degrees of freedom to assign the
+# vertex values of the vector function, component by component
+myVectorFunction_vector[vectordofs_to_vert[0,nodes]] = node_values_vector[0,:] # x component
+myVectorFunction_vector[vectordofs_to_vert[1,nodes]] = node_values_vector[1,:] # y component
+
+plot(myVectorFunction)
+
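+# The same sketch for the vector valued case: vertex_to_dof_map(Vv) is assumed to
+# return the dofs ordered vertex by vertex (x dof and y dof of vertex 0, then of
+# vertex 1, ...), so reshaping gives one row per vertex and one column per component.
+v2d_vec = vertex_to_dof_map(Vv).reshape((-1, 2))
+myVectorFunction_vector[v2d_vec[nodes, 0]] = node_values_vector[0, :] # x component
+myVectorFunction_vector[v2d_vec[nodes, 1]] = node_values_vector[1, :] # y component
+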
+interactive()
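+
+# How the interpolated functions could be used as boundary conditions (a sketch):
+# since boundaryFunction marks the boundary edges with boundary_value, the
+# functions can be passed to DirichletBC instead of an Expression or a Constant.
+bc_scalar = DirichletBC(V, myFunction, boundaryFunction, boundary_value)
+bc_vector = DirichletBC(Vv, myVectorFunction, boundaryFunction, boundary_value)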
diff --git a/Anleitungen/doNix.manual.md b/Manuals/doNix.manual.md
similarity index 100%
rename from Anleitungen/doNix.manual.md
rename to Manuals/doNix.manual.md
diff --git a/Plotskripte_Jim/links b/Meshes/Plotscript/links
similarity index 100%
rename from Plotskripte_Jim/links
rename to Meshes/Plotscript/links
diff --git a/Plotskripte_Jim/plot2d_vtu.py b/Meshes/Plotscript/plot2d_vtu.py
similarity index 100%
rename from Plotskripte_Jim/plot2d_vtu.py
rename to Meshes/Plotscript/plot2d_vtu.py
diff --git a/Plotskripte_Jim/plot3d_vtu.py b/Meshes/Plotscript/plot3d_vtu.py
similarity index 100%
rename from Plotskripte_Jim/plot3d_vtu.py
rename to Meshes/Plotscript/plot3d_vtu.py
diff --git a/Meshes/README.md b/Meshes/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..05907ad05974695be4f46f7ded190e3fabf15bb0
--- /dev/null
+++ b/Meshes/README.md
@@ -0,0 +1,5 @@
+# Meshes 
+The mesh data in the various folders is only stored here so that it can be plotted
+either with `paraview` or with the scripts found in the folder `Plotscript`.
+It is produced as output by the simulation class and is not reused. 
+Note that the scripts in `Plotscript` are courtesy of Jim Magiera. 
diff --git a/Jupyter_Notebook_Setup/README.md b/Setup/Jupyter_Notebook_Setup/README.md
similarity index 100%
rename from Jupyter_Notebook_Setup/README.md
rename to Setup/Jupyter_Notebook_Setup/README.md
diff --git a/Jupyter_Notebook_Setup/requirements.txt b/Setup/Jupyter_Notebook_Setup/requirements.txt
similarity index 100%
rename from Jupyter_Notebook_Setup/requirements.txt
rename to Setup/Jupyter_Notebook_Setup/requirements.txt
diff --git a/Jupyter_Notebook_Setup/startup.sh b/Setup/Jupyter_Notebook_Setup/startup.sh
similarity index 100%
rename from Jupyter_Notebook_Setup/startup.sh
rename to Setup/Jupyter_Notebook_Setup/startup.sh
diff --git a/Setup/README.md b/Setup/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..1b84f8fce9204eb029ab96a0019192cea5dc0503
--- /dev/null
+++ b/Setup/README.md
@@ -0,0 +1,91 @@
+# Numerical experiments for LDD-TPR
+This project provides a FEniCS based 2D domain decomposition code implementing an LDD solver for two-phase flow systems. 
+Here, two-phase flow system means that each subdomain can adopt either the full two-phase flow equations or the Richards/Richardson equation. 
+Flexible domain substructurings are possible as long as each subdomain has a polygonal boundary (see the sketch below). 
+
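+Subdomains are described as lists of polygon vertices. A minimal sketch of this
+convention (illustrative only, following the geometry definitions in this repository;
+`df` stands for `import dolfin as df`):
+
+    import dolfin as df
+
+    # the global domain: a 13 x 8 rectangle given by its corner points
+    subdomain0_vertices = [df.Point(0.0, 0.0),
+                           df.Point(13.0, 0.0),
+                           df.Point(13.0, 8.0),
+                           df.Point(0.0, 8.0)]
+
+    # every subdomain is a polygon; all of them are collected in one list
+    subdomain_def_points = [subdomain0_vertices]
+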
+## How to set up the latest FEniCS image in Docker
+Pull the latest FEniCS Docker image:
+
+    docker pull quay.io/fenicsproject/stable:latest
+
+
+## Create a Docker container named LDD-TPR with graphical display and share the folder `pwd`
+
+    docker run --dns=129.69.252.252 -it --env HOST_UID=$(id -u $USER) --env HOST_GID=$(id -g $USER) --env DISPLAY=unix$DISPLAY --device /dev/dri --volume /tmp/.X11-unix:/tmp/.X11-unix:rw --volume $(pwd):/home/fenics/shared --name LDD-TPR-fenics2019 quay.io/fenicsproject/stable:latest /bin/bash 
+
+
+If you want the container to be automatically deleted after exiting, add the `--rm` option.
+
+
+If you have trouble with the internet connection inside the container, use
+
+    --dns=YOUR_DNS_SERVER
+
+
+## Start the LDD-TPR container and step into it
+
+    docker start LDD-TPR && docker exec -ti -u fenics LDD-TPR /bin/bash -l
+
+
+## Useful Docker commands
+
+List all Docker containers
+
+    docker ps -a
+
+
+Remove a container
+
+    docker rm $container
+
+
+Remove all stopped containers
+
+    docker container prune
+
+
+List all images
+
+    docker images
+
+
+Remove an image
+
+    docker rmi $image
+
+
+# Troubleshooting
+
+**Problem**
+1. I can't install packages via `apt-get install` inside the container.
+
+2. I cannot create files or folders (no write permissions).
+
+**Solution**
+
+1. If the package is not found, first run `apt-get update` and then try again. If there is no connection, 
+check your DNS settings.
+
+2. In the container, execute the script `Rechtesetup/setpermissions.sh` once to gain write access in `/home/fenics/shared`:
+
+        cd /home/fenics/shared/Rechtesetup && sudo ./setpermissions.sh
+
+## Useful FEniCS links 
+
+
+- Forum; many questions may already have been asked there, and new ones can be asked if needed:
+
+    https://www.allanswered.com/community/s/fenics-project/
+
+
+- Python API reference (the one for the latest version practically does not exist yet):
+
+    https://fenicsproject.org/docs/dolfin/2017.2.0/python/programmers-reference/index.html
+
+
+- FEniCS git, in case you really want to look into the source files:
+
+    https://bitbucket.org/fenics-project/
+
+
+
diff --git a/Rechtesetup/setpermissions.sh b/Setup/Rechtesetup/setpermissions.sh
similarity index 100%
rename from Rechtesetup/setpermissions.sh
rename to Setup/Rechtesetup/setpermissions.sh
diff --git a/python_requirement.txt b/Setup/python_requirement.txt
similarity index 100%
rename from python_requirement.txt
rename to Setup/python_requirement.txt
diff --git a/helper_scripts/README.md b/helper_scripts/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f0d18efb9ac173ba54870d5c482751a6f00b4b93
--- /dev/null
+++ b/helper_scripts/README.md
@@ -0,0 +1,6 @@
+# Helper scripts 
+The script `merge_spacetime_errornorms.py` is part of `../LDDsimulation/helpers.py`,
+but has been adapted so that it can be used manually to merge the spacetime
+errornorm files produced by mesh studies.
diff --git a/Hilfsskripte/merge_spacetime_errornorms.py b/helper_scripts/merge_spacetime_errornorms.py
similarity index 100%
rename from Hilfsskripte/merge_spacetime_errornorms.py
rename to helper_scripts/merge_spacetime_errornorms.py
diff --git a/old_geometry/old_geometry.py b/old_geometry/old_geometry.py
deleted file mode 100644
index e19248f4dc950c0cc737acef7a1e71a93325999e..0000000000000000000000000000000000000000
--- a/old_geometry/old_geometry.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# global domain
-subdomain0_vertices = [df.Point(0.0,0.0), #
-                        df.Point(13.0,0.0),#
-                        df.Point(13.0,8.0),#
-                        df.Point(0.0,8.0)]
-
-interface12_vertices = [df.Point(0.0, 7.0),
-                        df.Point(9.0, 7.0),
-                        df.Point(10.5, 7.5),
-                        df.Point(12.0, 7.0),
-                        df.Point(13.0, 6.5)]
-# subdomain1.
-subdomain1_vertices = [interface12_vertices[0],
-                        interface12_vertices[1],
-                        interface12_vertices[2],
-                        interface12_vertices[3],
-                        interface12_vertices[4], # southern boundary, 12 interface
-                        subdomain0_vertices[2], # eastern boundary, outer boundary
-                        subdomain0_vertices[3]] # northern boundary, outer on_boundary
-
-# vertex coordinates of the outer boundaries. If it can not be specified as a
-# polygon, use an entry per boundary polygon. This information is used for defining
-# the Dirichlet boundary conditions. If a domain is completely internal, the
-# dictionary entry should be 0: None
-subdomain1_outer_boundary_verts = {
-    0: [interface12_vertices[4], #
-        subdomain0_vertices[2], # eastern boundary, outer boundary
-        subdomain0_vertices[3],
-        interface12_vertices[0]]
-}
-
-
-# interface23
-interface23_vertices = [df.Point(0.0, 5.0),
-                        df.Point(3.0, 5.0),
-                        # df.Point(6.5, 4.5),
-                        df.Point(6.5, 5.0),
-                        df.Point(9.5, 5.0),
-                        # df.Point(11.5, 3.5),
-                        # df.Point(13.0, 3)
-                        df.Point(11.5, 5.0),
-                        df.Point(13.0, 5.0)
-                        ]
-
-#subdomain1
-subdomain2_vertices = [interface23_vertices[0],
-                        interface23_vertices[1],
-                        interface23_vertices[2],
-                        interface23_vertices[3],
-                        interface23_vertices[4],
-                        interface23_vertices[5], # southern boundary, 23 interface
-                        subdomain1_vertices[4], # eastern boundary, outer boundary
-                        subdomain1_vertices[3],
-                        subdomain1_vertices[2],
-                        subdomain1_vertices[1],
-                        subdomain1_vertices[0] ] # northern boundary, 12 interface
-
-subdomain2_outer_boundary_verts = {
-    0: [interface23_vertices[5],
-        subdomain1_vertices[4]],
-    1: [subdomain1_vertices[0],
-        interface23_vertices[0]]
-}
-
-
-# interface34
-interface34_vertices = [df.Point(0.0, 2.0),
-                        df.Point(4.0, 2.0),
-                        df.Point(9.0, 2.5),
-                        df.Point(10.5, 2.0),
-                        df.Point(13.0, 1.5)]
-
-# subdomain3
-subdomain3_vertices = [interface34_vertices[0],
-                        interface34_vertices[1],
-                        interface34_vertices[2],
-                        interface34_vertices[3],
-                        interface34_vertices[4], # southern boundary, 34 interface
-                        subdomain2_vertices[5], # eastern boundary, outer boundary
-                        subdomain2_vertices[4],
-                        subdomain2_vertices[3],
-                        subdomain2_vertices[2],
-                        subdomain2_vertices[1],
-                        subdomain2_vertices[0] ] # northern boundary, 23 interface
-
-subdomain3_outer_boundary_verts = {
-    0: [interface34_vertices[4],
-        subdomain2_vertices[5]],
-    1: [subdomain2_vertices[0],
-        interface34_vertices[0]]
-}
-
-# subdomain4
-subdomain4_vertices = [subdomain0_vertices[0],
-                        subdomain0_vertices[1], # southern boundary, outer boundary
-                        subdomain3_vertices[4],# eastern boundary, outer boundary
-                        subdomain3_vertices[3],
-                        subdomain3_vertices[2],
-                        subdomain3_vertices[1],
-                        subdomain3_vertices[0] ] # northern boundary, 34 interface
-
-subdomain4_outer_boundary_verts = {
-    0: [subdomain4_vertices[6],
-        subdomain4_vertices[0],
-        subdomain4_vertices[1],
-        subdomain4_vertices[2]]
-}
-
-
-subdomain_def_points = [subdomain0_vertices,#
-                      subdomain1_vertices,#
-                      subdomain2_vertices,#
-                      subdomain3_vertices,#
-                      subdomain4_vertices
-                      ]