diff --git a/doc/source/gettingstarted.rst b/doc/source/gettingstarted.rst
index f2f18523a73c91285561c4ade8aa671d0fbdd395..d5b726f54ddb58bcbc1f52eaf368963d0ee2906b 100644
--- a/doc/source/gettingstarted.rst
+++ b/doc/source/gettingstarted.rst
@@ -21,8 +21,9 @@ Testing navipy
 
 Testing blender
 ---------------
-Navipy comes with a command line tool to run your script under blender.\
- It is however required that the python version used by blender, and the one use by navipy matche. To test your installation of blender and navipy you can download the (:download:`script <../../navipy/sensors/blendnavipy_test.py>`) and run it under blender with the following command:
+
+Navipy comes with a command line tool to run your script under blender. \
+It is, however, required that the python version used by blender and the one used by navipy match. To test your installation of blender and navipy you can download the (:download:`script <../../navipy/sensors/blendnavipy_test.py>`) and run it under blender with the following command:
 
  .. code-block:: bash
 		 
diff --git a/doc/source/overview/examples/blenddemo_beesampling.py b/doc/source/overview/examples/blenddemo_beesampling.py
deleted file mode 100644
index a948ef8a824fdc6ef48d4644096a9fe3174d43f3..0000000000000000000000000000000000000000
--- a/doc/source/overview/examples/blenddemo_beesampling.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""
-Example on how to use the rendering module
-"""
-import tempfile
-import numpy as np
-import os
-from navipy.sensors.bee_sampling import BeeSampling
-from navipy.sensors.renderer import BlenderRender
-
-# create a bee sampling
-cyberbee = BlenderRender()
-cyberbee.cycle_samples = 5
-bee_samp = BeeSampling(cyberbee)
-# Create a list of point from which we want to render images
-# Careful three d meshing grow quickly.
-x = np.linspace(-5, 5, 3)
-y = np.linspace(-5, 5, 3)
-z = np.linspace(1.8, 1.8, 1)
-alpha_1 = np.array([0]) + np.pi / 2
-alpha_2 = np.array([0])
-alpha_3 = np.array([0])
-bee_samp.create_sampling_grid(
-    x, y, z, alpha1=alpha_1, alpha2=alpha_2, alpha3=alpha_3)
-# Assign maximum world dimension. Otherwise the distance
-# will go to infinity, and the distance to objects after compression
-# will be set to 0
-world_dim = 50 * np.sqrt(2)
-bee_samp.world_dim = world_dim
-# Rendering in a temporary folder.
-with tempfile.TemporaryDirectory() as folder:
-    filename_db = os.path.join(folder, 'database.db')
-    bee_samp.render(filename_db)
diff --git a/doc/source/overview/rendering.rst b/doc/source/overview/rendering.rst
index 91efae6ee535cc6c10df22f93f672d4777c26da9..2a2979b7372262789d01aeeecf119f8ada91908e 100644
--- a/doc/source/overview/rendering.rst
+++ b/doc/source/overview/rendering.rst
@@ -17,59 +17,6 @@ Using navipy in blender
 >>> blendnavipy --blender-world='/path/to/world.blend' --python-script='/path/to/script.py'
 
 .. argparse::
-   :filename: ../bin/blendnavipy.py
+   :filename: ../navipy/sensors/blendnavipy.py
    :func: parser_blendnavipy	      
    :prog: blendnavipy
-
-How to generate a database using blender
-----------------------------------------
-.. literalinclude:: examples/blenddemo_beesampling.py
-   :lines: 7, 8
-
-First we configure the rendering module
-
-.. literalinclude:: examples/blenddemo_beesampling.py
-   :lines: 11, 12
-
-With the toolbox at disposition we just need to configure the \
-BeeSampling to render images on a regular 3D grid.
-
-.. literalinclude:: examples/blenddemo_beesampling.py
-   :lines: 13,16-23
-
-If we want to use the distance to objects, we need to tell the \
-BeeSampling what is the maximum distance to objects in the environment.\
- Otherwise the distance can go until infinity, and since the image are \
-compressed in the database, all distances to object will be equal to \
-zero:
-
-.. literalinclude:: examples/blenddemo_beesampling.py
-   :lines: 27-28
-
-Finally we can generate the database.
-
-.. literalinclude:: examples/blenddemo_beesampling.py
-   :dedent: 4
-   :lines: 31-32
-
-
-(:download:`Source code <examples/blenddemo_beesampling.py>`)
-
-Custom sampling
----------------
-.. literalinclude:: examples/blenddemo_cyberbee.py
-   :lines: 5
-
-With the toolbox at disposition we just need to configure the \
-Cyberbee to render images at desired positions.
-
-.. literalinclude:: examples/blenddemo_cyberbee.py
-   :lines: 8-13
-
-To render a scene at a given positions, we assign position orientation \
-to and then tell the cyberbee to get the scene at that point.
-
-.. literalinclude:: examples/blenddemo_cyberbee.py
-   :lines: 16-24
-
-(:download:`Source code <examples/blenddemo_cyberbee.py>`)
diff --git a/doc/source/tutorials/examples/blenddemo_beesampling.py b/doc/source/tutorials/examples/blenddemo_beesampling.py
new file mode 100644
index 0000000000000000000000000000000000000000..e0a6e1d77703ca6128aa2427efea364279ea5df3
--- /dev/null
+++ b/doc/source/tutorials/examples/blenddemo_beesampling.py
@@ -0,0 +1,93 @@
+"""
+Example on how to use the rendering module
+"""
+import tempfile
+import numpy as np
+import os
+import matplotlib.pyplot as plt
+from matplotlib.patches import ConnectionPatch
+from navipy.database import DataBaseLoad
+from navipy.sensors.bee_sampling import BeeSampling
+from navipy.sensors.renderer import BlenderRender
+
+
+# create a bee sampling
+cyberbee = BlenderRender()
+cyberbee.cycle_samples = 100  # higher value -> slower but nicer
+bee_samp = BeeSampling(cyberbee)
+# Create a list of points from which we want to render images
+# Careful: the 3D grid grows quickly.
+x = np.linspace(-5, 5, 3)
+y = np.linspace(-5, 5, 3)
+z = np.linspace(1.8, 1.8, 1)
+alpha_1 = np.array([0]) + np.pi / 2
+alpha_2 = np.array([0])
+alpha_3 = np.array([0])
+bee_samp.create_sampling_grid(
+    x, y, z, alpha1=alpha_1, alpha2=alpha_2, alpha3=alpha_3)
+# Assign maximum world dimension. Otherwise the distance
+# will go to infinity, and the distance to objects after compression
+# will be set to 0
+world_dim = 50 * np.sqrt(2)
+bee_samp.world_dim = world_dim
+# Render into a database file in the current working directory.
+filename_db = 'database.db'
+bee_samp.render(filename_db)
+
+# --- End of database generation.
+# --- Plot some points of the db
+# Load and plot db
+mydb = DataBaseLoad(filename_db)
+
+# Plot a view of db
+posorients = mydb.posorients
+posorient = posorients.loc[1,:]
+f, axarr = plt.subplots(3,3,figsize=(15,10))
+ax=axarr[1][1]
+ax.plot(posorients.x,posorients.y,'ko')
+ax.set_xlabel('x')
+ax.set_ylabel('y')
+ax.set_aspect('equal')
+ax.text(0,2.5,'Top view of the grid',horizontalalignment='center',fontsize=15)
+unique_x = posorients.x.unique()[::-1]
+unique_y = posorients.y.unique()
+for i,axar in enumerate(axarr):
+    for j, ax in enumerate(axar):
+        if j==1 and i==1:
+            continue
+        posorient.x=unique_x[j]
+        posorient.y=unique_y[i]
+        scene = mydb.scene(posorient)
+        to_plot_im = scene[:, :, :3,0]
+        ax.imshow(to_plot_im)
+        ax.invert_yaxis()
+        ax.set_aspect('equal')
+        ax.get_xaxis().set_ticks([])
+        ax.get_yaxis().set_ticks([])
+        ax.set_title('Panorama at ({},{})'.format(posorient.x,posorient.y))
+        xyA=[0,0]
+        if j==0:
+            xyA[0]=to_plot_im.shape[1]
+        elif j==1:
+            xyA[0]=to_plot_im.shape[1]//3
+        elif j==2:
+            xyA[0]=0
+        print(i,j,posorient.x,posorient.y)
+        if i==0:
+            xyA[1]=0
+        elif i==1:
+            xyA[1]=to_plot_im.shape[0]//3
+        elif i==2:
+            xyA[1]=to_plot_im.shape[0]
+            
+        con = ConnectionPatch(xyA=xyA, xyB=(posorient.x,posorient.y), coordsA="data", coordsB="data",
+                      axesA=ax, axesB=axarr[1][1], color="black")
+        ax.add_artist(con)
+        
+for i,axar in enumerate(axarr):
+    axar[0].set_ylabel('Elevation')
+for i,axar in enumerate(axarr.transpose()):
+    axar[-1].set_xlabel('Azimuth')
+
+f.show()
+f.savefig('database.svg')
diff --git a/doc/source/overview/examples/blenddemo_cyberbee.py b/doc/source/tutorials/examples/blenddemo_cyberbee.py
similarity index 73%
rename from doc/source/overview/examples/blenddemo_cyberbee.py
rename to doc/source/tutorials/examples/blenddemo_cyberbee.py
index e9e1b2c9ff21d86c2a6bcdf850a2685c08e2aa89..d689323d1bd0770871bed7e160dbf3b99d7ba63d 100644
--- a/doc/source/overview/examples/blenddemo_cyberbee.py
+++ b/doc/source/tutorials/examples/blenddemo_cyberbee.py
@@ -1,12 +1,11 @@
 import numpy as np
 import pandas as pd
-from matplotlib.colors import hsv_to_rgb, rgb_to_hsv
 import matplotlib.pyplot as plt
 from navipy.sensors.renderer import BlenderRender
 
 # Configure the rendering module
 cyberbee = BlenderRender()
-cyberbee.cycle_samples = 50
+cyberbee.cycle_samples = 100
 cyberbee.camera_rotation_mode = 'XYZ'
 cyberbee.camera_fov = [[-90, 90], [-180, 180]]
 cyberbee.gaussian_width = 1.5
@@ -26,17 +25,8 @@ scene = cyberbee.scene(posorient)
 # plot
 f, axarr = plt.subplots(2, 1,  sharex=True)
 
-to_plot_im = scene[:, :, :3]
-to_plot_im -= to_plot_im.min()
-to_plot_im /= to_plot_im.max()
-to_plot_im = rgb_to_hsv(to_plot_im)
-to_plot_im[..., 2] = to_plot_im[..., 2] * 2
-to_plot_im = hsv_to_rgb(to_plot_im)
-
-to_plot_im = to_plot_im * 255
-to_plot_im = to_plot_im.astype(np.uint8)
-
-to_plot_dist = 1 / scene[:, :, 3]
+to_plot_im = scene[:, :, :3,0]
+to_plot_dist = 1 / scene[:, :, 3,0]
 
 ax = axarr[0]
 ax.imshow(to_plot_im)
@@ -61,7 +51,6 @@ f.subplots_adjust(right=0.8)
 cbar_ax = f.add_axes([0.85, 0.15, 0.03, 0.3])
 cb = f.colorbar(im, cax=cbar_ax)
 cb.set_label('Nearness')
-cb.set_label('Nearness')
 
 f.show()
-f.savefig('CyberBeeView.svg')
+f.savefig('renderimage.svg')
diff --git a/doc/source/tutorials/index.rst b/doc/source/tutorials/index.rst
index 2d4fe5e412fdc517e18cda9c73d8644cf4393ef9..5bd7cf63d3f4ddad18aeda8e50273b2e6568a483 100644
--- a/doc/source/tutorials/index.rst
+++ b/doc/source/tutorials/index.rst
@@ -1,6 +1,69 @@
 Tutorials
 =========
 
+.. The image ratio is: width: 350px; height: 350/4 + (2x5) ~= 98px
+
+.. only:: builder_html and (not singlehtml)
+
+   .. container:: tocdescr
+
+
+       .. container:: descr
+
+         .. figure:: /tutorials/compound_eye.jpeg
+            :target: interface/index.html
+
+         :doc:`/interface/index`
+            Average skyline vector homing (grid)
+
+       .. container:: descr
+
+         .. figure:: /tutorials/compound_eye.jpeg
+            :target: interface/index.html
+
+         :doc:`/interface/index`
+            Following the rotational image difference (close loop)
+
+       .. container:: descr
+
+         .. figure:: /tutorials/compound_eye.jpeg
+            :target: interface/index.html
+
+         :doc:`/interface/index`
+            Photo-taxis with a compound eye (close loop)
+
+       .. container:: descr
+
+         .. figure:: /tutorials/compound_eye.jpeg
+            :target: interface/index.html
+
+         :doc:`/interface/index`
+            Finding attracting points (graph)
+
+       .. container:: descr
+
+         .. figure:: /tutorials/database.jpeg
+            :target: database.html
+
+         :doc:`/tutorials/database`
+            Creating a database for offline methods
+
+       .. container:: descr
+
+         .. figure:: /tutorials/renderimage.svg
+            :target: renderimage.html
+
+         :doc:`/tutorials/renderimage`
+            Generating images and movies from insect trajectory
+
+.. only:: latex or epub or singlehtml
+
+   .. toctree::
+      :maxdepth: 1
+
+      database.rst
+      renderimage.rst
+      
 Average place-code vector homing
 --------------------------------
 Homing with an average skyline vector consist of deriving the skyline \
diff --git a/doc/source/tutorials/renderimage.rst b/doc/source/tutorials/renderimage.rst
new file mode 100644
index 0000000000000000000000000000000000000000..7688636e6cb2ff0ae9c3cd9b552d5c077ebc0e21
--- /dev/null
+++ b/doc/source/tutorials/renderimage.rst
@@ -0,0 +1,25 @@
+Render a single image
+---------------------
+.. literalinclude:: examples/blenddemo_cyberbee.py
+   :lines: 5
+
+With the toolbox at our disposal, we just need to configure the \
+Cyberbee to render images at desired positions.
+
+.. literalinclude:: examples/blenddemo_cyberbee.py
+   :lines: 8-13
+
+To render a scene at a given position, we assign a position and \
+orientation, and then tell the cyberbee to get the scene at that point.
+
+.. literalinclude:: examples/blenddemo_cyberbee.py
+   :lines: 16-24
+
+(:download:`Source code <examples/blenddemo_cyberbee.py>`)
+(:download:`Blender world <../../../navipy/resources/forest_world.blend>`)
+
+
+
+Create a movie
+--------------
+
diff --git a/navipy/resources/forest_world.blend b/navipy/resources/forest_world.blend
index f91b08161a81a8a3312113f73b12c094587bf631..326c649d78b9bdd506ea001b6ef888db2a3c38ad 100644
Binary files a/navipy/resources/forest_world.blend and b/navipy/resources/forest_world.blend differ