From e8616a03ea9a1190873b7331b92a165cabd3f277 Mon Sep 17 00:00:00 2001
From: "Olivier J.N. Bertrand" <olivier.bertrand@uni-bielefeld.de>
Date: Sun, 28 Jan 2018 19:53:43 +0100
Subject: [PATCH] Correct renderer doc

---
 doc/source/index.rst       |  1 +
 doc/source/rendering.rst   |  6 ++--
 navipy/sensors/__init__.py | 55 +++++++++----------------------
 navipy/sensors/renderer.py | 67 +++++++++++++++++++++++++++++++++-----
 4 files changed, 77 insertions(+), 52 deletions(-)

diff --git a/doc/source/index.rst b/doc/source/index.rst
index d4217a2..9a5ec46 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -28,6 +28,7 @@ Content
    comparing
    moving
    database
+   tutorials
 
 
 Indices and tables
diff --git a/doc/source/rendering.rst b/doc/source/rendering.rst
index 2d70aad..0e9d333 100644
--- a/doc/source/rendering.rst
+++ b/doc/source/rendering.rst
@@ -1,3 +1,3 @@
-Rendering
-=========
-.. automodule:: navipy.rendering
+Sensors
+=======
+.. automodule:: navipy.sensors
diff --git a/navipy/sensors/__init__.py b/navipy/sensors/__init__.py
index a85692f..3928b27 100644
--- a/navipy/sensors/__init__.py
+++ b/navipy/sensors/__init__.py
@@ -1,52 +1,27 @@
 """
-Navipy & blender
-----------------
-What is blender?
-~~~~~~~~~~~~~~~~
-Explain blender
+Every agent comes with a battery of senses (biological agent) \
+or sensors (technical agent). The senses of agents in navipy are limited
+to:
 
-Create a world
-~~~~~~~~~~~~~~
-Explain How to create env for navipy
+* 4d vision (brightness + depth)
 
-Using navipy in blender
-~~~~~~~~~~~~~~~~~~~~~~~
-Blender comes with its own python installation. Thus, we need to \
-tell blender to use our virtualenv where the navigation toolbox \
-is installed. To do we need to import the os module
+The 4d vision sense is controlled by the rendering module: images are \
+either rendered online or loaded from a database of pre-rendered images.
 
-.. literalinclude:: blender_run.py
-    :lines: 6 - 7
+For example to use pre-rendered images from a database:
 
-then activate the environment by using the following function:
+.. literalinclude:: example/processing/apcv.py
+   :lines: 10-11
 
-.. literalinclude:: blender_run.py
-    :lines: 13 - 18
+Then the senses can be updated at a new position and orientation:
 
-here venv_path is the path to the virtual environment within which \
-navipy has been installed.
+.. literalinclude:: example/processing/apcv.py
+   :lines: 15
 
-Now, blender can import all modules used by the navigation toolbox.
+Renderer
+--------
+.. automodule:: navipy.sensors.renderer
 
-How to run python code with blender:
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
->>> blender path/to/world.blend --background --python path/to/code.py
-
-How to generate a database using blender
-----------------------------------------
-.. automodule:: navipy.rendering.bee_sampling
-
-Custom sampling
----------------
-.. automodule:: navipy.rendering.cyber_bee
-
-Rendering classes
------------------
-.. autoclass:: navipy.sensors.bee_sampling.BeeSampling
-    :members:
-
-.. autoclass:: navipy.sensors.renderer.Cyberbee
-    :members:
 """
 
 
diff --git a/navipy/sensors/renderer.py b/navipy/sensors/renderer.py
index d18499a..df6bafe 100644
--- a/navipy/sensors/renderer.py
+++ b/navipy/sensors/renderer.py
@@ -1,18 +1,52 @@
 """
-.. literalinclude:: example/rendering/blenddemo_cyberbee.py
-   :lines: 5
+Navipy & blender
+----------------
+What is blender?
+~~~~~~~~~~~~~~~~
+Explain blender
 
-With the toolbox at disposition we just need to configure the \
-Cyberbee to render images at desired positions.
+Create a world
+~~~~~~~~~~~~~~
+Explain How to create env for navipy
 
-.. literalinclude:: example/rendering/blenddemo_cyberbee.py
-   :lines: 8-13
+Using navipy in blender
+~~~~~~~~~~~~~~~~~~~~~~~
+Blender comes with its own python installation. Thus, we need to \
+tell blender to use our virtualenv where the navigation toolbox \
+is installed. To do so we need to import the os module
 
-To render a scene at a given positions we just have to do:
+.. literalinclude:: blender_run.py
+    :lines: 6 - 7
 
-.. literalinclude:: example/rendering/blenddemo_cyberbee.py
-   :lines: 14-22
+then activate the environment by using the following function:
 
+.. literalinclude:: blender_run.py
+    :lines: 13 - 18
+
+here venv_path is the path to the virtual environment within which \
+navipy has been installed.
+
+Now, blender can import all modules used by the navigation toolbox.
+
+How to run python code with blender:
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+>>> blender path/to/world.blend --background --python path/to/code.py
+
+How to generate a database using blender
+----------------------------------------
+.. automodule:: navipy.sensors.bee_sampling
+
+Custom sampling
+---------------
+.. autoclass:: navipy.sensors.renderer.BlenderRender
+
+Rendering classes
+-----------------
+.. autoclass:: navipy.sensors.bee_sampling.BeeSampling
+    :members:
+
+.. autoclass:: navipy.sensors.renderer.BlenderRender
+    :members:
 """
 import warnings
 try:
@@ -35,6 +69,21 @@ class BlenderRender():
 
     The Bee eye is a panoramic camera with equirectangular projection
     The light rays attaining the eyes are filtered with a gaussian.
+
+    .. literalinclude:: example/rendering/blenddemo_cyberbee.py
+       :lines: 5
+
+    With the toolbox at our disposal we just need to configure the \
+    Cyberbee to render images at desired positions.
+
+    .. literalinclude:: example/rendering/blenddemo_cyberbee.py
+       :lines: 8-13
+
+    To render a scene at a given position we just have to do:
+
+    .. literalinclude:: example/rendering/blenddemo_cyberbee.py
+       :lines: 14-22
+
     """
 
     def __init__(self):
-- 
GitLab