From 6fcdeedf668b0a4ec538e81440033936b2f4af99 Mon Sep 17 00:00:00 2001
From: "Olivier J.N. Bertrand" <olivier.bertrand@uni-bielefeld.de>
Date: Sun, 28 Jan 2018 18:42:31 +0100
Subject: [PATCH] Use Senses class instead of scene to handle compasses, etc.
 at a later stage

---
 doc/source/example/processing/apcv.py         | 35 +++-----------
 .../processing/contrast_weighted_nearness.py  |  9 ++--
 .../example/processing/michelson_contrast.py  |  9 ++--
 doc/source/example/processing/pcv.py          | 10 ++--
 doc/source/example/processing/skyline.py      |  9 ++--
 .../rendering/blenddemo_beesampling.py        |  6 +--
 .../example/rendering/blenddemo_cyberbee.py   |  4 +-
 navipy/database/__init__.py                   |  8 +++-
 navipy/moving/agent.py                        | 47 +++++++++++++++----
 navipy/{rendering => sensors}/__init__.py     | 26 +++++++++-
 navipy/{rendering => sensors}/bee_sampling.py |  0
 .../cyber_bee.py => sensors/renderer.py}      |  6 +--
 12 files changed, 103 insertions(+), 66 deletions(-)
 rename navipy/{rendering => sensors}/__init__.py (60%)
 rename navipy/{rendering => sensors}/bee_sampling.py (100%)
 rename navipy/{rendering/cyber_bee.py => sensors/renderer.py} (96%)

diff --git a/doc/source/example/processing/apcv.py b/doc/source/example/processing/apcv.py
index 00a9c17..4949364 100644
--- a/doc/source/example/processing/apcv.py
+++ b/doc/source/example/processing/apcv.py
@@ -1,18 +1,20 @@
 import matplotlib.pyplot as plt
 from navipy.database import DataBaseLoad
 import navipy.processing as processing
-import numpy as np
+from navipy.sensors import Senses
 import pkg_resources
 
 # 1) Connect to the database
 mydb_filename = pkg_resources.resource_filename(
     'navipy', 'resources/database.db')
 mydb = DataBaseLoad(mydb_filename)
+mysenses = Senses(renderer=mydb)
 # 2) Define the position-orinetation at which
 # we want the image
-rowid = 12
-my_scene = mydb.scene(rowid=rowid)
-my_apcv = processing.pcode.apcv(my_scene, mydb.viewing_directions)
+posorient = mydb.posorients.loc[12, :]
+mysenses.update(posorient)
+my_apcv = processing.pcode.apcv(mysenses.vision.scene,
+                                mysenses.vision.viewing_directions)
 
 my_apcv_sph = processing.tools.cartesian_to_spherical(x=my_apcv[..., 0],
                                                       y=my_apcv[..., 1],
@@ -26,29 +28,4 @@ azimuth = mydb.viewing_directions[...,
 
 
 f, axarr = plt.subplots(1, 2, figsize=(15, 4))
-
-to_plot_im = my_scene[:, :, :3, 0]
-to_plot_im -= to_plot_im.min()
-to_plot_im /= to_plot_im.max()
-to_plot_im = to_plot_im * 255
-to_plot_im = to_plot_im.astype(np.uint8)
-to_plot_dist = my_scene[:, :, 3, 0]
-ax = axarr[0]
-
-for chan_i, chan_n in enumerate(['R', 'G', 'B']):
-    color = chan_n
-    ax.plot(np.rad2deg(my_apcv_sph[..., chan_i, 1]),
-            np.rad2deg(my_apcv_sph[..., chan_i, 0]), 'o', color=color)
-ax.imshow(to_plot_im, extent=[np.min(azimuth), np.max(azimuth),
-                              np.max(elevation), np.min(elevation)])
-ax.invert_yaxis()
-ax = axarr[1]
-color = 'k'
-ax.plot(np.rad2deg(my_apcv_sph[..., 3, 1]),
-        np.rad2deg(my_apcv_sph[..., 3, 0]), 'o', color=color)
-
-ax.imshow(to_plot_dist, extent=[np.min(azimuth), np.max(azimuth),
-                                np.max(elevation), np.min(elevation)])
-ax.invert_yaxis()
-
 f.show()
diff --git a/doc/source/example/processing/contrast_weighted_nearness.py b/doc/source/example/processing/contrast_weighted_nearness.py
index 220863a..614d667 100644
--- a/doc/source/example/processing/contrast_weighted_nearness.py
+++ b/doc/source/example/processing/contrast_weighted_nearness.py
@@ -1,6 +1,7 @@
 import matplotlib.pyplot as plt
 from navipy.database import DataBaseLoad
 import navipy.processing as processing
+from navipy.sensors import Senses
 import pkg_resources
 
 
@@ -8,11 +9,13 @@ import pkg_resources
 mydb_filename = pkg_resources.resource_filename(
     'navipy', 'resources/database.db')
 mydb = DataBaseLoad(mydb_filename)
+mysenses = Senses(renderer=mydb)
 # 2) Define the position-orinetation at which
 # we want the image
-rowid = 12
-my_scene = mydb.scene(rowid=rowid)
-my_contrast = processing.pcode.contrast_weighted_nearness(my_scene)
+posorient = mydb.posorients.loc[12, :]
+mysenses.update(posorient)
+my_contrast = processing.pcode.contrast_weighted_nearness(
+    mysenses.vision.scene)
 
 f, axarr = plt.subplots(2, 2, figsize=(15, 8))
 axarr = axarr.flatten()
diff --git a/doc/source/example/processing/michelson_contrast.py b/doc/source/example/processing/michelson_contrast.py
index e7bb58c..9d6e12c 100644
--- a/doc/source/example/processing/michelson_contrast.py
+++ b/doc/source/example/processing/michelson_contrast.py
@@ -1,18 +1,19 @@
 import matplotlib.pyplot as plt
 from navipy.database import DataBaseLoad
 import navipy.processing as processing
+from navipy.sensors import Senses
 import pkg_resources
 
-
 # 1) Connect to the database
 mydb_filename = pkg_resources.resource_filename(
     'navipy', 'resources/database.db')
 mydb = DataBaseLoad(mydb_filename)
+mysenses = Senses(renderer=mydb)
 # 2) Define the position-orinetation at which
 # we want the image
-rowid = 12
-my_scene = mydb.scene(rowid=rowid)
-my_contrast = processing.pcode.michelson_contrast(my_scene)
+posorient = mydb.posorients.loc[12, :]
+mysenses.update(posorient)
+my_contrast = processing.pcode.michelson_contrast(mysenses.vision.scene)
 
 f, axarr = plt.subplots(2, 2, figsize=(15, 8))
 axarr = axarr.flatten()
diff --git a/doc/source/example/processing/pcv.py b/doc/source/example/processing/pcv.py
index 6c2017c..325d619 100644
--- a/doc/source/example/processing/pcv.py
+++ b/doc/source/example/processing/pcv.py
@@ -1,15 +1,17 @@
 # import matplotlib.pyplot as plt
 from navipy.database import DataBaseLoad
 import navipy.processing as processing
+from navipy.sensors import Senses
 import pkg_resources
 
-
 # 1) Connect to the database
 mydb_filename = pkg_resources.resource_filename(
     'navipy', 'resources/database.db')
 mydb = DataBaseLoad(mydb_filename)
+mysenses = Senses(renderer=mydb)
 # 2) Define the position-orinetation at which
 # we want the image
-rowid = 12
-my_scene = mydb.scene(rowid=rowid)
-my_pcv = processing.pcode.pcv(my_scene, mydb.viewing_directions)
+posorient = mydb.posorients.loc[12, :]
+mysenses.update(posorient)
+my_pcv = processing.pcode.pcv(mysenses.vision.scene,
+                              mysenses.vision.viewing_directions)
diff --git a/doc/source/example/processing/skyline.py b/doc/source/example/processing/skyline.py
index 499bacb..8846f93 100644
--- a/doc/source/example/processing/skyline.py
+++ b/doc/source/example/processing/skyline.py
@@ -1,18 +1,19 @@
 import matplotlib.pyplot as plt
 from navipy.database import DataBaseLoad
 import navipy.processing as processing
+from navipy.sensors import Senses
 import pkg_resources
 
-
 # 1) Connect to the database
 mydb_filename = pkg_resources.resource_filename(
     'navipy', 'resources/database.db')
 mydb = DataBaseLoad(mydb_filename)
+mysenses = Senses(renderer=mydb)
 # 2) Define the position-orinetation at which
 # we want the image
-rowid = 12
-my_scene = mydb.scene(rowid=rowid)
-my_skyline = processing.pcode.skyline(my_scene)
+posorient = mydb.posorients.loc[12, :]
+mysenses.update(posorient)
+my_skyline = processing.pcode.skyline(mysenses.vision.scene)
 
 f, axarr = plt.subplots(1, 2, figsize=(15, 4))
 for chan_i, chan_n in enumerate(mydb.channels):
diff --git a/doc/source/example/rendering/blenddemo_beesampling.py b/doc/source/example/rendering/blenddemo_beesampling.py
index 8e0f429..214c880 100644
--- a/doc/source/example/rendering/blenddemo_beesampling.py
+++ b/doc/source/example/rendering/blenddemo_beesampling.py
@@ -3,11 +3,11 @@ Example on how to use the rendering module
 """
 import tempfile
 import numpy as np
-from navipy.rendering.bee_sampling import BeeSampling
-from navipy.rendering.cyber_bee import Cyberbee
+from navipy.sensors.bee_sampling import BeeSampling
+from navipy.sensors.renderer import BlenderRender
 
 # create a bee sampling
-cyberbee = Cyberbee()
+cyberbee = BlenderRender()
 cyberbee.cycle_samples = 5
 bee_samp = BeeSampling(cyberbee)
 # Create a list of point from which we want to render images
diff --git a/doc/source/example/rendering/blenddemo_cyberbee.py b/doc/source/example/rendering/blenddemo_cyberbee.py
index 64f222f..37f6e44 100644
--- a/doc/source/example/rendering/blenddemo_cyberbee.py
+++ b/doc/source/example/rendering/blenddemo_cyberbee.py
@@ -2,10 +2,10 @@ import numpy as np
 import pandas as pd
 from matplotlib.colors import hsv_to_rgb, rgb_to_hsv
 import matplotlib.pyplot as plt
-from navipy.rendering.cyber_bee import Cyberbee
+from navipy.sensors.renderer import BlenderRender
 
 # with tempfile.TemporaryDirectory() as folder:
-cyberbee = Cyberbee()
+cyberbee = BlenderRender()
 cyberbee.cycle_samples = 50
 cyberbee.camera_rotation_mode = 'XYZ'
 cyberbee.camera_fov = [[-90, 90], [-180, 180]]
diff --git a/navipy/database/__init__.py b/navipy/database/__init__.py
index 2affba6..a31f25f 100644
--- a/navipy/database/__init__.py
+++ b/navipy/database/__init__.py
@@ -122,6 +122,7 @@ It creates three sql table on initialisation.
                                  must be single value')
         self.filename = filename
         self.channels = channels
+        self.viewing_directions = None
         self.normalisation_columns = list()
         for chan_n in self.channels:
             self.normalisation_columns.append(str(chan_n) + '_max')
@@ -138,6 +139,9 @@ It creates three sql table on initialisation.
         self.tablecolumns['position_orientation']['alpha_2'] = 'real'
         self.tablecolumns['image'] = dict()
         self.tablecolumns['image']['data'] = 'array'
+        # self.tablecolumns['viewing_directions'] = dict()
+        # self.tablecolumns['viewing_directions']['elevation'] = 'array'
+        # self.tablecolumns['viewing_directions']['azimuth'] = 'array'
         self.tablecolumns['normalisation'] = dict()
         for col in self.normalisation_columns:
             self.tablecolumns['normalisation'][col] = 'real'
@@ -376,9 +380,9 @@ database
             if rowid is np.nan:
                 raise ValueError('rowid must not be nan')
         if (posorient is None) and (rowid is None):
-                raise Exception('posorient and rowid can not be both None')
+            raise Exception('posorient and rowid can not be both None')
         if posorient is not None:
-                rowid = self.get_posid(posorient)
+            rowid = self.get_posid(posorient)
         # Read images
         tablename = 'position_orientation'
         toreturn = pd.read_sql_query(
diff --git a/navipy/moving/agent.py b/navipy/moving/agent.py
index 2d217c6..0415985 100644
--- a/navipy/moving/agent.py
+++ b/navipy/moving/agent.py
@@ -17,6 +17,7 @@ from multiprocessing import Queue, JoinableQueue, Process
 import inspect
 from navipy.database import DataBaseLoad
 import navipy.moving.maths as navimomath
+from navipy.sensors import Senses
 
 version = float(nx.__version__)
 
@@ -26,10 +27,19 @@ def defaultcallback(*args, **kwargs):
     raise NameError('No Callback')
 
 
+class DefaultSensors():
+    def __init__(self):
+        pass
+
+    def update(self, posorient):
+        raise NameError('No Callback')
+
+
 class AbstractAgent():
     def __init__(self):
-        self._sensors = defaultcallback
+        self._sensors = DefaultSensors()
         self._motion = defaultcallback
+        self._motion_param = None
         self._alter_posorientvel = defaultcallback
         self._posorient_col = ['x', 'y', 'z',
                                'alpha_0', 'alpha_1', 'alpha_2']
@@ -74,6 +84,17 @@ class AbstractAgent():
     def motion(self):
         return inspect.getsourcelines(self._motion)
 
+    @property
+    def motion_param(self):
+        return self._motion_param.copy()
+
+    @motion_param.setter
+    def motion_param(self, param):
+        if isinstance(param, dict):
+            self._motion_param = param
+        else:
+            raise TypeError('motion param should be a dictionary')
+
     @property
     def sensors(self):
         return inspect.getsourcelines(self._sensors)
@@ -83,9 +104,15 @@ class AbstractAgent():
         return inspect.getsourcelines(self._alter_posorientvel)
 
     def move(self):
-        scene = self._sensors(self.posorient)
-        newpos = self._motion(self.posorient, scene)
-        alteredpos = self._alter_posorientvel(newpos)
+        self._sensors.update(self.posorient)
+        if self._motion_param is None:
+            self.velocity = self._motion(self._posorient_vel,
+                                         self._sensors)
+        else:
+            self.velocity = self._motion(self._posorient_vel,
+                                         self._sensors,
+                                         **self._motion_param)
+        alteredpos = self._alter_posorientvel(self._posorient_vel)
         self.posorient = alteredpos
         self.velocity = alteredpos
 
@@ -94,7 +121,7 @@ class AbstractAgent():
         """
         if return_tra:
             trajectory = pd.DataFrame(index=range(0, max_nstep),
-                                      columns=self.posorient_vel_col)
+                                      columns=self._posorient_vel_col)
             trajectory.loc[0, :] = self._posorient_vel.copy()
         for stepi in range(1, max_nstep):
             self.move()
@@ -115,16 +142,16 @@ CyberBeeAgent is a close loop agent and need to be run within blender \
     bla
     """
 
-    def __init__(self, cyberbee):
+    def __init__(self, renderer):
         AbstractAgent.__init__(self)
         AbstractAgent._alter_posorientvel = \
             lambda motion_vec: navimomath.next_pos(motion_vec,
                                                    move_mode='free_run')
-        self.sensors = cyberbee.scene
+        self.sensors = renderer
 
     @AbstractAgent.sensors.setter
-    def sensors(self, cyberbee):
-        self._sensors = cyberbee.scene
+    def sensors(self, renderer):
+        self._sensors = Senses(renderer)
 
     @AbstractAgent.motion.setter
     def motion(self, motion):
@@ -159,7 +186,7 @@ GridAgent is a close loop agent here its position is snap to a grid.
     def sensors(self, database_filename):
         self.db = DataBaseLoad(database_filename)
         self._posorients = self.db.posorients
-        self._sensors = self.db.scene
+        self._sensors = Senses(renderer=self.db)
 
     @AbstractAgent.motion.setter
     def motion(self, motion):
diff --git a/navipy/rendering/__init__.py b/navipy/sensors/__init__.py
similarity index 60%
rename from navipy/rendering/__init__.py
rename to navipy/sensors/__init__.py
index 294345f..a85692f 100644
--- a/navipy/rendering/__init__.py
+++ b/navipy/sensors/__init__.py
@@ -42,9 +42,31 @@ Custom sampling
 
 Rendering classes
 -----------------
-.. autoclass:: navipy.rendering.bee_sampling.BeeSampling
+.. autoclass:: navipy.sensors.bee_sampling.BeeSampling
     :members:
 
-.. autoclass:: navipy.rendering.cyber_bee.Cyberbee
+.. autoclass:: navipy.sensors.renderer.BlenderRender
     :members:
 """
+
+
+class Bunch:
+    def __init__(self, **kwds):
+        self.__dict__.update(kwds)
+
+
+class Senses():
+    def __init__(self,
+                 renderer=None):
+        self.vision = Bunch(scene=None,
+                            viewing_directions=None,
+                            channels=None)
+        self.renderer = renderer
+        if self.renderer is not None:
+            self.vision.scene = None
+            self.vision.viewing_directions = renderer.viewing_directions
+            self.vision.channels = renderer.channels
+
+    def update(self, posorient):
+        if self.renderer is not None:
+            self.vision.scene = self.renderer.scene(posorient)
diff --git a/navipy/rendering/bee_sampling.py b/navipy/sensors/bee_sampling.py
similarity index 100%
rename from navipy/rendering/bee_sampling.py
rename to navipy/sensors/bee_sampling.py
diff --git a/navipy/rendering/cyber_bee.py b/navipy/sensors/renderer.py
similarity index 96%
rename from navipy/rendering/cyber_bee.py
rename to navipy/sensors/renderer.py
index bce3805..d18499a 100644
--- a/navipy/rendering/cyber_bee.py
+++ b/navipy/sensors/renderer.py
@@ -27,10 +27,10 @@ import os
 import pandas as pd
 
 
-class Cyberbee():
+class BlenderRender():
     """
-    Cyberbee is a small class binding python with blender.
-    With Cyberbee one can move the bee to a position, and render what
+    BlenderRender is a small class binding python with blender.
+    With BlenderRender one can move the bee to a position, and render what
     the bee see at this position.
 
     The Bee eye is a panoramic camera with equirectangular projection
-- 
GitLab