diff --git a/doc/source/example/processing/apcv.py b/doc/source/example/processing/apcv.py
index 00a9c1727e4da1c174e952c5250c0e280e2154a5..494936458450c16355a08e2be8026b5cc3abce33 100644
--- a/doc/source/example/processing/apcv.py
+++ b/doc/source/example/processing/apcv.py
@@ -1,18 +1,20 @@
 import matplotlib.pyplot as plt
 from navipy.database import DataBaseLoad
 import navipy.processing as processing
-import numpy as np
+from navipy.sensors import Senses
 import pkg_resources
 
 
 # 1) Connect to the database
 mydb_filename = pkg_resources.resource_filename(
     'navipy', 'resources/database.db')
 mydb = DataBaseLoad(mydb_filename)
+mysenses = Senses(renderer=mydb)
 # 2) Define the position-orinetation at which
 # we want the image
-rowid = 12
-my_scene = mydb.scene(rowid=rowid)
-my_apcv = processing.pcode.apcv(my_scene, mydb.viewing_directions)
+posorient = mydb.posorients.loc[12, :]
+mysenses.update(posorient)
+my_apcv = processing.pcode.apcv(mysenses.vision.scene,
+                                mysenses.vision.viewing_directions)
 my_apcv_sph = processing.tools.cartesian_to_spherical(x=my_apcv[..., 0],
                                                       y=my_apcv[..., 1],
@@ -26,29 +28,4 @@ azimuth = mydb.viewing_directions[...,
 
 f, axarr = plt.subplots(1, 2, figsize=(15, 4))
-
-to_plot_im = my_scene[:, :, :3, 0]
-to_plot_im -= to_plot_im.min()
-to_plot_im /= to_plot_im.max()
-to_plot_im = to_plot_im * 255
-to_plot_im = to_plot_im.astype(np.uint8)
-to_plot_dist = my_scene[:, :, 3, 0]
-ax = axarr[0]
-
-for chan_i, chan_n in enumerate(['R', 'G', 'B']):
-    color = chan_n
-    ax.plot(np.rad2deg(my_apcv_sph[..., chan_i, 1]),
-            np.rad2deg(my_apcv_sph[..., chan_i, 0]), 'o', color=color)
-ax.imshow(to_plot_im, extent=[np.min(azimuth), np.max(azimuth),
-                              np.max(elevation), np.min(elevation)])
-ax.invert_yaxis()
-ax = axarr[1]
-color = 'k'
-ax.plot(np.rad2deg(my_apcv_sph[..., 3, 1]),
-        np.rad2deg(my_apcv_sph[..., 3, 0]), 'o', color=color)
-
-ax.imshow(to_plot_dist, extent=[np.min(azimuth), np.max(azimuth),
-                                np.max(elevation), np.min(elevation)])
-ax.invert_yaxis()
-
 
 f.show()
diff --git a/doc/source/example/processing/contrast_weighted_nearness.py b/doc/source/example/processing/contrast_weighted_nearness.py
index 220863a1abe332baafab7d0bc5c24c3f23b932cf..614d6675c9473386666ec6653174c3742b2e7548 100644
--- a/doc/source/example/processing/contrast_weighted_nearness.py
+++ b/doc/source/example/processing/contrast_weighted_nearness.py
@@ -1,6 +1,7 @@
 import matplotlib.pyplot as plt
 from navipy.database import DataBaseLoad
 import navipy.processing as processing
+from navipy.sensors import Senses
 import pkg_resources
 
 
@@ -8,11 +9,13 @@ import pkg_resources
 mydb_filename = pkg_resources.resource_filename(
     'navipy', 'resources/database.db')
 mydb = DataBaseLoad(mydb_filename)
+mysenses = Senses(renderer=mydb)
 # 2) Define the position-orinetation at which
 # we want the image
-rowid = 12
-my_scene = mydb.scene(rowid=rowid)
-my_contrast = processing.pcode.contrast_weighted_nearness(my_scene)
+posorient = mydb.posorients.loc[12, :]
+mysenses.update(posorient)
+my_contrast = processing.pcode.contrast_weighted_nearness(
+    mysenses.vision.scene)
 
 f, axarr = plt.subplots(2, 2, figsize=(15, 8))
 axarr = axarr.flatten()
diff --git a/doc/source/example/processing/michelson_contrast.py b/doc/source/example/processing/michelson_contrast.py
index e7bb58c764eac18c265467c1d9046838c222d8d3..9d6e12c0b2dcea90e0a6738934de42724047f767 100644
--- a/doc/source/example/processing/michelson_contrast.py
+++ b/doc/source/example/processing/michelson_contrast.py
@@ -1,18 +1,19 @@
 import matplotlib.pyplot as plt
 from navipy.database import DataBaseLoad
 import navipy.processing as processing
+from navipy.sensors import Senses
 import pkg_resources
 
-
 # 1) Connect to the database
 mydb_filename = pkg_resources.resource_filename(
     'navipy', 'resources/database.db')
 mydb = DataBaseLoad(mydb_filename)
+mysenses = Senses(renderer=mydb)
 # 2) Define the position-orinetation at which
 # we want the image
-rowid = 12
-my_scene = mydb.scene(rowid=rowid)
-my_contrast = processing.pcode.michelson_contrast(my_scene)
+posorient = mydb.posorients.loc[12, :]
+mysenses.update(posorient)
+my_contrast = processing.pcode.michelson_contrast(mysenses.vision.scene)
 
 f, axarr = plt.subplots(2, 2, figsize=(15, 8))
 axarr = axarr.flatten()
diff --git a/doc/source/example/processing/pcv.py b/doc/source/example/processing/pcv.py
index 6c2017ccb18e8489b38b03aa968051cf2816efd8..325d61952fe5357578362b837b6477505d9e9c5c 100644
--- a/doc/source/example/processing/pcv.py
+++ b/doc/source/example/processing/pcv.py
@@ -1,15 +1,17 @@
 # import matplotlib.pyplot as plt
 from navipy.database import DataBaseLoad
 import navipy.processing as processing
+from navipy.sensors import Senses
 import pkg_resources
 
-
 # 1) Connect to the database
 mydb_filename = pkg_resources.resource_filename(
     'navipy', 'resources/database.db')
 mydb = DataBaseLoad(mydb_filename)
+mysenses = Senses(renderer=mydb)
 # 2) Define the position-orinetation at which
 # we want the image
-rowid = 12
-my_scene = mydb.scene(rowid=rowid)
-my_pcv = processing.pcode.pcv(my_scene, mydb.viewing_directions)
+posorient = mydb.posorients.loc[12, :]
+mysenses.update(posorient)
+my_pcv = processing.pcode.pcv(mysenses.vision.scene,
+                              mysenses.vision.viewing_directions)
diff --git a/doc/source/example/processing/skyline.py b/doc/source/example/processing/skyline.py
index 499bacb14a6b6f83fef09403fee3484646fd160d..8846f93e37ef579bcb7a58bb712b24fc25874985 100644
--- a/doc/source/example/processing/skyline.py
+++ b/doc/source/example/processing/skyline.py
@@ -1,18 +1,19 @@
 import matplotlib.pyplot as plt
 from navipy.database import DataBaseLoad
 import navipy.processing as processing
+from navipy.sensors import Senses
 import pkg_resources
 
-
 # 1) Connect to the database
 mydb_filename = pkg_resources.resource_filename(
     'navipy', 'resources/database.db')
 mydb = DataBaseLoad(mydb_filename)
+mysenses = Senses(renderer=mydb)
 # 2) Define the position-orinetation at which
 # we want the image
-rowid = 12
-my_scene = mydb.scene(rowid=rowid)
-my_skyline = processing.pcode.skyline(my_scene)
+posorient = mydb.posorients.loc[12, :]
+mysenses.update(posorient)
+my_skyline = processing.pcode.skyline(mysenses.vision.scene)
 
 f, axarr = plt.subplots(1, 2, figsize=(15, 4))
 for chan_i, chan_n in enumerate(mydb.channels):
diff --git a/doc/source/example/rendering/blenddemo_beesampling.py b/doc/source/example/rendering/blenddemo_beesampling.py
index 8e0f429b642b6c6940d6c81fff36b54eeffd6b36..214c8801713edc1776dc868810bede232a802830 100644
--- a/doc/source/example/rendering/blenddemo_beesampling.py
+++ b/doc/source/example/rendering/blenddemo_beesampling.py
@@ -3,11 +3,11 @@ Example on how to use the rendering module
 """
 import tempfile
 import numpy as np
-from navipy.rendering.bee_sampling import BeeSampling
-from navipy.rendering.cyber_bee import Cyberbee
+from navipy.sensors.bee_sampling import BeeSampling
+from navipy.sensors.renderer import BlenderRender
 
 # create a bee sampling
-cyberbee = Cyberbee()
+cyberbee = BlenderRender()
 cyberbee.cycle_samples = 5
 bee_samp = BeeSampling(cyberbee)
 # Create a list of point from which we want to render images
diff --git a/doc/source/example/rendering/blenddemo_cyberbee.py b/doc/source/example/rendering/blenddemo_cyberbee.py
index 64f222f21cf73062d8250fd5ef623a61510c0b7a..37f6e440cb70db36da93b16871aaf6376624ccee 100644
--- a/doc/source/example/rendering/blenddemo_cyberbee.py
+++ b/doc/source/example/rendering/blenddemo_cyberbee.py
@@ -2,10 +2,10 @@ import numpy as np
 import pandas as pd
 from matplotlib.colors import hsv_to_rgb, rgb_to_hsv
 import matplotlib.pyplot as plt
-from navipy.rendering.cyber_bee import Cyberbee
+from navipy.sensors.renderer import BlenderRender
 
 # with tempfile.TemporaryDirectory() as folder:
-cyberbee = Cyberbee()
+cyberbee = BlenderRender()
 cyberbee.cycle_samples = 50
 cyberbee.camera_rotation_mode = 'XYZ'
 cyberbee.camera_fov = [[-90, 90], [-180, 180]]
diff --git a/navipy/database/__init__.py b/navipy/database/__init__.py
index 2affba62d28d0be496b54ff925335cd1c2bfd494..a31f25f12f4870cfb5dcde5bf192a622cfe4926d 100644
--- a/navipy/database/__init__.py
+++ b/navipy/database/__init__.py
@@ -122,6 +122,7 @@ It creates three sql table on initialisation.
                 must be single value')
         self.filename = filename
         self.channels = channels
+        self.viewing_directions = None
         self.normalisation_columns = list()
         for chan_n in self.channels:
             self.normalisation_columns.append(str(chan_n) + '_max')
@@ -138,6 +139,9 @@ It creates three sql table on initialisation.
         self.tablecolumns['position_orientation']['alpha_2'] = 'real'
         self.tablecolumns['image'] = dict()
         self.tablecolumns['image']['data'] = 'array'
+        # self.tablecolumns['viewing_directions'] = dict()
+        # self.tablecolumns['viewing_directions']['elevation'] = 'array'
+        # self.tablecolumns['viewing_directions']['azimuth'] = 'array'
         self.tablecolumns['normalisation'] = dict()
         for col in self.normalisation_columns:
             self.tablecolumns['normalisation'][col] = 'real'
@@ -376,9 +380,9 @@ database
         if rowid is np.nan:
             raise ValueError('rowid must not be nan')
         if (posorient is None) and (rowid is None):
-             raise Exception('posorient and rowid can not be both None')
+            raise Exception('posorient and rowid can not be both None')
         if posorient is not None:
-             rowid = self.get_posid(posorient)
+            rowid = self.get_posid(posorient)
         # Read images
         tablename = 'position_orientation'
         toreturn = pd.read_sql_query(
diff --git a/navipy/moving/agent.py b/navipy/moving/agent.py
index 2d217c69d8ab879e86814c250f5f6bad68df66d6..041598589ae9f11ac87638a09ff7078f8d749fef 100644
--- a/navipy/moving/agent.py
+++ b/navipy/moving/agent.py
@@ -17,6 +17,7 @@ from multiprocessing import Queue, JoinableQueue, Process
 import inspect
 from navipy.database import DataBaseLoad
 import navipy.moving.maths as navimomath
+from navipy.sensors import Senses
 
 version = float(nx.__version__)
 
@@ -26,10 +27,19 @@ def defaultcallback(*args, **kwargs):
     raise NameError('No Callback')
 
 
+class DefaultSensors():
+    def __init__(self):
+        pass
+
+    def update(self, posorient):
+        raise NameError('No Callback')
+
+
 class AbstractAgent():
     def __init__(self):
-        self._sensors = defaultcallback
+        self._sensors = DefaultSensors()
         self._motion = defaultcallback
+        self._motion_param = None
         self._alter_posorientvel = defaultcallback
         self._posorient_col = ['x', 'y', 'z',
                                'alpha_0', 'alpha_1', 'alpha_2']
@@ -74,6 +84,17 @@
     def motion(self):
         return inspect.getsourcelines(self._motion)
 
+    @property
+    def motion_param(self):
+        return self._motion_param.copy()
+
+    @motion_param.setter
+    def motion_param(self, param):
+        if isinstance(param, dict):
+            self._motion_param = param
+        else:
+            raise TypeError('motion param should be a dictionary')
+
     @property
     def sensors(self):
         return inspect.getsourcelines(self._sensors)
@@ -83,9 +104,15 @@
         return inspect.getsourcelines(self._alter_posorientvel)
 
     def move(self):
-        scene = self._sensors(self.posorient)
-        newpos = self._motion(self.posorient, scene)
-        alteredpos = self._alter_posorientvel(newpos)
+        self._sensors.update(self.posorient)
+        if self._motion_param is None:
+            self.velocity = self._motion(self._posorient_vel,
+                                         self._sensors)
+        else:
+            self.velocity = self._motion(self._posorient_vel,
+                                         self._sensors,
+                                         **self._motion_param)
+        alteredpos = self._alter_posorientvel(self._posorient_vel)
         self.posorient = alteredpos
         self.velocity = alteredpos
 
@@ -94,7 +121,7 @@
         """
         if return_tra:
             trajectory = pd.DataFrame(index=range(0, max_nstep),
-                                      columns=self.posorient_vel_col)
+                                      columns=self._posorient_vel_col)
             trajectory.loc[0, :] = self._posorient_vel.copy()
         for stepi in range(1, max_nstep):
             self.move()
@@ -115,16 +142,16 @@ CyberBeeAgent is a close loop agent and need to be run within blender \
 bla
     """
 
-    def __init__(self, cyberbee):
+    def __init__(self, renderer):
         AbstractAgent.__init__(self)
         AbstractAgent._alter_posorientvel = \
             lambda motion_vec: navimomath.next_pos(motion_vec,
                                                    move_mode='free_run')
-        self.sensors = cyberbee.scene
+        self.sensors = renderer
 
     @AbstractAgent.sensors.setter
-    def sensors(self, cyberbee):
-        self._sensors = cyberbee.scene
+    def sensors(self, renderer):
+        self._sensors = Senses(renderer)
 
     @AbstractAgent.motion.setter
     def motion(self, motion):
@@ -159,7 +186,7 @@ GridAgent is a close loop agent here its position is snap to a grid.
     def sensors(self, database_filename):
         self.db = DataBaseLoad(database_filename)
         self._posorients = self.db.posorients
-        self._sensors = self.db.scene
+        self._sensors = Senses(renderer=self.db)
 
     @AbstractAgent.motion.setter
     def motion(self, motion):
diff --git a/navipy/rendering/__init__.py b/navipy/sensors/__init__.py
similarity index 60%
rename from navipy/rendering/__init__.py
rename to navipy/sensors/__init__.py
index 294345fcc7b702e5f4afefcd0c5264daebfe7dc9..a85692ff0caf89aac19f933b0018e14cd5e749b2 100644
--- a/navipy/rendering/__init__.py
+++ b/navipy/sensors/__init__.py
@@ -42,9 +42,31 @@ Custom sampling
 
 Rendering classes
 -----------------
-.. autoclass:: navipy.rendering.bee_sampling.BeeSampling
+.. autoclass:: navipy.sensors.bee_sampling.BeeSampling
    :members:
-.. autoclass:: navipy.rendering.cyber_bee.Cyberbee
+.. autoclass:: navipy.sensors.renderer.BlenderRender
    :members:
 
 """
+
+
+class Bunch:
+    def __init__(self, **kwds):
+        self.__dict__.update(kwds)
+
+
+class Senses():
+    def __init__(self,
+                 renderer=None):
+        self.vision = Bunch(scene=None,
+                            viewing_directions=None,
+                            channels=None)
+        self.renderer = renderer
+        if self.renderer is not None:
+            self.vision.scene = None
+            self.vision.viewing_directions = renderer.viewing_directions
+            self.vision.channels = renderer.channels
+
+    def update(self, posorient):
+        if self.renderer is not None:
+            self.vision.scene = self.renderer.scene(posorient)
diff --git a/navipy/rendering/bee_sampling.py b/navipy/sensors/bee_sampling.py
similarity index 100%
rename from navipy/rendering/bee_sampling.py
rename to navipy/sensors/bee_sampling.py
diff --git a/navipy/rendering/cyber_bee.py b/navipy/sensors/renderer.py
similarity index 96%
rename from navipy/rendering/cyber_bee.py
rename to navipy/sensors/renderer.py
index bce38051fe2260846c56831e625466224e84f0e4..d18499a22d4326921f77f0c7ef1e3704db156c45 100644
--- a/navipy/rendering/cyber_bee.py
+++ b/navipy/sensors/renderer.py
@@ -27,10 +27,10 @@ import os
 import pandas as pd
 
 
-class Cyberbee():
+class BlenderRender():
     """
-    Cyberbee is a small class binding python with blender.
-    With Cyberbee one can move the bee to a position, and render what
+    BlenderRender is a small class binding python with blender.
+    With BlenderRender one can move the bee to a position, and render what
     the bee see at this position. The Bee eye is a panoramic camera with
     equirectangular projection
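Taken together, the patch replaces direct mydb.scene(rowid=...) calls with the renderer-agnostic Senses wrapper. Below is a minimal usage sketch assembled from the updated examples above; the resources/database.db path and the row index 12 come from apcv.py, and any renderer exposing scene(posorient), viewing_directions and channels could stand in for the database.

import pkg_resources

import navipy.processing as processing
from navipy.database import DataBaseLoad
from navipy.sensors import Senses

# Open the example database shipped with navipy.
mydb_filename = pkg_resources.resource_filename(
    'navipy', 'resources/database.db')
mydb = DataBaseLoad(mydb_filename)

# Wrap it in Senses; the database plays the role of the renderer.
mysenses = Senses(renderer=mydb)

# Look up the scene at one pre-rendered grid position...
posorient = mydb.posorients.loc[12, :]
mysenses.update(posorient)

# ...and feed the sensory snapshot to any processing function.
my_apcv = processing.pcode.apcv(mysenses.vision.scene,
                                mysenses.vision.viewing_directions)

Routing the database-backed GridAgent and the Blender-backed CyberBeeAgent through the same Senses interface is what lets the processing functions stay agnostic of where the scene comes from.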