diff --git a/doc/build/doctrees/database.doctree b/doc/build/doctrees/database.doctree
index 2d5588f798d4b181bb70cb3efb2b38e659b5fa66..ee19a98498a3d49fa366bae583f1fd69ff442e39 100644
Binary files a/doc/build/doctrees/database.doctree and b/doc/build/doctrees/database.doctree differ
diff --git a/doc/build/doctrees/environment.pickle b/doc/build/doctrees/environment.pickle
index 798f3106d459b45a5ea102260b966c8f1fda588b..78cb36fdbb6d3eae9aff863285584b6224d15094 100644
Binary files a/doc/build/doctrees/environment.pickle and b/doc/build/doctrees/environment.pickle differ
diff --git a/doc/build/doctrees/processing.doctree b/doc/build/doctrees/processing.doctree
index fed56b00c83183510c133a9ba8f541fb57da153b..85455bc5ba4a1cb056a15f93ee0be162d054a9fb 100644
Binary files a/doc/build/doctrees/processing.doctree and b/doc/build/doctrees/processing.doctree differ
diff --git a/doc/build/html/database.html b/doc/build/html/database.html
index 1ef558fbe095b8b72249ad795d5b69af976e82c9..872cbe3cb1ff4bd02e0fe2e960c21acf5a28f19b 100644
--- a/doc/build/html/database.html
+++ b/doc/build/html/database.html
@@ -117,38 +117,6 @@ to write on database (Load class)</p>
 <dd><p>Return the position orientations of all points in the database</p>
 </dd></dl>
 
-<dl class="method">
-<dt id="navipy.database.DataBaseLoad.scene">
-<code class="descname">scene</code><span class="sig-paren">(</span><em>posorient=None</em>, <em>rowid=None</em><span class="sig-paren">)</span><a class="headerlink" href="#navipy.database.DataBaseLoad.scene" title="Permalink to this definition">¶</a></dt>
-<dd><p>Return a scene at a position orientation or given rowid         in a given database.</p>
-<table class="docutils field-list" frame="void" rules="none">
-<col class="field-name" />
-<col class="field-body" />
-<tbody valign="top">
-<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><ul class="first simple">
-<li><strong>database</strong> – a DataBaseLoad class         :param posorient:  a pandas Series with index:         [‘x’,’y’,’z’,’alpha_0,’alpha_1,’alpha_2’] (default None, i.e. not used)</li>
-<li><strong>rowid</strong> – a row identification integer for directly reading         in the database (default None, i.e. not used).</li>
-</ul>
-</td>
-</tr>
-<tr class="field-even field"><th class="field-name">Returns:</th><td class="field-body"><p class="first">a scene [elevation, azimuth, channel, 1] or         [ommatidia,channel,1].</p>
-</td>
-</tr>
-<tr class="field-odd field"><th class="field-name">Return type:</th><td class="field-body"><p class="first last">np.ndarray</p>
-</td>
-</tr>
-</tbody>
-</table>
-<div class="highlight-default"><div class="highlight"><pre><span></span><span class="n">my_scene</span> <span class="o">=</span> <span class="n">mydb</span><span class="o">.</span><span class="n">scene</span><span class="p">(</span><span class="n">rowid</span><span class="o">=</span><span class="n">rowid</span><span class="p">)</span>
-
-</pre></div>
-</div>
-<p>(<a class="reference external" href="./example/database/scene.py">Source code</a>, <a class="reference external" href="./example/database/scene.png">png</a>, <a class="reference external" href="./example/database/scene.hires.png">hires.png</a>, <a class="reference external" href="./example/database/scene.pdf">pdf</a>)</p>
-<div class="figure">
-<img alt="_images/scene.png" src="_images/scene.png" />
-</div>
-</dd></dl>
-
 </dd></dl>
 
 </div>
diff --git a/doc/build/html/genindex.html b/doc/build/html/genindex.html
index cfea0171ad00c5039d00fbd47ba2c0812be3a5ae..82d47675e1ea03c7c284d91031972b348648a8fd 100644
--- a/doc/build/html/genindex.html
+++ b/doc/build/html/genindex.html
@@ -133,10 +133,6 @@
 
 <h2 id="S">S</h2>
 <table style="width: 100%" class="indextable genindextable"><tr>
-  <td style="width: 33%; vertical-align: top;"><ul>
-      <li><a href="database.html#navipy.database.DataBaseLoad.scene">scene() (navipy.database.DataBaseLoad method)</a>
-</li>
-  </ul></td>
   <td style="width: 33%; vertical-align: top;"><ul>
       <li><a href="processing.html#navipy.processing.pcode.skyline">skyline() (in module navipy.processing.pcode)</a>
 </li>
diff --git a/doc/build/html/objects.inv b/doc/build/html/objects.inv
index 3367409cd86a5b85cd03adf395600693b1a77b55..ab820e54a805f84113f97d53d5c4c744e2072ae4 100644
Binary files a/doc/build/html/objects.inv and b/doc/build/html/objects.inv differ
diff --git a/doc/build/html/searchindex.js b/doc/build/html/searchindex.js
index df5fede7980f998ea42102c653ab71d3140fd9e6..3561e30d1c4ec9a79202be7eb51b959a3f5a7b1b 100644
--- a/doc/build/html/searchindex.js
+++ b/doc/build/html/searchindex.js
@@ -1 +1 @@
-Search.setIndex({docnames:["analysing","comparing","computing","database","gettingstarted","index","moving","processing","rendering"],envversion:53,filenames:["analysing.rst","comparing.rst","computing.rst","database.rst","gettingstarted.rst","index.rst","moving.rst","processing.rst","rendering.rst"],objects:{"navipy.database":{DataBaseLoad:[3,1,1,""]},"navipy.database.DataBaseLoad":{create:[3,2,1,""],iter_posorients:[3,3,1,""],posorients:[3,2,1,""],scene:[3,3,1,""]},"navipy.processing.mcode":{optic_flow:[7,4,1,""]},"navipy.processing.pcode":{apcv:[7,4,1,""],contrast_weighted_nearness:[7,4,1,""],michelson_contrast:[7,4,1,""],pcv:[7,4,1,""],skyline:[7,4,1,""]},navipy:{database:[3,0,0,"-"],processing:[7,0,0,"-"]}},objnames:{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","attribute","Python attribute"],"3":["py","method","Python method"],"4":["py","function","Python function"]},objtypes:{"0":"py:module","1":"py:class","2":"py:attribute","3":"py:method","4":"py:function"},terms:{"case":[3,7],"class":3,"default":3,"final":8,"import":[3,8],"return":[3,7],IBS:7,NOT:[],OBS:7,The:[3,7],With:8,__file__:8,about:1,abspath:[],abus:7,access:3,activ:8,activate_thi:8,activate_virtualenv:8,all:8,along:7,alpha1:8,alpha2:8,alpha3:8,alpha_0:3,alpha_1:[3,8],alpha_2:[3,8],alpha_3:8,alreadi:3,alter:3,alwai:7,analys:5,apcv:7,around:7,arrai:[7,8],astyp:[],axarr:[],azimuth:[3,7],background:8,base:[3,7],basten:7,becaus:3,bee:3,bee_samp:8,bee_sampl:8,been:3,beesampl:8,bin:8,blend:8,calcul:[3,7],can:[3,7,8],care:[],cartesian:7,certain:3,channel:[3,7],classic:7,code:3,come:8,compar:5,compon:7,compress:8,comput:5,configur:8,connect:[],consist:3,constant:3,constrast:7,contain:3,contrast_s:7,contrast_weighted_near:7,coordin:7,correspond:3,cost:3,creat:3,create_sampling_grid:8,databas:5,databaseload:3,decid:3,def:8,defin:7,demo_test:8,depend:3,deriv:7,describ:3,dict:8,differ:3,dimens:7,direct:7,directli:3,disposit:8,distanc:[7,8],distance_channel:7,e402:8,each:[3,7],either:7,elev:[3,7],els:8,entir:3,env:8,environment_nam:8,equal:8,equirectangular:7,ever:3,exampl:[],exec:8,expandus:8,extens:7,eye:7,fals:3,figsiz:[],filenam:3,filepath:8,folder:8,follow:7,from:[3,7,8],full:3,gener:3,get:5,get_posori:3,given:[3,7,8],grid:8,grow:[],has:3,have:7,head:3,here:8,highest:7,hire:[3,7],home:8,howev:3,ibpc:7,ibs:7,identif:3,identifi:3,imag:[6,7,8],implement:[],imshow:[],inde:3,index:[3,5,7],infin:8,insect:7,instal:8,instead:3,integ:3,intens:7,interpol:6,introduc:8,invert_yaxi:[],iter:3,iter_posori:3,its:8,join:8,just:8,know:3,line:[],linspac:8,list:[],literalinclud:[],local:7,locat:7,look:3,lowest:7,lumin:7,mai:3,mallot:7,manner:7,map:7,matplotlib:[],max:[],maximum:[7,8],mcode:7,member:[],memori:1,mesh:[],method:7,michelson_contrast:7,min:[],minimum:7,modul:[3,5,8],more:3,move:5,my_apcv:7,my_contrast:7,my_pcv:7,my_scen:[3,7],my_skylin:[],mydb:[3,7],mydb_filenam:3,name:8,navig:[3,8],navipi:[3,7,8],ndarrai:[3,7],need:[7,8],neighbour:6,none:3,noqa:8,normalis:3,now:8,number:[3,7],numpi:[7,8],object:8,obpc:7,obs:7,omatidium:7,ommatidia:[3,7],ommatidium:7,open:8,optic_flow:7,orient:[],orientaiton:3,orinet:[],otherwis:8,our:8,own:8,page:5,panda:3,panoram:7,param:[3,7],paramet:[3,7],part:7,path:8,pcode:7,pcv:7,pdf:[3,7],pixel:7,place_cod:7,plot:[],plt:[],png:[3,7],point:3,posit:[],position_orient:3,posori:3,process:[3,5],project:7,provid:3,pyplot:[],python:8,quickli:[],rang:3,read:[3,8],read_imag:3,region:7,regular:7,render:[3,5],repres:7,respect:7,row:3,rowid:[3,7],rtype:7,scene:[3,5],sceneri:7,search:5,sens:7
,seri:3,show:[],sinc:8,situat:3,size:7,sourc:[3,7],space:7,speak:3,speed:3,sqlite3:3,sqrt:8,start:5,strategi:3,subplot:[],tabl:3,talk:1,technic:3,tell:8,tempfil:8,temporarydirectori:8,terminolog:7,therefor:7,thi:3,three:[],through:3,thu:[3,7,8],time:3,to_plot_dist:[],to_plot_im:[],toolbox:[7,8],transform:7,type:[3,7],uint8:[],uniqu:3,until:8,use:[3,8],used:[3,7,8],useful:3,variabl:8,veloc:7,view:7,viewing_direct:7,virtualenv:8,want:[3,7,8],weather:3,were:3,what:8,wheight:7,when:7,where:[3,8],which:3,workon_hom:8,world_dim:8,write:3,zero:8},titles:["Analysing","Comparing","Computing","Database","Getting started","Welcome to Navigation Toolbox\u2019s documentation!","Moving","Processing a scene","Rendering"],titleterms:{agent:0,all:3,analys:0,area:0,around:0,attractor:0,averag:[2,7],between:6,blender:8,build:8,catchment:0,code:[2,7],compar:1,comput:2,contrast:7,cyberbe:8,databas:[3,8],differ:[1,2],document:5,doe:0,environ:8,familiar:[1,2],few:0,find:0,flow:7,from:2,gener:8,get:4,goal:0,grid:6,home:0,how:[3,8],idf:1,imag:[1,2,3],indic:5,infomax:1,load:3,michelson:7,motion:7,move:6,navig:5,nearness:7,network:[1,2],optic:7,orient:3,place:[2,7],point:6,posit:3,process:7,regular:8,render:8,ridf:1,rotat:[1,2],sampl:8,scene:7,script:8,skylin:7,start:4,tabl:5,test:8,toolbox:5,type:0,using:8,vector:[2,7],weight:7,welcom:5,your:8}})
\ No newline at end of file
+Search.setIndex({docnames:["analysing","comparing","computing","database","gettingstarted","index","moving","processing","rendering"],envversion:53,filenames:["analysing.rst","comparing.rst","computing.rst","database.rst","gettingstarted.rst","index.rst","moving.rst","processing.rst","rendering.rst"],objects:{"navipy.database":{DataBaseLoad:[3,1,1,""]},"navipy.database.DataBaseLoad":{create:[3,2,1,""],iter_posorients:[3,3,1,""],posorients:[3,2,1,""]},"navipy.processing.mcode":{optic_flow:[7,4,1,""]},"navipy.processing.pcode":{apcv:[7,4,1,""],contrast_weighted_nearness:[7,4,1,""],michelson_contrast:[7,4,1,""],pcv:[7,4,1,""],skyline:[7,4,1,""]},navipy:{database:[3,0,0,"-"],processing:[7,0,0,"-"]}},objnames:{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","attribute","Python attribute"],"3":["py","method","Python method"],"4":["py","function","Python function"]},objtypes:{"0":"py:module","1":"py:class","2":"py:attribute","3":"py:method","4":"py:function"},terms:{"case":[3,7],"class":3,"default":[],"final":8,"import":[3,8],"return":[3,7],IBS:7,NOT:[],OBS:7,The:[3,7],With:8,__file__:8,about:1,abspath:[],abus:7,access:3,activ:8,activate_thi:8,activate_virtualenv:8,all:8,along:7,alpha1:8,alpha2:8,alpha3:8,alpha_0:3,alpha_1:[3,8],alpha_2:[3,8],alpha_3:8,alreadi:3,alter:3,alwai:7,analys:5,apcv:7,around:7,arrai:[7,8],astyp:[],axarr:[],azimuth:7,background:8,base:[3,7],basten:7,becaus:3,bee:3,bee_samp:8,bee_sampl:8,been:3,beesampl:8,bin:8,blend:8,calcul:[3,7],can:[3,7,8],care:[],cartesian:7,certain:3,channel:[3,7],classic:7,code:3,come:8,compar:5,compon:7,compress:8,comput:5,configur:8,connect:[],consist:3,constant:3,constrast:7,contain:3,contrast_s:7,contrast_weighted_near:7,coordin:7,correspond:3,cost:3,creat:3,create_sampling_grid:8,databas:5,databaseload:3,decid:3,def:8,defin:7,demo_test:8,depend:3,deriv:7,describ:3,dict:8,differ:3,dimens:7,direct:7,directli:[],disposit:8,distanc:[7,8],distance_channel:7,e402:8,each:[3,7],either:7,elev:7,els:8,entir:3,env:8,environment_nam:8,equal:8,equirectangular:7,ever:3,exampl:[],exec:8,expandus:8,extens:7,eye:7,fals:3,figsiz:[],filenam:3,filepath:8,folder:8,follow:7,from:[3,7,8],full:3,gener:3,get:5,get_posori:3,given:[3,7,8],grid:8,grow:[],has:3,have:7,head:3,here:8,highest:7,hire:[3,7],home:8,howev:3,ibpc:7,ibs:7,identif:[],identifi:3,imag:[6,7,8],implement:[],imshow:[],inde:3,index:[3,5,7],infin:8,insect:7,instal:8,instead:3,integ:[],intens:7,interpol:6,introduc:8,invert_yaxi:[],iter:3,iter_posori:3,its:8,join:8,just:8,know:3,line:[],linspac:8,list:[],literalinclud:[],local:7,locat:7,look:3,lowest:7,lumin:7,mai:3,mallot:7,manner:7,map:7,matplotlib:[],max:[],maximum:[7,8],mcode:7,member:[],memori:1,mesh:[],method:7,michelson_contrast:7,min:[],minimum:7,modul:[3,5,8],more:3,move:5,my_apcv:7,my_contrast:7,my_pcv:7,my_scen:7,my_skylin:[],mydb:[3,7],mydb_filenam:3,name:8,navig:[3,8],navipi:[3,7,8],ndarrai:7,need:[7,8],neighbour:6,none:[],noqa:8,normalis:3,now:8,number:[3,7],numpi:[7,8],object:8,obpc:7,obs:7,omatidium:7,ommatidia:7,ommatidium:7,open:8,optic_flow:7,orient:[],orientaiton:3,orinet:[],otherwis:8,our:8,own:8,page:5,panda:[],panoram:7,param:7,paramet:7,part:7,path:8,pcode:7,pcv:7,pdf:[3,7],pixel:7,place_cod:7,plot:[],plt:[],png:[3,7],point:3,posit:[],position_orient:3,posori:3,process:[3,5],project:7,provid:3,pyplot:[],python:8,quickli:[],rang:3,read:8,read_imag:3,region:7,regular:7,render:[3,5],repres:7,respect:7,row:3,rowid:[3,7],rtype:7,scene:5,sceneri:7,search:5,sens:7,seri:3,show:[],sinc:8,situat:3,size:7,sourc:[3
,7],space:7,speak:3,speed:3,sqlite3:3,sqrt:8,start:5,strategi:3,subplot:[],tabl:3,talk:1,technic:3,tell:8,tempfil:8,temporarydirectori:8,terminolog:7,therefor:7,thi:3,three:[],through:3,thu:[3,7,8],time:3,to_plot_dist:[],to_plot_im:[],toolbox:[7,8],transform:7,type:7,uint8:[],uniqu:3,until:8,use:[3,8],used:[3,7,8],useful:3,variabl:8,veloc:7,view:7,viewing_direct:7,virtualenv:8,want:[3,7,8],weather:3,were:3,what:8,wheight:7,when:7,where:[3,8],which:3,workon_hom:8,world_dim:8,write:3,zero:8},titles:["Analysing","Comparing","Computing","Database","Getting started","Welcome to Navigation Toolbox\u2019s documentation!","Moving","Processing a scene","Rendering"],titleterms:{agent:0,all:3,analys:0,area:0,around:0,attractor:0,averag:[2,7],between:6,blender:8,build:8,catchment:0,code:[2,7],compar:1,comput:2,contrast:7,cyberbe:8,databas:[3,8],differ:[1,2],document:5,doe:0,environ:8,familiar:[1,2],few:0,find:0,flow:7,from:2,gener:8,get:4,goal:0,grid:6,home:0,how:[3,8],idf:1,imag:[1,2,3],indic:5,infomax:1,load:3,michelson:7,motion:7,move:6,navig:5,nearness:7,network:[1,2],optic:7,orient:3,place:[2,7],point:6,posit:3,process:7,regular:8,render:8,ridf:1,rotat:[1,2],sampl:8,scene:7,script:8,skylin:7,start:4,tabl:5,test:8,toolbox:5,type:0,using:8,vector:[2,7],weight:7,welcom:5,your:8}})
\ No newline at end of file
diff --git a/navipy.egg-info/PKG-INFO b/navipy.egg-info/PKG-INFO
index cb2d0a173a0a145c009c7bab788a28cf48697381..fd0fcd430303ba44bb65b7ecc5a9c0ec68094e90 100644
--- a/navipy.egg-info/PKG-INFO
+++ b/navipy.egg-info/PKG-INFO
@@ -6,6 +6,7 @@ Home-page: UNKNOWN
 Author: Olivier J.N. Bertrand
 Author-email: olivier.bertrand@uni-bielefeld.de
 License: UNKNOWN
+Description-Content-Type: UNKNOWN
 Description: UNKNOWN
 Platform: UNKNOWN
 Requires: numpy
diff --git a/navipy/moving/agent.py b/navipy/moving/agent.py
index ad3cd9c6c4370243c9b43b4edb629c4bd3f57340..e12f39cc4038da280d3d37466ad714bc185932ad 100644
--- a/navipy/moving/agent.py
+++ b/navipy/moving/agent.py
@@ -15,38 +15,163 @@ from navipy.database import DataBaseLoad
 import navipy.moving.maths as navimomath
 
 
-def defaultcallback(database, posorients):
+def defaultcallback(*args, **kwargs):
     raise NameError('No Callback')
 
 
 class AbstractAgent():
+    def __init__(self):
+        self.__sensors = defaultcallback
+        self.__motion = defaultcallback
+        self.__alter_posorientvel = defaultcallback
+        self.__posorient_col = ['x', 'y', 'z',
+                                'alpha_0', 'alpha_1', 'alpha_2']
+        self.__velocity_col = ['d' + col for col in self.__posorient_col]
+        self.__posorient_vel_col = (self.__posorient_col
+                                    + self.__velocity_col)
+        self.__posorient_vel = pd.Series(
+            index=self.__posorient_vel_col,
+            data=np.nan)
+
+    @property
+    def posorient(self):
+        return self.__posorient_vel.loc[self.__posorient_col].copy()
+
+    @posorient.setter
+    def posorient(self, posorient):
+        if isinstance(posorient, pd.Series) is False:
+            raise TypeError('posorient should be a pandas Series')
+        for col in self.__posorient_col:
+            if col not in posorient.index:
+                raise KeyError(
+                    'posorient should have {} as index'.format(col))
+        self.__posorient_vel.loc[self.__posorient_col] = \
+            posorient.loc[self.__posorient_col]
+
+    @property
+    def velocity(self):
+        return self.__posorient_vel.loc[self.__velocity_col].copy()
+
+    @velocity.setter
+    def velocity(self, velocity):
+        if isinstance(velocity, pd.Series) is False:
+            raise TypeError('velocity should be a pandas Series')
+        for col in self.__velocity_col:
+            if col not in velocity.index:
+                raise KeyError(
+                    'velocity should have {} as index'.format(col))
+        self.__posorient_vel.loc[self.__velocity_col] = \
+            velocity.loc[self.__velocity_col]
+
+    @property
+    def motion(self):
+        return inspect.getsourcelines(self.__motion)
+
+    @property
+    def sensors(self):
+        return inspect.getsourcelines(self.__sensors)
+
+    @property
+    def alter_posorientvel(self):
+        return inspect.getsourcelines(self.__alter_posorientvel)
+
+    def move(self):
+        """ Move the agent by a single step """
+        # sense the scene at the current position orientation
+        scene = self.__sensors(self.posorient)
+        # deduce the agent motion from the scene
+        newpos = self.__motion(self.posorient, scene)
+        # constrain the new position orientation and velocity
+        # (e.g. snap it to the grid, or leave it in free run)
+        alteredpos = self.__alter_posorientvel(newpos)
+        self.posorient = alteredpos
+        self.velocity = alteredpos
+
+    def fly(self, max_nstep, return_tra=False):
+        """move cyberbee until max step has been performed
+        """
+        if return_tra:
+            trajectory = pd.DataFrame(index=range(0, max_nstep),
+                                      columns=self.__posorient_vel_col)
+            trajectory.loc[0, :] = self.__posorient_vel.copy()
+        for stepi in range(1, max_nstep):
+            self.move()
+            if return_tra:
+                trajectory.loc[stepi, :] = self.__posorient_vel.copy()
+        if return_tra:
+            return trajectory
+        else:
+            return None
+
+
+class CyberBeeAgent(AbstractAgent):
+    """
+    A common way to make an agent move is to update the sensory \
+    information at the current agent location, process this \
+    information to deduce the agent motion, and finally displace \
+    the agent to its new location. This iterative method can be used \
+    with a wide range of models.
+    In navipy the sensory information is updated by the Cyberbee, a \
+    class interfacing Blender with navipy, so that visual information \
+    can be rendered at the agent location.
+    To use the CyberBeeAgent you first need to create a function \
+    taking the scene and the position orientation of the agent as \
+    input, and returning the agent motion. The CyberBeeAgent can then \
+    be moved for a single step, or fly until a given number of steps \
+    has been performed or the agent has stopped.
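+
+    A minimal usage sketch (the names are placeholders: ``mybee`` is
+    an already configured Cyberbee instance and
+    ``my_motion(posorient, scene)`` returns the agent motion)::
+
+        bee = CyberBeeAgent(mybee)
+        bee.motion = my_motion
+        trajectory = bee.fly(max_nstep=100, return_tra=True)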
+    """
+
+    def __init__(self, cyberbee):
+        AbstractAgent.__init__(self)
+        AbstractAgent.__alter_posorientvel = \
+            lambda motion_vec: navimomath.next_pos(motion_vec,
+                                                   move_mode='free_run')
+        self.sensors = cyberbee
+
+    @AbstractAgent.sensors.setter
+    def sensors(self, cyberbee):
+        self.__sensors = cyberbee.scene
+
+    @AbstractAgent.motion.setter
+    def motion(self, motion):
+        self.__motion = motion
+
+
+class GridAgent(AbstractAgent, Process):
     """
-    An abtract class for agent
+    The use of a closed-loop model including visual rendering is \
+    sometimes too slow to efficiently test several models or tune \
+    the parameters of a given model. The GridAgent solves this \
+    problem by restricting the agent motion to locations with a \
+    pre-rendered scene. The agent thus moves on a grid, and its next \
+    position is always snapped to the closest grid location. The \
+    finer the grid, the larger the database storing all sceneries \
+    and grid locations, but also the more accurate the agent motion. \
+    The grid size therefore depends on the available storage space, \
+    the time you can wait for the database creation, and how \
+    sensitive your model is to the exact location.
+    As for the CyberBeeAgent, your navigational model should be \
+    contained in a function taking the scene and the position \
+    orientation of the agent as input, and returning the agent \
+    motion. The agent can be moved a single step, or fly until a \
+    given number of steps has been performed or the agent has \
+    stopped. It is worth mentioning that GridAgent inherits from the \
+    Process class of the multiprocessing module of the standard \
+    Python library; several GridAgents can therefore safely be run \
+    in parallel.
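+
+    A minimal usage sketch (the names are placeholders: 'mydb.db' is
+    an existing grid database, ``my_motion(posorient, scene)`` returns
+    the agent motion, and ``start_posorient`` is a pandas Series
+    indexed by the position orientation columns)::
+
+        grid_spacing = pd.Series(data=1, index=['dx', 'dy', 'dz'])
+        agent = GridAgent('mydb.db')
+        agent.motion = my_motion
+        agent.mode_of_motion = {'mode': 'on_cubic_grid',
+                                'param': {'grid_spacing': grid_spacing}}
+        agent.posorient = start_posorient
+        trajectory = agent.fly(max_nstep=100, return_tra=True)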
     """
 
-    def __init__(self,
-                 database_filename,
-                 memory_friendly=False):
+    def __init__(self, database_filename,
+                 posorients_queue=None,
+                 results_queue=None):
+        if (posorients_queue is not None) and (results_queue is not None):
+            multiprocessing.Process.__init__(self)
+        AbstractAgent.__init__(self)
+        AbstractAgent.__alter_posorientvel = self.snap_to_grid
+        self.__posorients_queue = posorients_queue
+        self.__results_queue = results_queue
+        self.sensors = database_filename
 
+    @AbstractAgent.sensors.setter
+    def sensors(self, database_filename):
         self.db = DataBaseLoad(database_filename)
-        self.dbname = database_filename
-        if memory_friendly:
-            self.__posorients = None
-        else:
-            self.__posorients = self.db.posorients
-        # set mode of motion
-        mode_move = {'mode': 'on_cubic_grid',
-                     'param': {'grid_spacing':
-                               pd.Series(data=1,
-                                         index=['dx', 'dy', 'dz'])}}
-        self.mode_of_motion = mode_move
+        self.__posorients = self.db.posorients
+        self.__sensors = self.db.scene
 
-    @property
-    def posorients(self):
-        toreturn = self.__posorients
-        if toreturn is not None:
-            toreturn = toreturn.copy()
-        return toreturn
+    @AbstractAgent.motion.setter
+    def motion(self, motion):
+        self.__motion = motion
 
     @property
     def mode_of_motion(self):
@@ -79,92 +204,17 @@ class AbstractAgent():
         else:
             raise ValueError('mode is not supported')
 
-    def abstractmove(self, posorients_vel):
-        if isinstance(posorients_vel, pd.Series) is False:
-            raise TypeError('posorients_vel should be a pandas Series')
-        for col in ['x', 'y', 'z', 'alpha_0', 'alpha_1', 'alpha_2',
-                    'dx', 'dy', 'dz', 'dalpha_0', 'dalpha_1', 'dalpha_2']:
-            if col not in posorients_vel.index:
-                raise KeyError(
-                    'posorients_vel should have {} as index'.format(col))
-        # Compute the next position
-        posorients_vel = navimomath.next_pos(
-            posorients_vel,
+    def snap_to_grid(self, posorient_vel):
+        posorient_vel = navimomath.next_pos(
+            posorient_vel,
             move_mode=self.__mode_move['mode'],
             move_param=self.__mode_move['param'])
-
-        # Compute the closest possible position
-        if posorients_vel is None:
-            tmp = navimomath.closest_pos_memory_friendly(
-                posorients_vel,
-                self.db)
-            posorients_vel[['x', 'y', 'z',
-                            'alpha_0', 'alpha_1', 'alpha_2']] = tmp
-            posorients_vel.name = tmp.name
-        else:
-            tmp = navimomath.closest_pos(
-                posorients_vel,
-                self.__posorients)
-            posorients_vel[['x', 'y', 'z',
-                            'alpha_0', 'alpha_1', 'alpha_2']] = tmp
-            posorients_vel.name = tmp.name
-        return posorients_vel
-
-
-class Single(AbstractAgent, Process):
-
-    def __init__(self,
-                 database_filename,
-                 initial_condition,
-                 memory_friendly=False,
-                 posorients_queue=None,
-                 results_queue=None):
-        if (posorients_queue is not None) and (results_queue is not None):
-            multiprocessing.Process.__init__(self)
-        AbstractAgent.__init__(self, database_filename,
-                               memory_friendly)
-
-        self.__posorientvel = pd.Series(
-            data=0,
-            index=['x', 'y', 'z',
-                   'alpha_0', 'alpha_1', 'alpha_2',
-                   'dx', 'dy', 'dz',
-                   'dalpha_0', 'dalpha_1', 'dalpha_2'],
-            dtype=np.float)
-
-        if isinstance(initial_condition, pd.Series):
-            if is_numeric_dtype(initial_condition):
-                common_id = list(set(initial_condition.index).intersection(
-                    self.__posorientvel.index))
-                self.__posorientvel.loc[common_id] = \
-                    initial_condition.loc[common_id]
-            else:
-                raise TypeError('vel should be numeric')
-
-        else:
-            raise TypeError('vel should be a pandas Series')
-
-        self.__posorients_queue = posorients_queue
-        self.__results_queue = results_queue
-        self.__callback_function = defaultcallback
-
-    def move(self):
-        # Compute the next position
-        tmp = self.__callback_function(database=self.db,
-                                       posorient=self.__posorientvel)
-        common_id = list(set(tmp.index).intersection(
-            self.__posorientvel.index))
-        self.__posorientvel.loc[common_id] = tmp.loc[common_id]
-        self.__posorientvel = self.abstractmove(self.__posorientvel)
-
-    def fly(self,  nsteps):
-        """move until either speed is null, or nsteps has been reached"""
-        prev_move = self.__posorientvel
-        for stepi in range(nsteps):
-            self.move()
-            if prev_move.equals(self.__posorientvel):
-                break
-            prev_move = self.__posorientvel
+        tmp = navimomath.closest_pos(
+            posorient_vel, self.__posorients)
+        posorient_vel[['x', 'y', 'z',
+                       'alpha_0', 'alpha_1', 'alpha_2']] = tmp
+        posorient_vel.name = tmp.name
+        return posorient_vel
 
     def run(self):
         """ Only supported when multiprocess"""
@@ -189,35 +239,38 @@ class Single(AbstractAgent, Process):
         self.__posorients_queue.task_done()
         print('Process {} done'.format(proc_name))
 
-    @property
-    def callback_function(self):
-        return inspect.getsourcelines(self.__callback_function)
-
-    @callback_function.setter
-    def callback_function(self, callback_function):
-        self.__callback_function = callback_function
-
-    @property
-    def position(self):
-        return self.__posorientvel.loc[['x', 'y', 'z']]
-
-    @property
-    def velocity(self):
-        return self.__posorientvel.loc[['dx', 'dy', 'dz']]
-
-    @property
-    def orientation(self):
-        return self.__posorientvel.loc[['alpha_0', 'alpha_1', 'alpha_2']]
-
-    @property
-    def angular_velocity(self):
-        return self.__posorientvel.loc[['dalpha_0', 'dalpha_1', 'dalpha_2']]
-
 
-class Multi(AbstractAgent):
+class GraphAgent():
+    """
+    As mentioned above, in every model of navigation the agent motion \
+    is derived from its current external state, i.e. its position \
+    orientation and their derivatives, and its internal state. \
+    However, when the agent motion is derived only from its current \
+    position orientation and from what is seen at this location, the \
+    simulation of the agent can be drastically simplified. Indeed, \
+    not only can the scene at the relevant locations be pre-rendered, \
+    but the motion of the agent from those locations as well. Since \
+    the agent is restricted to move from relevant location to \
+    relevant location, a graph of interconnected locations can be \
+    built. The nodes of the graph are the relevant locations, and \
+    the directed edges are the motions of the agent from one \
+    location to the next. GraphAgent builds such a graph from a \
+    database of pre-rendered sceneries at the relevant locations and \
+    a function giving the motion of the agent from a scene and the \
+    agent position orientation. Once the graph has been generated, \
+    attractors can be found, together with the number of locations \
+    converging to them (i.e. the catchment area or volume), whether \
+    two locations are connected, etc.
+    To speed up certain calculations, additional values can be \
+    stored at each graph node and accessed from the callback \
+    function. A word of warning: the graph can become incredibly \
+    large, so not too much information should be stored at each \
+    node. To assess the memory size of the graph before creating \
+    it, one can use the tool agent.tools.assess_graphmemsize.
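+
+    A minimal construction sketch ('mydb.db' is a placeholder for an
+    existing database of pre-rendered sceneries; the graph nodes are
+    initialised from the position orientations stored in the
+    database)::
+
+        graphagent = GraphAgent('mydb.db')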
+    """
 
     def __init__(self, database_filename):
-        super().__init__(database_filename, False)
+        self.db = DataBaseLoad(database_filename)
+        self.dbname = database_filename
         # Init the graph
         self.__graph = nx.DiGraph()
         for row_id, posor in self.db.posorients.iterrows():
@@ -245,15 +298,12 @@ class Multi(AbstractAgent):
         results_queue = Queue()
         for node in self.__graph.nodes:
             posorients_queue.put(self.__graph.nodes[node]['posorient'])
-            initpos = 0 * self.__graph.nodes[node]['posorient']
 
         # Start ndatabase loader
         num_agents = ncpu
-        agents = [Single(self.dbname,
-                         initial_condition=initpos,
-                         memory_friendly=False,
-                         posorients_queue=posorients_queue,
-                         results_queue=results_queue)
+        agents = [GridAgent(self.dbname,
+                            posorients_queue=posorients_queue,
+                            results_queue=results_queue)
                   for _ in range(num_agents)]
         for w in agents:
             w.callback_function = callback_function
diff --git a/navipy/rendering/cyber_bee.py b/navipy/rendering/cyber_bee.py
index 9c203e55a30fae9d6fdc3189894cf868aa336e1f..530a11b38698a1608a72c506c7e711aeed2ca974 100644
--- a/navipy/rendering/cyber_bee.py
+++ b/navipy/rendering/cyber_bee.py
@@ -376,6 +376,20 @@ class Cyberbee():
         distance = distance[:, :, 0]
         return distance
 
+    def scene(self, posorient):
+        """ update position orientation and return a RGBD image
+
+        :param posorient: is a 1x6 vector containing:
+             x,y,z, angle_1, angle_2, angle_3,
+             here the angles are euler rotation around the axis
+             specified by scene.camera.rotation_mode
+        :type posorient: 1x6 double array
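+
+        A minimal usage sketch (assuming an already configured
+        Cyberbee instance ``mybee`` and a valid ``posorient`` vector)::
+
+            my_scene = mybee.scene(posorient)
+            distance_to_objects = my_scene[:, :, 3]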
+        """
+        self.update(posorient)
+        image = self.image
+        # replace the last channel of the rendered image by the
+        # distance to the objects, yielding an RGBD image
+        image[:, :, 3] = self.distance
+        return image
+
 
 if __name__ == "__main__":
     # Initiate the Cyberbee