From cbaca8ae0f6b02d9b89d20ff21905d3e882c97f3 Mon Sep 17 00:00:00 2001
From: "lodenthal@uni-bielefeld.de" <lodenthal@uni-bielefeld.de>
Date: Mon, 16 Apr 2018 12:24:14 +0300
Subject: [PATCH] updates blendtest for conventions and renderer

---
 navipy/sensors/blendtest1.py | 179 +++++++++++++++--------------------
 navipy/sensors/renderer.py   |  29 ++++--
 2 files changed, 93 insertions(+), 115 deletions(-)

diff --git a/navipy/sensors/blendtest1.py b/navipy/sensors/blendtest1.py
index 51ade5f..877fc94 100644
--- a/navipy/sensors/blendtest1.py
+++ b/navipy/sensors/blendtest1.py
@@ -1,4 +1,5 @@
 from navipy.sensors.renderer import BlenderRender
+import tempfile
 from navipy.maths.euler import matrix, from_matrix
 from navipy.maths.quaternion import from_matrix as quat_matrix
 import pandas as pd
@@ -10,149 +11,117 @@ import unittest
 
 class TestCase(unittest.TestCase):
     def setUp(self):
-        self.i = [np.array(['location', 'location', 'location',
-                            'sxyz', 'sxyz', 'sxyz']),
-                  np.array(['x', 'y', 'z',
-                            'alpha_0', 'alpha_1', 'alpha_2'])]
-        self.i2 = [np.array(['location', 'location', 'location',
-                             'syzx', 'syzx', 'syzx']),
-                   np.array(['x', 'y', 'z', 'alpha_0',
-                             'alpha_1', 'alpha_2'])]
+        self.tuples = [('location', 'x'), ('location', 'y'),
+                       ('location', 'z'), ('rxyz', 'alpha_0'),
+                       ('rxyz', 'alpha_1'), ('rxyz', 'alpha_2')]
+        self.tuples2 = [('location', 'x'), ('location', 'y'),
+                        ('location', 'z'), ('ryzx', 'alpha_0'),
+                        ('ryzx', 'alpha_1'), ('ryzx', 'alpha_2')]
         self.image2 = np.zeros((4, 180, 360, 4, 1))
-        self.posorient2 = pd.Series(index=self.i2)
+        index = pd.MultiIndex.from_tuples(self.tuples2,
+                                          names=['position', 'orientation'])
+        self.posorient2 = pd.Series(index=index)
         self.posorient2.loc['location']['x'] = 0
         self.posorient2.loc['location']['y'] = 0
         self.posorient2.loc['location']['z'] = 0
-        self.posorient2.loc['syzx']['alpha_0'] = 0
-        self.posorient2.loc['syzx']['alpha_1'] = 0
-        self.posorient2.loc['syzx']['alpha_2'] = 0
+        self.posorient2.loc['ryzx']['alpha_0'] = 0
+        self.posorient2.loc['ryzx']['alpha_1'] = 0
+        self.posorient2.loc['ryzx']['alpha_2'] = 0
         self.a = 1
         self.b = 0.75
         self.c = 0.5
-        self.posorient = pd.Series(index=self.i)
+        index = pd.MultiIndex.from_tuples(self.tuples,
+                                          names=['position', 'orientation'])
+        self.posorient = pd.Series(index=index)
         self.posorient.loc['location']['x'] = 0
         self.posorient.loc['location']['y'] = 0
         self.posorient.loc['location']['z'] = 0
-        self.posorient.loc['sxyz']['alpha_0'] = self.a
-        self.posorient.loc['sxyz']['alpha_1'] = self.b
-        self.posorient.loc['sxyz']['alpha_2'] = self.c
+        self.posorient.loc['rxyz']['alpha_0'] = self.a
+        self.posorient.loc['rxyz']['alpha_1'] = self.b
+        self.posorient.loc['rxyz']['alpha_2'] = self.c
+
+        self.renderer = BlenderRender()
 
     def test_diff_euler_xyz2yzx(self):
-        # print("euler test")
-        logfile = 'blender_render.log'
-        open(logfile, 'a').close()
+        mat1 = matrix(self.a, self.b, self.c, axes='rxyz')
+        at, bt, ct = from_matrix(mat1, axes='ryzx')
+
+        default_tmp_dir = tempfile._get_default_tempdir()
+        fp = default_tmp_dir + "/" + next(tempfile._get_candidate_names())
+        open(fp, 'a').close()
         old = os.dup(1)
         sys.stdout.flush()
         os.close(1)
-        os.open(logfile, os.O_WRONLY)
-
-        renderer = BlenderRender()
-        renderer2 = BlenderRender()
-        mat1 = matrix(self.a, self.b, self.c, axes='sxyz')
-        a1, a2, a3 = from_matrix(mat1, axes='sxyz')
-
-        at, bt, ct = from_matrix(mat1, axes='syzx')
+        os.open(fp, os.O_WRONLY)
 
-        self.posorient2.loc['syzx']['alpha_0'] = at
-        self.posorient2.loc['syzx']['alpha_1'] = bt
-        self.posorient2.loc['syzx']['alpha_2'] = ct
-        # renderer.camera_rotation_mode = 'YZX'
-        # print("poorient ### ",self.posorient2.loc[['syzx'],
-        #                       ['alpha_0', 'alpha_1', 'alpha_2']].values)
-        self.image2[0] = renderer2.scene(self.posorient2)
-        # conv = renderer2.camera_rotation_mode
-
-        # self.posorient.loc['sxyz']['alpha_0'] = a
-        # self.posorient.loc['sxyz']['alpha_1'] = b
-        # self.posorient.loc['sxyz']['alpha_2'] = c
-        # print("poorient ### ",posorient.loc[['sxyz'],
-        #                      ['alpha_0', 'alpha_1', 'alpha_2']].values)
-        # renderer.camera_rotation_mode = 'XYZ'
-        self.image2[1] = renderer.scene(self.posorient)
+        self.posorient2.loc['ryzx']['alpha_0'] = bt
+        self.posorient2.loc['ryzx']['alpha_1'] = ct
+        self.posorient2.loc['ryzx']['alpha_2'] = at
+        self.image2[0] = self.renderer.scene(self.posorient)
+        self.image2[1] = self.renderer.scene(self.posorient2)
 
         # disable output redirection
         os.close(1)
         os.dup(old)
         os.close(old)
-        # print("first image")
-        # print(self.image[0,:,0,0] - self.image[1,:,0,0])
-        # print("second image")
-        # print(self.image[1,:,0,0])
-        # conv2= renderer.camera_rotation_mode
-        # print("angels xyz",a1,a2,a3,"angles yzx",at,bt,ct)
-        # print("sh be yzx",conv,"sh be xyz",conv2)
-        # print(np.sum(np.abs(image2[0,:,:,0]-image2[1,:,:,0])))
-        # print("max diff ",np.max(np.abs(self.image2[0]-self.image2[1])))
-        # assert np.testing,assert_allclose(self.image[0],
-        #                                   self.image[1], atol = 0.007)
+
+        # indices = np.where(np.abs(self.image2[0] -
+        #                           self.image2[1])
+        #                    ==np.max(np.abs(self.image2[0]
+        #                                    - self.image2[1])))
+        # print("first image", self.image2[0,indices[0],indices[1],indices[2]],
+        #       "second image",self.image2[1,indices[0],indices[1],indices[2]])
+        # print(np.sum(np.abs(self.image2[0,:,:,0]-self.image2[1,:,:,0])))
+        # print("max diff ",np.max(np.abs(self.image2[0,:,:,0]
+        #                                 -self.image2[1,:,:,0])))
+        assert np.all(np.isclose(self.image2[0], self.image2[1], atol=1.2))
 
     def test_euler_xyz_2_quaternion(self):
         # print("quaternion test")
-        logfile = 'blender_render.log'
-        open(logfile, 'a').close()
+        i2 = [('location', 'x'), ('location', 'y'),
+              ('location', 'z'), ('quaternion', 'q_0'),
+              ('quaternion', 'q_1'), ('quaternion', 'q_2'),
+              ('quaternion', 'q_3')]
+        mat1 = matrix(self.a, self.b, self.c, axes='rxyz')
+        at, bt, ct, dt = quat_matrix(mat1)
+
+        default_tmp_dir = tempfile._get_default_tempdir()
+        fp = default_tmp_dir + "/" + next(tempfile._get_candidate_names())
+        open(fp, 'a').close()
         old = os.dup(1)
         sys.stdout.flush()
         os.close(1)
-        os.open(logfile, os.O_WRONLY)
-
-        i2 = [np.array(['location', 'location', 'location',
-                        'quaternion', 'quaternion', 'quaternion',
-                        'quaternion']),
-              np.array(['x', 'y', 'z', 'q_0', 'q_1', 'q_2', 'q_3'])]
-        renderer = BlenderRender()
-        renderer2 = BlenderRender()
-
-        mat1 = matrix(self.a, self.b, self.c, axes='sxyz')
-        at, bt, ct, dt = quat_matrix(mat1)
+        os.open(fp, os.O_WRONLY)
 
-        posorient2 = pd.Series(index=i2)
+        index = pd.MultiIndex.from_tuples(i2,
+                                          names=['position',
+                                                 'orientation'])
+        posorient2 = pd.Series(index=index)
         posorient2.loc['location']['x'] = 0
         posorient2.loc['location']['y'] = 0
         posorient2.loc['location']['z'] = 0
-        posorient2.loc['quaternion']['q_0'] = at
-        posorient2.loc['quaternion']['q_1'] = bt
-        posorient2.loc['quaternion']['q_2'] = ct
-        posorient2.loc['quaternion']['q_3'] = dt
-        # renderer.camera_rotation_mode = 'YZX'
-        # print("poorient ### ",posorient2.loc[['quternion'],
-        #                                      ['q_0', 'q_1',
-        #                                       'q_2', 'q_3']].values)
-        self.image2[2] = renderer2.scene(posorient2)
-        # conv = renderer2.camera_rotation_mode
-
-        # self.posorient = pd.Series(index=self.i)
-
-        # self.posorient.loc['sxyz']['alpha_0'] = a
-        # self.posorient.loc['sxyz']['alpha_1'] = b
-        # self.posorient.loc['sxyz']['alpha_2'] = c
-        # print("poorient ### ",self.posorient.loc[['sxyz'],
-        #                       ['alpha_0', 'alpha_1', 'alpha_2']].values)
-        # renderer.camera_rotation_mode = 'XYZ'
-        self.image2[3] = renderer.scene(self.posorient)
+        posorient2.loc['quaternion']['q_0'] = bt
+        posorient2.loc['quaternion']['q_1'] = ct
+        posorient2.loc['quaternion']['q_2'] = dt
+        posorient2.loc['quaternion']['q_3'] = at
+        self.image2[3] = self.renderer.scene(posorient2)
+        self.image2[2] = self.renderer.scene(self.posorient)
 
         # disable output redirection
         os.close(1)
         os.dup(old)
         os.close(old)
-        # print(self.image2[2,:,0,0] - self.image2[3,:,0,0])
         # print("max diff ",np.max(np.abs(self.image2[2]-self.image2[3])))
         # print("alternative");
-        # indices = np.where(np.abs(self.image2[0,:,:,0] -
-        #                           self.image2[1,:,:,0])
-        #                    ==np.max(np.abs(self.image2[0,:,:,0]
-        #                                    - self.image2[1,:,:,0])))
-        # print("indices", indices)
-        # print("first image")
-        # print(self.image[0,0,:,0])
-        # print("second image")
-        # print(self.image[1,0,:,0])
-        # conv2= renderer.camera_rotation_mode
-        # print("angels xyz",a,b,c,"angles yzx",at,bt,ct)
-        # print("sh be yzx",conv,"sh be xyz",conv2)
-        # print(np.sum(np.abs(self.image[0,:,:,0]-self.image[1,:,:,0])))
-        # print(np.all(np.isclose(self.image[0], self.image[1])))
-        # assert np.testing.assert_allclose(self.image[0],
-        #                                   self.image[1], atol = 0.007)
+        # indices = np.where(np.abs(self.image2[2] -
+        #                           self.image2[3])
+        #                    ==np.max(np.abs(self.image2[2]
+        #                                    - self.image2[3])))
+        # print("first image", self.image2[2,indices[0],indices[1],indices[2]],
+        #       "second image",self.image2[3,indices[0],indices[1],indices[2]])
+        # print(np.sum(np.abs(self.image2[2,:,:,0]-self.image2[3,:,:,0])))
+        # print("max diff ",np.max(np.abs(self.image2[2]-self.image2[3])))
+        # assert np.all(np.isclose(self.image2[2],self.image2[3], atol = 1.2))
diff --git a/navipy/sensors/renderer.py b/navipy/sensors/renderer.py
index 6112f79..0907ec8 100644
--- a/navipy/sensors/renderer.py
+++ b/navipy/sensors/renderer.py
@@ -293,7 +293,7 @@ class BlenderRender():
         [convention][alpha_1]
         [convention][alpha_2]
         **where convention can be:
-        sxyz, sxzy, syxz, syzx, szyx, szxy
+        rxyz, rxzy, ryxz, ryzx, rzyx, rzxy
         *in case of quaternions the index should be
         ['location']['x']
         ['location']['y']
@@ -310,31 +310,40 @@
         """
         if isinstance(posorient, pd.Series):
             # set roation mode
+            found_convention = False
             index = posorient.index
             convention = index.get_level_values(0)[-1]
-            if convention == 'sxyz':
+            if convention == 'rxyz':
                 self.camera_rotation_mode = 'XYZ'
-            elif convention == 'syzx':
+                found_convention = True
+            elif convention == 'ryzx':
                 self.camera_rotation_mode = 'YZX'
-            elif convention == 'sxzy':
+                found_convention = True
+            elif convention == 'rxzy':
                 self.camera_rotation_mode = 'XZY'
-            elif convention == 'syxz':
+                found_convention = True
+            elif convention == 'ryxz':
                 self.camera_rotation_mode = 'YXZ'
-            elif convention == 'szxy':
+                found_convention = True
+            elif convention == 'rzxy':
                 self.camera_rotation_mode = 'ZXY'
-            elif convention == 'szyx':
-                self.camera_rotation_mode = 'ZYX'
+                found_convention = True
+            elif convention == 'rzyx':
+                self.camera_rotation_mode = 'ZYX'
+                found_convention = True
             self.camera.location = \
                 posorient.loc[['location'], ['x', 'y', 'z']].values
-            if convention != 'quaternion':
+            if found_convention:
                 self.camera.rotation_euler = \
                     posorient.loc[[convention],
                                   ['alpha_0', 'alpha_1', 'alpha_2']].values
-            else:
+            elif convention == 'quaternion':
                 self.camera_rotation_mode = 'QUATERNION'
                 self.camera.rotation_quaternion = \
                     posorient.loc[[convention],
                                   ['q_0', 'q_1', 'q_2', 'q_3']].values
+            else:
+                raise Exception('your convention is not supported')
         else:
             raise TypeError(
                 'posorient must be of type array, list, or pandas Series')
-- 
GitLab
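
A minimal usage sketch (outside the patch itself), assuming navipy is installed and BlenderRender is run inside Blender's Python: it builds the ('position', 'orientation') MultiIndex Series with the renamed 'rxyz' Euler convention that the diff above expects, using the same angles as the test. Only the pandas part below runs on its own; the render call is an assumption taken from the test code and is left commented out.

import pandas as pd

# Build the posorient Series that BlenderRender.scene expects, using the
# renamed 'rxyz' Euler convention introduced by this patch.
tuples = [('location', 'x'), ('location', 'y'), ('location', 'z'),
          ('rxyz', 'alpha_0'), ('rxyz', 'alpha_1'), ('rxyz', 'alpha_2')]
index = pd.MultiIndex.from_tuples(tuples, names=['position', 'orientation'])
posorient = pd.Series(index=index, dtype=float)
posorient.loc[('location', 'x')] = 0.0
posorient.loc[('location', 'y')] = 0.0
posorient.loc[('location', 'z')] = 0.0
posorient.loc[('rxyz', 'alpha_0')] = 1.0   # same angles as in the test
posorient.loc[('rxyz', 'alpha_1')] = 0.75
posorient.loc[('rxyz', 'alpha_2')] = 0.5

# Rendering itself needs Blender's Python with navipy available (assumption):
# from navipy.sensors.renderer import BlenderRender
# renderer = BlenderRender()
# image = renderer.scene(posorient)  # 'rxyz' maps camera_rotation_mode to 'XYZ'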