Commit 907dd41f authored by Olivier Bertrand

Update Blender render/test

Clean the Blender test functions to remove unnecessary lines
parent ddebf4d1
from navipy.sensors.renderer import BlenderRender
import tempfile
from navipy.maths.euler import matrix, from_matrix
from navipy.maths.quaternion import from_matrix as quat_matrix
import pandas as pd
import numpy as np
import os
import sys
import unittest
class TestCase(unittest.TestCase):
    def setUp(self):
        """
        Prepare for the test
        """
        self.tuples = [('location', 'x'), ('location', 'y'),
                       ('location', 'z'), ('rxyz', 'alpha_0'),
                       ('rxyz', 'alpha_1'), ('rxyz', 'alpha_2')]
        convention = 'rxyz'
        self.tuples2 = [('location', 'x'), ('location', 'y'),
                        ('location', 'z'), (convention, 'alpha_0'),
                        (convention, 'alpha_1'), (convention, 'alpha_2')]
        self.image2 = np.zeros((7, 180, 360, 4, 1))
        index = pd.MultiIndex.from_tuples(self.tuples2,
                                          names=['position', 'orientation'])
        self.posorient2 = pd.Series(index=index)
        self.posorient2.loc['location']['x'] = 0
        self.posorient2.loc['location']['y'] = 0
        self.posorient2.loc['location']['z'] = 0
        self.posorient2.loc[convention]['alpha_0'] = 0
        self.posorient2.loc[convention]['alpha_1'] = 0
        self.posorient2.loc[convention]['alpha_2'] = 0
        self.a = np.pi/4
        self.b = 0
        self.c = np.pi/3
        index = pd.MultiIndex.from_tuples(self.tuples,
                                          names=['position', 'orientation'])
        index = pd.MultiIndex.from_tuples(
            [('location', 'x'), ('location', 'y'),
             ('location', 'z'), (convention, 'alpha_0'),
             (convention, 'alpha_1'), (convention, 'alpha_2')])
        self.posorient = pd.Series(index=index)
        self.posorient.loc['location']['x'] = 0
        self.posorient.loc['location']['y'] = 0
        self.posorient.loc['location']['z'] = 0
        self.posorient.loc['rxyz']['alpha_0'] = self.a
        self.posorient.loc['rxyz']['alpha_1'] = self.b
        self.posorient.loc['rxyz']['alpha_2'] = self.c
        self.posorient.loc['location']['z'] = 1
        self.posorient.loc[convention]['alpha_0'] = np.pi/4
        self.posorient.loc[convention]['alpha_1'] = np.pi/7
        self.posorient.loc[convention]['alpha_2'] = np.pi/3
        convention = self.posorient.index.get_level_values(0)[-1]
        a, b, c = self.posorient.loc[convention]
        self.matorient = matrix(a, b, c, axes=convention)
        self.renderer = BlenderRender()
        self.image_ref = self.renderer.scene(self.posorient)

    def test_diff_euler_xyz2yzx(self):
        """
        Test if images rendered from two different conventions match \
        one another
        """
        convention = 'ryzx'
        index = pd.MultiIndex.from_tuples(
            [('location', 'x'), ('location', 'y'),
             ('location', 'z'), (convention, 'alpha_0'),
             (convention, 'alpha_1'), (convention, 'alpha_2')])
        posorient2 = pd.Series(index=index)
        posorient2.loc['location'][:] = self.posorient.loc['location'][:]
        # An orientation matrix needs to be calculated from the
        # Euler angles of the 'reference' convention, so that it
        # can be decomposed in another convention
        mat1 = matrix(self.a, self.b, self.c, axes='rxyz')
        convention = self.posorient2.index.get_level_values(0)[-1]
        at, bt, ct = from_matrix(mat1, axes=convention)
        print(self.a*180/np.pi, self.b*180/np.pi, self.c*180/np.pi)
        print(at*180/np.pi, bt*180/np.pi, ct*180/np.pi)
        self.image2[0] = self.renderer.scene(self.posorient)
        from itertools import permutations
        for p_i, ang in enumerate(permutations([at, bt, ct])):
            self.posorient2.loc[(convention, 'alpha_0')] = ang[0]
            self.posorient2.loc[(convention, 'alpha_1')] = ang[1]
            self.posorient2.loc[(convention, 'alpha_2')] = ang[2]
            # We render the two images to be compared
            self.image2[p_i+1] = self.renderer.scene(self.posorient2)
        np.save('/home/bolirev/Desktop/test.npy', self.image2)
        np.testing.assert_allclose(
            self.image2[0], self.image2[1], atol=1.2, rtol=1e-2)
        at, bt, ct = from_matrix(self.matorient, axes=convention)
        posorient2.loc[convention] = [at, bt, ct]
        image2 = self.renderer.scene(posorient2)
        np.testing.assert_allclose(image2, self.image_ref)

    def test_euler_xyz_2_quaternion(self):
        # print("quaternion test")
        i2 = [('location', 'x'), ('location', 'y'),
              ('location', 'z'), ('quaternion', 'q_0'),
              ('quaternion', 'q_1'), ('quaternion', 'q_2'),
              ('quaternion', 'q_3')]
        mat1 = matrix(self.a, self.b, self.c, axes='rxyz')
        at, bt, ct, dt = quat_matrix(mat1)
        # Redirect stdout to a temporary file while Blender renders
        default_tmp_dir = tempfile._get_default_tempdir()
        fp = default_tmp_dir + "/" + next(tempfile._get_candidate_names())
        open(fp, 'a').close()
        old = os.dup(1)
        sys.stdout.flush()
        os.close(1)
        os.open(fp, os.O_WRONLY)
        index = pd.MultiIndex.from_tuples(i2,
                                          names=['position',
                                                 'orientation'])
        convention = 'quaternion'
        index = pd.MultiIndex.from_tuples(
            [('location', 'x'), ('location', 'y'),
             ('location', 'z'), (convention, 'q_0'),
             (convention, 'q_1'), (convention, 'q_2'), (convention, 'q_3')],
            names=['position', 'orientation'])
        posorient2 = pd.Series(index=index)
        posorient2.loc['location']['x'] = 0
        posorient2.loc['location']['y'] = 0
        posorient2.loc['location']['z'] = 0
        posorient2.loc['quaternion']['q_0'] = bt
        posorient2.loc['quaternion']['q_1'] = ct
        posorient2.loc['quaternion']['q_2'] = dt
        posorient2.loc['quaternion']['q_3'] = at
        self.image2[3] = self.renderer.scene(posorient2)
        self.image2[2] = self.renderer.scene(self.posorient)
        # disable output redirection
        os.close(1)
        os.dup(old)
        os.close(old)
        # print("max diff ", np.max(np.abs(self.image2[2]-self.image2[3])))
        # print("alternative")
        # indices = np.where(np.abs(self.image2[2] - self.image2[3])
        #                    == np.max(np.abs(self.image2[2]
        #                                     - self.image2[3])))
        # print("first image",
        #       self.image2[2, indices[0], indices[1], indices[2]],
        #       "second image",
        #       self.image2[3, indices[0], indices[1], indices[2]])
        # print(np.sum(np.abs(self.image2[2, :, :, 0]-self.image2[3, :, :, 0])))
        # assert np.all(np.isclose(self.image2[2], self.image2[3], atol=1.2))
        posorient2.loc['location'][:] = self.posorient.loc['location'][:]
        # An orientation matrix needs to be calculated from the
        # Euler angles of the 'reference' convention, so that it
        # can be decomposed in another convention
        at, bt, ct, dt = quat_matrix(self.matorient)
        posorient2.loc[convention] = [at, bt, ct, dt]
        image2 = self.renderer.scene(posorient2)
        np.testing.assert_allclose(image2, self.image_ref, atol=1.2)
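
For reference, below is a minimal standalone sketch of the convention conversion these tests exercise, assuming the navipy.maths helpers behave as they are used above (euler.matrix builds a rotation matrix from three Euler angles for a given axes convention, euler.from_matrix decomposes one, and quaternion.from_matrix returns the equivalent quaternion). The angle values and tolerance are illustrative only, not taken from the test suite.

import numpy as np
from navipy.maths.euler import matrix, from_matrix
from navipy.maths.quaternion import from_matrix as quat_from_matrix

# Euler angles expressed in the 'reference' convention rxyz
a, b, c = np.pi/4, np.pi/7, np.pi/3

# Build the orientation matrix once in the reference convention ...
mat = matrix(a, b, c, axes='rxyz')

# ... then decompose the same matrix in another convention. The two
# angle triplets differ, but they describe the same rotation, so
# re-composing in 'ryzx' should give back the original matrix.
at, bt, ct = from_matrix(mat, axes='ryzx')
np.testing.assert_allclose(matrix(at, bt, ct, axes='ryzx'), mat, atol=1e-9)

# The same matrix can also be expressed as a quaternion, which is what
# test_euler_xyz_2_quaternion passes to the renderer.
q = quat_from_matrix(mat)
print('ryzx angles:', at, bt, ct)
print('quaternion :', q)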
@@ -20,9 +20,6 @@ class TestBlenderRender(unittest.TestCase):
         * camera_gaussian_width
         * camera_resolution
         """
-        with self.assertRaises(TypeError):
-            # Should be a string
-            self.cyberbee.camera_rotation_mode = 0
         with self.assertRaises(TypeError):
             # Should be an integer
             self.cyberbee.cycle_samples = 0.1
@@ -49,10 +46,6 @@ class TestBlenderRender(unittest.TestCase):
         val = 100
         self.cyberbee.cycle_samples = val
         self.assertEqual(val, self.cyberbee.cycle_samples)
-        # camera rotation mode
-        val = 'XYZ'
-        self.cyberbee.camera_rotation_mode = val
-        self.assertEqual(val, self.cyberbee.camera_rotation_mode)
         # camera fov
         val = np.array([[-90, 90], [-180, 180]])
         self.cyberbee.camera_fov = val