Commit 88d93cff authored by Dipl.-Ing. Jonas Stienen

Fixed the VAVec3-to-Python-sequence parser problem for all setters and getters

parent 532dc2d0
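
With this change, the vector setters take a plain Python 3-sequence (tuple or list) and the orientation getters touched by this diff return a pair of 3-tuples instead of hand-built lists. A minimal usage sketch of the resulting call convention, assuming a VA server running on localhost:

``` python
import va

va.connect( "localhost" )

source_id = va.create_sound_source( 'DemoSource' )

# Vector arguments are passed as 3-element sequences ( x, y, z )
va.set_sound_source_position( source_id, ( 1.5, 1.7, -1.1 ) )
va.set_sound_source_orientation_vu( source_id, ( 0, 0, -1 ), ( 0, 1, 0 ) )

# Orientation getters return a pair of 3-tuples: ( view, up )
view, up = va.get_sound_source_orientation_vu( source_id )
print( 'View:', view, 'Up:', up )

va.disconnect()
```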
%% Cell type:markdown id: tags:
# VA simple acoustic scene
This is a simple example of how to create an acoustic scene in VA using Python.
%% Cell type:markdown id: tags:
#### Prerequisites
You can ignore this part; it is for preparation purposes only.
%% Cell type:code id: tags:
``` python
import sys
sys.path.append( '../../Lib/site-packages' )
import os
print( 'Current working directory:', os.getcwd() )
import va
print( 'Successfully loaded VA Python extension' )
```
%% Cell type:markdown id: tags:
### Connect
%% Cell type:code id: tags:
``` python
va.connect( "localhost" );
```
%% Cell type:markdown id: tags:
Reset VA to clear the scene
%% Cell type:code id: tags:
``` python
va.reset()
```
%% Cell type:markdown id: tags:
Control output gain
%% Cell type:code id: tags:
``` python
va.set_output_gain( 0.25 )
```
%% Cell type:markdown id: tags:
Add the current working directory or any other relative or absolute directory as a search path to your VA application. From now on, only use relative paths or path macros.
> Paths are resolved on the server side, not on a remote client. The files must be available on the computer **where the VA application is running**!
%% Cell type:code id: tags:
``` python
va.add_search_path( os.getcwd() );
```
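
For example, a sub-folder of the working directory (here the `Audiofiles` folder used below) could be added the same way; a small sketch:

``` python
# Optional: add the audio file folder explicitly
# (the working directory added above already covers 'Audiofiles/Bauer.wav')
va.add_search_path( os.path.join( os.getcwd(), 'Audiofiles' ) )
```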
%% Cell type:markdown id: tags:
### Signal source
Create a signal source from an audio file and start playback in looping mode.
%% Cell type:code id: tags:
``` python
signal_source_id = va.create_audio_file_signal_source( 'Audiofiles/Bauer.wav' )
-va.set_audiofile_signal_source_playback_action( signal_source_id, 'play' )
-va.set_audiofile_signal_source_is_looping( signal_source_id, True )
+va.set_audio_file_signal_source_playback_action_str( signal_source_id, 'play' )
+va.set_audio_file_signal_source_playback_is_looping( signal_source_id, True )
```
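
Playback can be controlled later through the same `_str` setter; a sketch using the ids created above, assuming the usual VA playback actions `'pause'` and `'stop'` are available:

``` python
# Pause and stop the file playback (assumes 'pause' and 'stop' are valid playback actions)
va.set_audio_file_signal_source_playback_action_str( signal_source_id, 'pause' )
va.set_audio_file_signal_source_playback_action_str( signal_source_id, 'stop' )

# Query the current playback state as a human-readable string
print( va.get_audio_file_signal_source_playback_state_str( signal_source_id ) )
```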
%% Cell type:markdown id: tags:
### Virtual sound source
Create a virtual sound source with any name and set a position
%% Cell type:code id: tags:
``` python
sound_source_id = va.create_sound_source( 'PySoundSource' )
-va.set_sound_source_position( sound_source_id, ( 1.5 1.7, -1.1 ) )
+va.set_sound_source_position( sound_source_id, ( 1.5, 1.7, -1.1 ) )
```
%% Cell type:markdown id: tags:
### Connect signal and source
Connect the signal source to the virtual sound source
%% Cell type:code id: tags:
``` python
va.set_sound_source_signal_source( sound_source_id, signal_source_id )
```
%% Cell type:markdown id: tags:
### Head-related transfer function / Head-related impulse response
Load an HRIR (the time domain representation of an HRTF). See [OpenDAFF](http://www.opendaff.org) for more information.
> We use the macro `DefaultHRIR` here, which is usually available for a VA core.
%% Cell type:code id: tags:
``` python
hrir_id = va.load_hrir( '$(DefaultHRIR)' )
```
%% Cell type:markdown id: tags:
### Virtual listener
Create a listener with an arbitrary name, assign the HRTF/HRIR, and set a position and orientation.
%% Cell type:code id: tags:
``` python
listener_id = va.create_listener( 'PyListener' )
va.set_listener_position( listener_id, ( 0, 1.7, 0 ) )
va.set_listener_orientation_vu( listener_id, ( 0, 0, -1 ), ( 0, 1, 0 ) ) # Default view is to -Z (OpenGL)
```
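
The pose can be read back directly; with the parser fix in this commit the orientation getter returns the view and up vectors as two 3-tuples (sketch, using the `listener_id` from above):

``` python
# Returns ( view, up ) as a pair of 3-tuples
view, up = va.get_listener_orientation_vu( listener_id )
print( 'Listener view:', view )
print( 'Listener up:', up )
```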
%% Cell type:markdown id: tags:
### Active listener
Set an active listener. This concept is deprecated, but should be used for compatibility until it is removed.
%% Cell type:code id: tags:
``` python
va.set_active_listener( listener_id )
```
%% Cell type:markdown id: tags:
### Disconnect
%% Cell type:code id: tags:
``` python
va.disconnect();
```
@@ -39,8 +39,8 @@ static struct PyMethodDef va_methods[] =
{ "get_signal_source_info", ( PyCFunction ) va_get_signal_source_info, METH_FASTCALL, va_no_doc },
{ "get_signal_source_infos", (PyCFunction)va_get_signal_source_infos, METH_FASTCALL, va_no_doc },
{ "get_audio_file_signal_source_playback_state", (PyCFunction)va_get_audio_file_signal_source_playback_state, METH_FASTCALL, va_no_doc },
-{ "set_audio_file_signal_source_playback_action", (PyCFunction)va_set_audio_file_signal_source_playback_action, METH_FASTCALL, va_no_doc },
{ "get_audio_file_signal_source_playback_state_str", (PyCFunction)va_get_audio_file_signal_source_playback_state_str, METH_FASTCALL, va_no_doc },
+{ "set_audio_file_signal_source_playback_action", (PyCFunction)va_set_audio_file_signal_source_playback_action, METH_FASTCALL, va_no_doc },
{ "set_audio_file_signal_source_playback_action_str", (PyCFunction)va_set_audio_file_signal_source_playback_action_str, METH_FASTCALL, va_no_doc },
{ "set_audio_file_signal_source_playback_position", (PyCFunction)va_set_audio_file_signal_source_playback_position, METH_FASTCALL, va_no_doc },
{ "get_audio_file_signal_source_playback_is_looping", (PyCFunction)va_get_audio_file_signal_source_playback_is_looping, METH_FASTCALL, va_no_doc },
@@ -816,16 +816,12 @@ static PyObject* va_set_sound_source_position( PyObject*, PyObject** ppArgs, Py_
VAPY_REQUIRE_CONN_TRY;
static const char * const _keywords[] = { "id", "pos", NULL };
-static _PyArg_Parser _parser = { "i(d,d,d):va_set_sound_source_position", _keywords, 0 };
+static _PyArg_Parser _parser = { "i(ddd):set_sound_source_position", _keywords, 0 };
long iID = -1;
-PyObject* pPosList = nullptr;
-if( !_PyArg_ParseStack( ppArgs, nArgs, pKeywordNames, &_parser, &iID, &pPosList ) )
+VAVec3 v3Pos;
+if( !_PyArg_ParseStack( ppArgs, nArgs, pKeywordNames, &_parser, &iID, &v3Pos.x, &v3Pos.y, &v3Pos.z ) )
return NULL;
-VAVec3 v3Pos;
-v3Pos.x = PyFloat_AsDouble( PyList_GetItem( pPosList, 0 ) );
-v3Pos.y = PyFloat_AsDouble( PyList_GetItem( pPosList, 1 ) );
-v3Pos.z = PyFloat_AsDouble( PyList_GetItem( pPosList, 2 ) );
g_pVANetClient->GetCoreInstance()->SetSoundSourcePosition( iID, v3Pos.x, v3Pos.y, v3Pos.z );
return Py_None;
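
At the Python level, the new `i(ddd)` format means the call takes an integer id followed by a 3-element numeric sequence; both a tuple and a list are accepted. A usage sketch, assuming a connected `va` module and a valid `sound_source_id`:

``` python
# Both forms satisfy the "i(ddd)" argument parser
va.set_sound_source_position( sound_source_id, ( 1.5, 1.7, -1.1 ) )  # tuple
va.set_sound_source_position( sound_source_id, [ 1.5, 1.7, -1.1 ] )  # list
```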
@@ -846,20 +842,7 @@ static PyObject* va_get_sound_source_orientation_vu( PyObject*, PyObject** ppArg
VAVec3 v3View, v3Up;
g_pVANetClient->GetCoreInstance()->GetSoundSourceOrientationVU( iID, v3View.x, v3View.y, v3View.z, v3Up.x, v3Up.y, v3Up.z );
-PyObject* pViewList = PyList_New( 3 );
-PyList_SetItem( pViewList, 0, PyFloat_FromDouble( v3View.x ) );
-PyList_SetItem( pViewList, 1, PyFloat_FromDouble( v3View.y ) );
-PyList_SetItem( pViewList, 2, PyFloat_FromDouble( v3View.z ) );
-PyObject* pUpList = PyList_New( 3 );
-PyList_SetItem( pUpList, 0, PyFloat_FromDouble( v3View.x ) );
-PyList_SetItem( pUpList, 1, PyFloat_FromDouble( v3View.y ) );
-PyList_SetItem( pUpList, 2, PyFloat_FromDouble( v3View.z ) );
-PyObject* pViewUpList = PyList_New( 2 );
-PyList_SetItem( pUpList, 0, pViewList );
-PyList_SetItem( pUpList, 1, pUpList );
-return pViewUpList;
+return Py_BuildValue("(ddd)(ddd)", v3View.x, v3View.y, v3View.z, v3Up.x, v3Up.y, v3Up.z);
VAPY_CATCH_RETURN;
};
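
`Py_BuildValue( "(ddd)(ddd)", ... )` produces a 2-tuple of two 3-tuples, so the Python caller receives the view and up vectors as plain tuples instead of the previously hand-built (and partly incorrect) lists. A usage sketch, assuming a valid `sound_source_id`:

``` python
# The getter now yields ( ( view_x, view_y, view_z ), ( up_x, up_y, up_z ) )
view, up = va.get_sound_source_orientation_vu( sound_source_id )
print( 'View vector:', view )
print( 'Up vector:', up )
```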
@@ -869,23 +852,12 @@ static PyObject* va_set_sound_source_orientation_vu( PyObject*, PyObject** ppArg
VAPY_REQUIRE_CONN_TRY;
static const char * const _keywords[] = { "id", "view", "up", NULL };
-static _PyArg_Parser _parser = { "i(d,d,d)(d,d,d):set_sound_source_orientation_vu", _keywords, 0 };
+static _PyArg_Parser _parser = { "i(ddd)(ddd):set_sound_source_orientation_vu", _keywords, 0 };
long iID = -1;
-PyObject* pViewList = nullptr;
-PyObject* pUpList = nullptr;
-if( !_PyArg_ParseStack( ppArgs, nArgs, pKeywordNames, &_parser, &iID, &pViewList, &pUpList ) )
+VAVec3 v3View, v3Up;
+if( !_PyArg_ParseStack( ppArgs, nArgs, pKeywordNames, &_parser, &iID, &v3View.x, &v3View.y, &v3View.z, &v3Up.x, &v3Up.y, &v3Up.z) )
return NULL;
-VAVec3 v3View;
-v3View.x = PyFloat_AsDouble( PyList_GetItem( pViewList, 0 ) );
-v3View.y = PyFloat_AsDouble( PyList_GetItem( pViewList, 1 ) );
-v3View.z = PyFloat_AsDouble( PyList_GetItem( pViewList, 2 ) );
-VAVec3 v3Up;
-v3Up.x = PyFloat_AsDouble( PyList_GetItem( pUpList, 0 ) );
-v3Up.y = PyFloat_AsDouble( PyList_GetItem( pUpList, 1 ) );
-v3Up.z = PyFloat_AsDouble( PyList_GetItem( pUpList, 2 ) );
g_pVANetClient->GetCoreInstance()->SetSoundSourceOrientationVU( iID, v3View.x, v3View.y, v3View.z, v3Up.x, v3Up.y, v3Up.z );
return Py_None;
@@ -1133,17 +1105,13 @@ static PyObject* va_set_listener_position( PyObject*, PyObject** ppArgs, Py_ssiz
VAPY_REQUIRE_CONN_TRY;
static const char * const _keywords[] = { "id", "pos", NULL };
-static _PyArg_Parser _parser = { "i(d,d,d):va_set_listener_position", _keywords, 0 };
+static _PyArg_Parser _parser = { "i(ddd):set_listener_position", _keywords, 0 };
long iID = -1;
-PyObject* pPosList = nullptr;
-if( !_PyArg_ParseStack( ppArgs, nArgs, pKeywordNames, &_parser, &iID, &pPosList ) )
+VAVec3 v3Pos;
+if (!_PyArg_ParseStack(ppArgs, nArgs, pKeywordNames, &_parser, &iID, &v3Pos.x, &v3Pos.y, &v3Pos.z))
return NULL;
-VAVec3 v3Pos;
-v3Pos.x = PyFloat_AsDouble( PyList_GetItem( pPosList, 0 ) );
-v3Pos.y = PyFloat_AsDouble( PyList_GetItem( pPosList, 1 ) );
-v3Pos.z = PyFloat_AsDouble( PyList_GetItem( pPosList, 2 ) );
-g_pVANetClient->GetCoreInstance()->SetListenerPosition( iID, v3Pos.x, v3Pos.y, v3Pos.z );
+g_pVANetClient->GetCoreInstance()->SetListenerPosition(iID, v3Pos.x, v3Pos.y, v3Pos.z);
return Py_None;
VAPY_CATCH_RETURN;
@@ -1162,20 +1130,7 @@ static PyObject* va_get_listener_orientation_vu( PyObject*, PyObject** ppArgs, P
VAVec3 v3View, v3Up;
g_pVANetClient->GetCoreInstance()->GetListenerOrientationVU( iID, v3View.x, v3View.y, v3View.z, v3Up.x, v3Up.y, v3Up.z );
-PyObject* pViewList = PyList_New( 3 );
-PyList_SetItem( pViewList, 0, PyFloat_FromDouble( v3View.x ) );
-PyList_SetItem( pViewList, 1, PyFloat_FromDouble( v3View.y ) );
-PyList_SetItem( pViewList, 2, PyFloat_FromDouble( v3View.z ) );
-PyObject* pUpList = PyList_New( 3 );
-PyList_SetItem( pUpList, 0, PyFloat_FromDouble( v3View.x ) );
-PyList_SetItem( pUpList, 1, PyFloat_FromDouble( v3View.y ) );
-PyList_SetItem( pUpList, 2, PyFloat_FromDouble( v3View.z ) );
-PyObject* pViewUpList = PyList_New( 2 );
-PyList_SetItem( pUpList, 0, pViewList );
-PyList_SetItem( pUpList, 1, pUpList );
-return pViewUpList;
+return Py_BuildValue("(ddd)(ddd)", v3View.x, v3View.y, v3View.z, v3Up.x, v3Up.y, v3Up.z);
VAPY_CATCH_RETURN;
};
@@ -1185,24 +1140,13 @@ static PyObject* va_set_listener_orientation_vu( PyObject*, PyObject** ppArgs, P
VAPY_REQUIRE_CONN_TRY;
static const char * const _keywords[] = { "id", "view", "up", NULL };
-static _PyArg_Parser _parser = { "i(d,d,d)(d,d,d):set_listener_orientation_vu", _keywords, 0 };
+static _PyArg_Parser _parser = { "i(ddd)(ddd):set_listener_orientation_vu", _keywords, 0 };
long iID = -1;
-PyObject* pViewList = nullptr;
-PyObject* pUpList = nullptr;
-if( !_PyArg_ParseStack( ppArgs, nArgs, pKeywordNames, &_parser, &iID, &pViewList, &pUpList ) )
+VAVec3 v3View, v3Up;
+if (!_PyArg_ParseStack(ppArgs, nArgs, pKeywordNames, &_parser, &iID, &v3View.x, &v3View.y, &v3View.z, &v3Up.x, &v3Up.y, &v3Up.z))
return NULL;
-VAVec3 v3View;
-v3View.x = PyFloat_AsDouble( PyList_GetItem( pViewList, 0 ) );
-v3View.y = PyFloat_AsDouble( PyList_GetItem( pViewList, 1 ) );
-v3View.z = PyFloat_AsDouble( PyList_GetItem( pViewList, 2 ) );
-VAVec3 v3Up;
-v3Up.x = PyFloat_AsDouble( PyList_GetItem( pUpList, 0 ) );
-v3Up.y = PyFloat_AsDouble( PyList_GetItem( pUpList, 1 ) );
-v3Up.z = PyFloat_AsDouble( PyList_GetItem( pUpList, 2 ) );
-g_pVANetClient->GetCoreInstance()->SetListenerOrientationVU( iID, v3View.x, v3View.y, v3View.z, v3Up.x, v3Up.y, v3Up.z );
+g_pVANetClient->GetCoreInstance()->SetListenerOrientationVU(iID, v3View.x, v3View.y, v3View.z, v3Up.x, v3Up.y, v3Up.z);
return Py_None;
VAPY_CATCH_RETURN;
@@ -1237,20 +1181,15 @@ static PyObject* va_set_listener_real_world_position( PyObject*, PyObject** ppAr
VAPY_REQUIRE_CONN_TRY;
static const char * const _keywords[] = { "id", "pos", NULL };
-static _PyArg_Parser _parser = { "i(d,d,d):va_set_listener_real_world_position", _keywords, 0 };
+static _PyArg_Parser _parser = { "i(ddd):set_listener_real_world_position", _keywords, 0 };
long iID = -1;
-PyObject* pPosList = nullptr;
-if( !_PyArg_ParseStack( ppArgs, nArgs, pKeywordNames, &_parser, &iID, &pPosList ) )
+VAVec3 v3Pos;
+if (!_PyArg_ParseStack(ppArgs, nArgs, pKeywordNames, &_parser, &iID, &v3Pos.x, &v3Pos.y, &v3Pos.z))
return NULL;
double t;
VAVec3 v3View, v3Up;
g_pVANetClient->GetCoreInstance()->GetListenerRealWorldHeadPositionOrientationVU( iID, t, t, t, v3View.x, v3View.y, v3View.z, v3Up.x, v3Up.y, v3Up.z );
-VAVec3 v3Pos;
-v3Pos.x = PyFloat_AsDouble( PyList_GetItem( pPosList, 0 ) );
-v3Pos.y = PyFloat_AsDouble( PyList_GetItem( pPosList, 1 ) );
-v3Pos.z = PyFloat_AsDouble( PyList_GetItem( pPosList, 2 ) );
g_pVANetClient->GetCoreInstance()->SetListenerRealWorldHeadPositionOrientationVU( iID, v3Pos.x, v3Pos.y, v3Pos.z, v3View.x, v3View.y, v3View.z, v3Up.x, v3Up.y, v3Up.z );
return Py_None;
@@ -1271,20 +1210,7 @@ static PyObject* va_get_listener_real_world_orientation_vu( PyObject*, PyObject*
VAVec3 v3View, v3Up;
g_pVANetClient->GetCoreInstance()->GetListenerRealWorldHeadPositionOrientationVU( iID, t, t, t, v3View.x, v3View.y, v3View.z, v3Up.x, v3Up.y, v3Up.z );
-PyObject* pViewList = PyList_New( 3 );
-PyList_SetItem( pViewList, 0, PyFloat_FromDouble( v3View.x ) );
-PyList_SetItem( pViewList, 1, PyFloat_FromDouble( v3View.y ) );
-PyList_SetItem( pViewList, 2, PyFloat_FromDouble( v3View.z ) );
-PyObject* pUpList = PyList_New( 3 );
-PyList_SetItem( pUpList, 0, PyFloat_FromDouble( v3View.x ) );
-PyList_SetItem( pUpList, 1, PyFloat_FromDouble( v3View.y ) );
-PyList_SetItem( pUpList, 2, PyFloat_FromDouble( v3View.z ) );
-PyObject* pViewUpList = PyList_New( 2 );
-PyList_SetItem( pUpList, 0, pViewList );
-PyList_SetItem( pUpList, 1, pUpList );
-return pViewUpList;
+return Py_BuildValue("(ddd)(ddd)", v3View.x, v3View.y, v3View.z, v3Up.x, v3Up.y, v3Up.z);
VAPY_CATCH_RETURN;
};
@@ -1294,27 +1220,15 @@ static PyObject* va_set_listener_real_world_orientation_vu( PyObject*, PyObject*
VAPY_REQUIRE_CONN_TRY;
static const char * const _keywords[] = { "id", "view", "up", NULL };
-static _PyArg_Parser _parser = { "i(d,d,d)(d,d,d):set_listener_real_world_orientation_vu", _keywords, 0 };
+static _PyArg_Parser _parser = { "i(ddd)(ddd):set_listener_real_world_orientation_vu", _keywords, 0 };
long iID = -1;
-PyObject* pViewList = nullptr;
-PyObject* pUpList = nullptr;
-if( !_PyArg_ParseStack( ppArgs, nArgs, pKeywordNames, &_parser, &iID, &pViewList, &pUpList ) )
+VAVec3 v3View, v3Up;
+if (!_PyArg_ParseStack(ppArgs, nArgs, pKeywordNames, &_parser, &iID, &v3View.x, &v3View.y, &v3View.z, &v3Up.x, &v3Up.y, &v3Up.z))
return NULL;
double t;
VAVec3 v3Pos;
g_pVANetClient->GetCoreInstance()->GetListenerRealWorldHeadPositionOrientationVU( iID, v3Pos.x, v3Pos.y, v3Pos.z, t, t, t, t, t, t );
-VAVec3 v3View;
-v3View.x = PyFloat_AsDouble( PyList_GetItem( pViewList, 0 ) );
-v3View.y = PyFloat_AsDouble( PyList_GetItem( pViewList, 1 ) );
-v3View.z = PyFloat_AsDouble( PyList_GetItem( pViewList, 2 ) );
-VAVec3 v3Up;
-v3Up.x = PyFloat_AsDouble( PyList_GetItem( pUpList, 0 ) );
-v3Up.y = PyFloat_AsDouble( PyList_GetItem( pUpList, 1 ) );
-v3Up.z = PyFloat_AsDouble( PyList_GetItem( pUpList, 2 ) );
g_pVANetClient->GetCoreInstance()->SetListenerRealWorldHeadPositionOrientationVU( iID, v3Pos.x, v3Pos.y, v3Pos.z, v3View.x, v3View.y, v3View.z, v3Up.x, v3Up.y, v3Up.z );
return Py_None;
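
Taken together, the real-world (tracked) head pose of a listener can now be set and read with plain tuples as well; a usage sketch, assuming the Python-level names follow the C function names without the `va_` prefix, as with the other bindings:

``` python
# Real-world head pose of the listener, given as 3-tuples
va.set_listener_real_world_position( listener_id, ( 0.0, 1.7, 0.0 ) )
va.set_listener_real_world_orientation_vu( listener_id, ( 0.0, 0.0, -1.0 ), ( 0.0, 1.0, 0.0 ) )

# Read back the real-world orientation as ( view, up )
view, up = va.get_listener_real_world_orientation_vu( listener_id )
```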