#ifndef MLPP_TENSOR3_H
#define MLPP_TENSOR3_H

#include "core/math/math_defs.h"

#include "core/containers/pool_vector.h"
#include "core/containers/sort_array.h"
#include "core/containers/vector.h"
#include "core/error/error_macros.h"
#include "core/math/vector2i.h"
#include "core/os/memory.h"

#include "core/object/reference.h"

#include "mlpp_matrix.h"
#include "mlpp_vector.h"

// Needed for the temporary std::vector based conversion API declared near the bottom.
#include <vector>
class Image;

class MLPPTensor3 : public Reference {
	GDCLASS(MLPPTensor3, Reference);

public:
	real_t *ptrw() {
		return _data;
	}

	const real_t *ptr() const {
		return _data;
	}
	// TODO: Need to double check whether it's right to call the z axis a feature map (probably not).
	// TODO: Add helper methods for the other axes as well (they probably shouldn't have as extensive coverage as z),
	// only MLPPMatrix: get, add, set.
	// TODO: Add Image get / set helper methods to MLPPMatrix, so the other axis helper methods can use them.
	// TODO: Stop force-inlining the bigger methods (also do this in MLPPVector and MLPPMatrix).
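	// Data layout (derived from calculate_index() below): the tensor is stored as one flat
	// array of _size.z "feature maps" (z slices), each a row-major _size.x by _size.y matrix,
	// so element (x, y, z) lives at y * _size.x + x + _size.x * _size.y * z.
	//
	// A minimal usage sketch; the variable names are illustrative only:
	//
	//   Ref<MLPPTensor3> tensor;
	//   tensor.instance();
	//   tensor->resize(Size3i(4, 3, 2)); // x (width), y (height), z (feature maps)
	//   tensor->fill(0);
	//   tensor->set_element(1, 2, 0, 5.0); // y = 1, x = 2, z = 0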
	_FORCE_INLINE_ void add_feature_map(const Vector<real_t> &p_row) {
		if (p_row.size() == 0) {
			return;
		}

		int fms = feature_map_data_size();

		ERR_FAIL_COND(fms != p_row.size());

		int ci = data_size();

		++_size.z;

		_data = (real_t *)memrealloc(_data, data_size() * sizeof(real_t));
		CRASH_COND_MSG(!_data, "Out of memory");

		const real_t *row_arr = p_row.ptr();

		for (int i = 0; i < p_row.size(); ++i) {
			_data[ci + i] = row_arr[i];
		}
	}

	_FORCE_INLINE_ void add_feature_map_pool_vector(const PoolRealArray &p_row) {
		if (p_row.size() == 0) {
			return;
		}

		int fms = feature_map_data_size();

		ERR_FAIL_COND(fms != p_row.size());

		int ci = data_size();

		++_size.z;

		_data = (real_t *)memrealloc(_data, data_size() * sizeof(real_t));
		CRASH_COND_MSG(!_data, "Out of memory");

		PoolRealArray::Read rread = p_row.read();
		const real_t *row_arr = rread.ptr();

		for (int i = 0; i < p_row.size(); ++i) {
			_data[ci + i] = row_arr[i];
		}
	}

	_FORCE_INLINE_ void add_feature_map_mlpp_vector(const Ref<MLPPVector> &p_row) {
		ERR_FAIL_COND(!p_row.is_valid());

		int p_row_size = p_row->size();

		if (p_row_size == 0) {
			return;
		}

		int fms = feature_map_data_size();

		ERR_FAIL_COND(fms != p_row_size);

		int ci = data_size();

		++_size.z;

		_data = (real_t *)memrealloc(_data, data_size() * sizeof(real_t));
		CRASH_COND_MSG(!_data, "Out of memory");

		const real_t *row_ptr = p_row->ptr();

		for (int i = 0; i < p_row_size; ++i) {
			_data[ci + i] = row_ptr[i];
		}
	}

	_FORCE_INLINE_ void add_feature_map_mlpp_matrix(const Ref<MLPPMatrix> &p_matrix) {
		ERR_FAIL_COND(!p_matrix.is_valid());

		int other_data_size = p_matrix->data_size();

		if (other_data_size == 0) {
			return;
		}

		Size2i matrix_size = p_matrix->size();
		Size2i fms = feature_map_size();

		ERR_FAIL_COND(fms != matrix_size);

		int start_offset = data_size();

		++_size.z;

		_data = (real_t *)memrealloc(_data, data_size() * sizeof(real_t));
		CRASH_COND_MSG(!_data, "Out of memory");

		const real_t *other_ptr = p_matrix->ptr();

		for (int i = 0; i < other_data_size; ++i) {
			_data[start_offset + i] = other_ptr[i];
		}
	}
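	// A hedged usage sketch for the add_feature_map_* helpers above (the variable names are
	// illustrative, not part of the API): appending an MLPPMatrix whose size matches
	// feature_map_size() grows the tensor by one z slice.
	//
	//   Ref<MLPPMatrix> slice; // assumed to already hold a feature_map_size()-shaped matrix
	//   tensor->add_feature_map_mlpp_matrix(slice); // _size.z increases by one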
	void remove_feature_map(int p_index) {
		ERR_FAIL_INDEX(p_index, _size.z);

		--_size.z;

		int ds = data_size();

		if (ds == 0) {
			memfree(_data);
			_data = NULL;
			return;
		}

		int fmds = feature_map_data_size();

		for (int i = calculate_feature_map_index(p_index); i < ds; ++i) {
			_data[i] = _data[i + fmds];
		}

		_data = (real_t *)memrealloc(_data, data_size() * sizeof(real_t));
		CRASH_COND_MSG(!_data, "Out of memory");
	}
	// Removes the feature map by copying the last one into the position of the one to remove.
	// It's generally faster than `remove_feature_map`, but does not preserve order.
	void remove_feature_map_unordered(int p_index) {
		ERR_FAIL_INDEX(p_index, _size.z);

		--_size.z;

		int ds = data_size();

		if (ds == 0) {
			memfree(_data);
			_data = NULL;
			return;
		}

		int start_ind = calculate_feature_map_index(p_index);
		int end_ind = calculate_feature_map_index(p_index + 1);

		// After the decrement, the old last feature map starts at offset ds.
		for (int i = start_ind; i < end_ind; ++i) {
			_data[i] = _data[ds + (i - start_ind)];
		}

		_data = (real_t *)memrealloc(_data, data_size() * sizeof(real_t));
		CRASH_COND_MSG(!_data, "Out of memory");
	}
	void swap_feature_map(int p_index_1, int p_index_2) {
		ERR_FAIL_INDEX(p_index_1, _size.z);
		ERR_FAIL_INDEX(p_index_2, _size.z);

		int ind1_start = calculate_feature_map_index(p_index_1);
		int ind2_start = calculate_feature_map_index(p_index_2);

		int fmds = feature_map_data_size();

		for (int i = 0; i < fmds; ++i) {
			SWAP(_data[ind1_start + i], _data[ind2_start + i]);
		}
	}

	_FORCE_INLINE_ void clear() { resize(Size3i()); }

	_FORCE_INLINE_ void reset() {
		if (_data) {
			memfree(_data);
			_data = NULL;
			_size = Size3i();
		}
	}

	_FORCE_INLINE_ bool empty() const { return _size == Size3i(); }

	_FORCE_INLINE_ int feature_map_data_size() const { return _size.x * _size.y; }
	_FORCE_INLINE_ Size2i feature_map_size() const { return Size2i(_size.x, _size.y); }

	_FORCE_INLINE_ int data_size() const { return _size.x * _size.y * _size.z; }
	_FORCE_INLINE_ Size3i size() const { return _size; }

	void resize(const Size3i &p_size) {
		_size = p_size;

		int ds = data_size();

		if (ds == 0) {
			if (_data) {
				memfree(_data);
				_data = NULL;
			}

			return;
		}

		_data = (real_t *)memrealloc(_data, ds * sizeof(real_t));
		CRASH_COND_MSG(!_data, "Out of memory");
	}
	void set_shape(const Size3i &p_size) {
		int ds = data_size();
		int new_data_size = p_size.x * p_size.y * p_size.z;

		ERR_FAIL_COND_MSG(ds != new_data_size, "The new shape has a different volume than the old one. If this is intended, use resize() instead!");

		_size = p_size;
	}
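	// set_shape() only reinterprets the existing data with new dimensions, while resize()
	// reallocates the buffer. A minimal sketch (values after resize() are uninitialized):
	//
	//   tensor->resize(Size3i(6, 2, 1));    // reallocate: 12 elements
	//   tensor->set_shape(Size3i(3, 2, 2)); // same 12 elements, viewed as 2 feature maps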
	_FORCE_INLINE_ int calculate_index(int p_index_y, int p_index_x, int p_index_z) const {
		return p_index_y * _size.x + p_index_x + _size.x * _size.y * p_index_z;
	}

	_FORCE_INLINE_ int calculate_feature_map_index(int p_index_z) const {
		return _size.x * _size.y * p_index_z;
	}
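	// Worked example for calculate_index(): with _size = (4, 3, 2), the element at
	// y = 2, x = 1, z = 1 maps to 2 * 4 + 1 + 4 * 3 * 1 = 21, i.e. feature map 1 starts at
	// calculate_feature_map_index(1) == 12 and the element sits at row 2, column 1 inside it.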
	_FORCE_INLINE_ const real_t &operator[](int p_index) const {
		CRASH_BAD_INDEX(p_index, data_size());
		return _data[p_index];
	}
	_FORCE_INLINE_ real_t &operator[](int p_index) {
		CRASH_BAD_INDEX(p_index, data_size());
		return _data[p_index];
	}

	_FORCE_INLINE_ real_t get_element_index(int p_index) const {
		ERR_FAIL_INDEX_V(p_index, data_size(), 0);

		return _data[p_index];
	}

	_FORCE_INLINE_ void set_element_index(int p_index, real_t p_val) {
		ERR_FAIL_INDEX(p_index, data_size());

		_data[p_index] = p_val;
	}

	_FORCE_INLINE_ real_t get_element(int p_index_y, int p_index_x, int p_index_z) const {
		ERR_FAIL_INDEX_V(p_index_x, _size.x, 0);
		ERR_FAIL_INDEX_V(p_index_y, _size.y, 0);
		ERR_FAIL_INDEX_V(p_index_z, _size.z, 0);

		return _data[p_index_y * _size.x + p_index_x + _size.x * _size.y * p_index_z];
	}

	_FORCE_INLINE_ void set_element(int p_index_y, int p_index_x, int p_index_z, real_t p_val) {
		ERR_FAIL_INDEX(p_index_x, _size.x);
		ERR_FAIL_INDEX(p_index_y, _size.y);
		ERR_FAIL_INDEX(p_index_z, _size.z);

		_data[p_index_y * _size.x + p_index_x + _size.x * _size.y * p_index_z] = p_val;
	}
	_FORCE_INLINE_ Vector<real_t> get_row_vector(int p_index_y, int p_index_z) {
		ERR_FAIL_INDEX_V(p_index_y, _size.y, Vector<real_t>());
		ERR_FAIL_INDEX_V(p_index_z, _size.z, Vector<real_t>());

		Vector<real_t> ret;

		if (unlikely(_size.x == 0)) {
			return ret;
		}

		ret.resize(_size.x);

		// Start of the requested row inside the requested feature map.
		int ind_start = p_index_y * _size.x + _size.x * _size.y * p_index_z;

		real_t *row_ptr = ret.ptrw();

		for (int i = 0; i < _size.x; ++i) {
			row_ptr[i] = _data[ind_start + i];
		}

		return ret;
	}
	_FORCE_INLINE_ PoolRealArray get_row_pool_vector(int p_index_y, int p_index_z) {
		ERR_FAIL_INDEX_V(p_index_y, _size.y, PoolRealArray());
		ERR_FAIL_INDEX_V(p_index_z, _size.z, PoolRealArray());

		PoolRealArray ret;

		if (unlikely(_size.x == 0)) {
			return ret;
		}

		ret.resize(_size.x);

		int ind_start = p_index_y * _size.x + _size.x * _size.y * p_index_z;

		PoolRealArray::Write w = ret.write();
		real_t *row_ptr = w.ptr();

		for (int i = 0; i < _size.x; ++i) {
			row_ptr[i] = _data[ind_start + i];
		}

		return ret;
	}

	_FORCE_INLINE_ Ref<MLPPVector> get_row_mlpp_vector(int p_index_y, int p_index_z) {
		ERR_FAIL_INDEX_V(p_index_y, _size.y, Ref<MLPPVector>());
		ERR_FAIL_INDEX_V(p_index_z, _size.z, Ref<MLPPVector>());

		Ref<MLPPVector> ret;
		ret.instance();

		if (unlikely(_size.x == 0)) {
			return ret;
		}

		ret->resize(_size.x);

		int ind_start = p_index_y * _size.x + _size.x * _size.y * p_index_z;

		real_t *row_ptr = ret->ptrw();

		for (int i = 0; i < _size.x; ++i) {
			row_ptr[i] = _data[ind_start + i];
		}

		return ret;
	}

	_FORCE_INLINE_ void get_row_into_mlpp_vector(int p_index_y, int p_index_z, Ref<MLPPVector> target) const {
		ERR_FAIL_COND(!target.is_valid());
		ERR_FAIL_INDEX(p_index_y, _size.y);
		ERR_FAIL_INDEX(p_index_z, _size.z);

		if (unlikely(target->size() != _size.x)) {
			target->resize(_size.x);
		}

		int ind_start = p_index_y * _size.x + _size.x * _size.y * p_index_z;

		real_t *row_ptr = target->ptrw();

		for (int i = 0; i < _size.x; ++i) {
			row_ptr[i] = _data[ind_start + i];
		}
	}

	_FORCE_INLINE_ void set_row_vector(int p_index_y, int p_index_z, const Vector<real_t> &p_row) {
		ERR_FAIL_COND(p_row.size() != _size.x);
		ERR_FAIL_INDEX(p_index_y, _size.y);
		ERR_FAIL_INDEX(p_index_z, _size.z);

		int ind_start = p_index_y * _size.x + _size.x * _size.y * p_index_z;

		const real_t *row_ptr = p_row.ptr();

		for (int i = 0; i < _size.x; ++i) {
			_data[ind_start + i] = row_ptr[i];
		}
	}

	_FORCE_INLINE_ void set_row_pool_vector(int p_index_y, int p_index_z, const PoolRealArray &p_row) {
		ERR_FAIL_COND(p_row.size() != _size.x);
		ERR_FAIL_INDEX(p_index_y, _size.y);
		ERR_FAIL_INDEX(p_index_z, _size.z);

		int ind_start = p_index_y * _size.x + _size.x * _size.y * p_index_z;

		PoolRealArray::Read r = p_row.read();
		const real_t *row_ptr = r.ptr();

		for (int i = 0; i < _size.x; ++i) {
			_data[ind_start + i] = row_ptr[i];
		}
	}

	_FORCE_INLINE_ void set_row_mlpp_vector(int p_index_y, int p_index_z, const Ref<MLPPVector> &p_row) {
		ERR_FAIL_COND(!p_row.is_valid());
		ERR_FAIL_COND(p_row->size() != _size.x);
		ERR_FAIL_INDEX(p_index_y, _size.y);
		ERR_FAIL_INDEX(p_index_z, _size.z);

		int ind_start = p_index_y * _size.x + _size.x * _size.y * p_index_z;

		const real_t *row_ptr = p_row->ptr();

		for (int i = 0; i < _size.x; ++i) {
			_data[ind_start + i] = row_ptr[i];
		}
	}
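	// A hedged row access sketch (variable names are illustrative): rows are addressed per
	// (y, z) pair, so copying row 0 of feature map 0 into feature map 1 could look like:
	//
	//   Ref<MLPPVector> row = tensor->get_row_mlpp_vector(0, 0);
	//   tensor->set_row_mlpp_vector(0, 1, row);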
	_FORCE_INLINE_ Vector<real_t> get_feature_map_vector(int p_index_z) {
		ERR_FAIL_INDEX_V(p_index_z, _size.z, Vector<real_t>());

		Vector<real_t> ret;

		int fmds = feature_map_data_size();

		if (unlikely(fmds == 0)) {
			return ret;
		}

		ret.resize(fmds);

		int ind_start = calculate_feature_map_index(p_index_z);

		real_t *row_ptr = ret.ptrw();

		for (int i = 0; i < fmds; ++i) {
			row_ptr[i] = _data[ind_start + i];
		}

		return ret;
	}

	_FORCE_INLINE_ PoolRealArray get_feature_map_pool_vector(int p_index_z) {
		ERR_FAIL_INDEX_V(p_index_z, _size.z, PoolRealArray());

		PoolRealArray ret;

		int fmds = feature_map_data_size();

		if (unlikely(fmds == 0)) {
			return ret;
		}

		ret.resize(fmds);

		int ind_start = calculate_feature_map_index(p_index_z);

		PoolRealArray::Write w = ret.write();
		real_t *row_ptr = w.ptr();

		for (int i = 0; i < fmds; ++i) {
			row_ptr[i] = _data[ind_start + i];
		}

		return ret;
	}

	_FORCE_INLINE_ Ref<MLPPVector> get_feature_map_mlpp_vector(int p_index_z) {
		ERR_FAIL_INDEX_V(p_index_z, _size.z, Ref<MLPPVector>());

		Ref<MLPPVector> ret;
		ret.instance();

		int fmds = feature_map_data_size();

		if (unlikely(fmds == 0)) {
			return ret;
		}

		ret->resize(fmds);

		int ind_start = calculate_feature_map_index(p_index_z);

		real_t *row_ptr = ret->ptrw();

		for (int i = 0; i < fmds; ++i) {
			row_ptr[i] = _data[ind_start + i];
		}

		return ret;
	}

	_FORCE_INLINE_ void get_feature_map_into_mlpp_vector(int p_index_z, Ref<MLPPVector> target) const {
		ERR_FAIL_COND(!target.is_valid());
		ERR_FAIL_INDEX(p_index_z, _size.z);

		int fmds = feature_map_data_size();

		if (unlikely(target->size() != fmds)) {
			target->resize(fmds);
		}

		int ind_start = calculate_feature_map_index(p_index_z);

		real_t *row_ptr = target->ptrw();

		for (int i = 0; i < fmds; ++i) {
			row_ptr[i] = _data[ind_start + i];
		}
	}

	_FORCE_INLINE_ Ref<MLPPMatrix> get_feature_map_mlpp_matrix(int p_index_z) {
		ERR_FAIL_INDEX_V(p_index_z, _size.z, Ref<MLPPMatrix>());

		Ref<MLPPMatrix> ret;
		ret.instance();

		int fmds = feature_map_data_size();

		if (unlikely(fmds == 0)) {
			return ret;
		}

		ret->resize(feature_map_size());

		int ind_start = calculate_feature_map_index(p_index_z);

		real_t *row_ptr = ret->ptrw();

		for (int i = 0; i < fmds; ++i) {
			row_ptr[i] = _data[ind_start + i];
		}

		return ret;
	}

	_FORCE_INLINE_ void get_feature_map_into_mlpp_matrix(int p_index_z, Ref<MLPPMatrix> target) const {
		ERR_FAIL_COND(!target.is_valid());
		ERR_FAIL_INDEX(p_index_z, _size.z);

		int fmds = feature_map_data_size();
		Size2i fms = feature_map_size();

		if (unlikely(target->size() != fms)) {
			target->resize(fms);
		}

		int ind_start = calculate_feature_map_index(p_index_z);

		real_t *row_ptr = target->ptrw();

		for (int i = 0; i < fmds; ++i) {
			row_ptr[i] = _data[ind_start + i];
		}
	}
	_FORCE_INLINE_ void set_feature_map_vector(int p_index_z, const Vector<real_t> &p_row) {
		ERR_FAIL_INDEX(p_index_z, _size.z);

		int fmds = feature_map_data_size();

		ERR_FAIL_COND(p_row.size() != fmds);

		int ind_start = calculate_feature_map_index(p_index_z);

		const real_t *row_ptr = p_row.ptr();

		for (int i = 0; i < fmds; ++i) {
			_data[ind_start + i] = row_ptr[i];
		}
	}

	_FORCE_INLINE_ void set_feature_map_pool_vector(int p_index_z, const PoolRealArray &p_row) {
		ERR_FAIL_INDEX(p_index_z, _size.z);

		int fmds = feature_map_data_size();

		ERR_FAIL_COND(p_row.size() != fmds);

		int ind_start = calculate_feature_map_index(p_index_z);

		PoolRealArray::Read r = p_row.read();
		const real_t *row_ptr = r.ptr();

		for (int i = 0; i < fmds; ++i) {
			_data[ind_start + i] = row_ptr[i];
		}
	}

	_FORCE_INLINE_ void set_feature_map_mlpp_vector(int p_index_z, const Ref<MLPPVector> &p_row) {
		ERR_FAIL_INDEX(p_index_z, _size.z);
		ERR_FAIL_COND(!p_row.is_valid());

		int fmds = feature_map_data_size();

		ERR_FAIL_COND(p_row->size() != fmds);

		int ind_start = calculate_feature_map_index(p_index_z);

		const real_t *row_ptr = p_row->ptr();

		for (int i = 0; i < fmds; ++i) {
			_data[ind_start + i] = row_ptr[i];
		}
	}

	_FORCE_INLINE_ void set_feature_map_mlpp_matrix(int p_index_z, const Ref<MLPPMatrix> &p_mat) {
		ERR_FAIL_INDEX(p_index_z, _size.z);
		ERR_FAIL_COND(!p_mat.is_valid());

		int fmds = feature_map_data_size();

		ERR_FAIL_COND(p_mat->size() != feature_map_size());

		int ind_start = calculate_feature_map_index(p_index_z);

		const real_t *row_ptr = p_mat->ptr();

		for (int i = 0; i < fmds; ++i) {
			_data[ind_start + i] = row_ptr[i];
		}
	}
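	// A hedged sketch of whole-feature-map access (variable names are illustrative): a z slice
	// can be pulled out as an MLPPMatrix, processed, and written back into place.
	//
	//   Ref<MLPPMatrix> fm = tensor->get_feature_map_mlpp_matrix(0);
	//   // ... operate on fm ...
	//   tensor->set_feature_map_mlpp_matrix(0, fm);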
public:
	// Image API

	enum ImageChannelFlags {
		IMAGE_CHANNEL_FLAG_R = 1 << 0,
		IMAGE_CHANNEL_FLAG_G = 1 << 1,
		IMAGE_CHANNEL_FLAG_B = 1 << 2,
		IMAGE_CHANNEL_FLAG_A = 1 << 3,

		IMAGE_CHANNEL_FLAG_NONE = 0,
		IMAGE_CHANNEL_FLAG_RG = IMAGE_CHANNEL_FLAG_R | IMAGE_CHANNEL_FLAG_G,
		IMAGE_CHANNEL_FLAG_RGB = IMAGE_CHANNEL_FLAG_R | IMAGE_CHANNEL_FLAG_G | IMAGE_CHANNEL_FLAG_B,
		IMAGE_CHANNEL_FLAG_GB = IMAGE_CHANNEL_FLAG_G | IMAGE_CHANNEL_FLAG_B,
		IMAGE_CHANNEL_FLAG_GBA = IMAGE_CHANNEL_FLAG_G | IMAGE_CHANNEL_FLAG_B | IMAGE_CHANNEL_FLAG_A,
		IMAGE_CHANNEL_FLAG_BA = IMAGE_CHANNEL_FLAG_B | IMAGE_CHANNEL_FLAG_A,
		IMAGE_CHANNEL_FLAG_RGBA = IMAGE_CHANNEL_FLAG_R | IMAGE_CHANNEL_FLAG_G | IMAGE_CHANNEL_FLAG_B | IMAGE_CHANNEL_FLAG_A,
	};
	void add_feature_maps_image(const Ref<Image> &p_img, const int p_channels = IMAGE_CHANNEL_FLAG_RGBA);

	Ref<Image> get_feature_map_image(const int p_index_z);
	Ref<Image> get_feature_maps_image(const int p_index_r = -1, const int p_index_g = -1, const int p_index_b = -1, const int p_index_a = -1);

	void get_feature_map_into_image(Ref<Image> p_target, const int p_index_z, const int p_target_channels = IMAGE_CHANNEL_FLAG_RGB) const;
	void get_feature_maps_into_image(Ref<Image> p_target, const int p_index_r = -1, const int p_index_g = -1, const int p_index_b = -1, const int p_index_a = -1) const;

	void set_feature_map_image(const Ref<Image> &p_img, const int p_index_z, const int p_image_channel_flag = IMAGE_CHANNEL_FLAG_R);
	void set_feature_maps_image(const Ref<Image> &p_img, const int p_index_r = -1, const int p_index_g = -1, const int p_index_b = -1, const int p_index_a = -1);

	void set_from_image(const Ref<Image> &p_img, const int p_channels = IMAGE_CHANNEL_FLAG_RGBA);
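	// Presumably each selected image channel gets stored as its own feature map (the definitions
	// live in the .cpp file, so this is an assumption based on the signatures above). A sketch,
	// where `img` is an illustrative Ref<Image>:
	//
	//   tensor->set_from_image(img, IMAGE_CHANNEL_FLAG_RGB); // would produce 3 feature maps
	//   Ref<Image> red = tensor->get_feature_map_image(0);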
public:
	// Math API
	//Vector<Ref<MLPPMatrix>> additionnvt(const Vector<Ref<MLPPMatrix>> &A, const Vector<Ref<MLPPMatrix>> &B);
	//Vector<Ref<MLPPMatrix>> element_wise_divisionnvnvt(const Vector<Ref<MLPPMatrix>> &A, const Vector<Ref<MLPPMatrix>> &B);
	//Vector<Ref<MLPPMatrix>> sqrtnvt(const Vector<Ref<MLPPMatrix>> &A);
	//Vector<Ref<MLPPMatrix>> exponentiatenvt(const Vector<Ref<MLPPMatrix>> &A, real_t p);
	//std::vector<std::vector<real_t>> tensor_vec_mult(std::vector<std::vector<std::vector<real_t>>> A, std::vector<real_t> b);
	//std::vector<real_t> flatten(std::vector<std::vector<std::vector<real_t>>> A);
	//Vector<Ref<MLPPMatrix>> scalar_multiplynvt(real_t scalar, Vector<Ref<MLPPMatrix>> A);
	//Vector<Ref<MLPPMatrix>> scalar_addnvt(real_t scalar, Vector<Ref<MLPPMatrix>> A);
	//Vector<Ref<MLPPMatrix>> resizenvt(const Vector<Ref<MLPPMatrix>> &A, const Vector<Ref<MLPPMatrix>> &B);
	//std::vector<std::vector<std::vector<real_t>>> hadamard_product(std::vector<std::vector<std::vector<real_t>>> A, std::vector<std::vector<std::vector<real_t>>> B);
	//Vector<Ref<MLPPMatrix>> maxnvt(const Vector<Ref<MLPPMatrix>> &A, const Vector<Ref<MLPPMatrix>> &B);
	//Vector<Ref<MLPPMatrix>> absnvt(const Vector<Ref<MLPPMatrix>> &A);
	//real_t norm_2(std::vector<std::vector<std::vector<real_t>>> A);
	//std::vector<std::vector<std::vector<real_t>>> vector_wise_tensor_product(std::vector<std::vector<std::vector<real_t>>> A, std::vector<std::vector<real_t>> B);

public:
	void fill(real_t p_val) {
		if (!_data) {
			return;
		}

		int ds = data_size();
		for (int i = 0; i < ds; ++i) {
			_data[i] = p_val;
		}
	}

	Vector<real_t> to_flat_vector() const {
		Vector<real_t> ret;
		ret.resize(data_size());
		real_t *w = ret.ptrw();
		memcpy(w, _data, sizeof(real_t) * data_size());
		return ret;
	}

	PoolRealArray to_flat_pool_vector() const {
		PoolRealArray pl;
		if (data_size()) {
			pl.resize(data_size());
			PoolRealArray::Write w = pl.write();
			real_t *dest = w.ptr();

			for (int i = 0; i < data_size(); ++i) {
				dest[i] = static_cast<real_t>(_data[i]);
			}
		}
		return pl;
	}

	Vector<uint8_t> to_flat_byte_array() const {
		Vector<uint8_t> ret;
		ret.resize(data_size() * sizeof(real_t));
		uint8_t *w = ret.ptrw();
		memcpy(w, _data, sizeof(real_t) * data_size());
		return ret;
	}
	Ref<MLPPTensor3> duplicate() const {
		Ref<MLPPTensor3> ret;
		ret.instance();

		ret->set_from_mlpp_tensor3r(*this);

		return ret;
	}

	_FORCE_INLINE_ void set_from_mlpp_tensor3(const Ref<MLPPTensor3> &p_from) {
		ERR_FAIL_COND(!p_from.is_valid());

		resize(p_from->size());

		int ds = p_from->data_size();
		const real_t *ptr = p_from->ptr();

		for (int i = 0; i < ds; ++i) {
			_data[i] = ptr[i];
		}
	}

	_FORCE_INLINE_ void set_from_mlpp_tensor3r(const MLPPTensor3 &p_from) {
		resize(p_from.size());

		int ds = p_from.data_size();
		const real_t *ptr = p_from.ptr();

		for (int i = 0; i < ds; ++i) {
			_data[i] = ptr[i];
		}
	}

	_FORCE_INLINE_ void set_from_mlpp_matrix(const Ref<MLPPMatrix> &p_from) {
		ERR_FAIL_COND(!p_from.is_valid());

		Size2i mat_size = p_from->size();
		resize(Size3i(mat_size.x, mat_size.y, 1));

		int ds = p_from->data_size();
		const real_t *ptr = p_from->ptr();

		for (int i = 0; i < ds; ++i) {
			_data[i] = ptr[i];
		}
	}

	_FORCE_INLINE_ void set_from_mlpp_matrixr(const MLPPMatrix &p_from) {
		Size2i mat_size = p_from.size();
		resize(Size3i(mat_size.x, mat_size.y, 1));

		int ds = p_from.data_size();
		const real_t *ptr = p_from.ptr();

		for (int i = 0; i < ds; ++i) {
			_data[i] = ptr[i];
		}
	}

	_FORCE_INLINE_ void set_from_mlpp_vectors(const Vector<Ref<MLPPVector>> &p_from) {
		if (p_from.size() == 0) {
			reset();
			return;
		}

		if (!p_from[0].is_valid()) {
			reset();
			return;
		}

		resize(Size3i(p_from[0]->size(), p_from.size(), 1));

		if (data_size() == 0) {
			reset();
			return;
		}

		for (int i = 0; i < p_from.size(); ++i) {
			const Ref<MLPPVector> &r = p_from[i];

			ERR_CONTINUE(!r.is_valid());
			ERR_CONTINUE(r->size() != _size.x);

			int start_index = i * _size.x;

			const real_t *from_ptr = r->ptr();
			for (int j = 0; j < _size.x; j++) {
				_data[start_index + j] = from_ptr[j];
			}
		}
	}
	_FORCE_INLINE_ void set_from_mlpp_matricess(const Vector<Ref<MLPPMatrix>> &p_from) {
		if (p_from.size() == 0) {
			reset();
			return;
		}

		if (!p_from[0].is_valid()) {
			reset();
			return;
		}

		resize(Size3i(p_from[0]->size().x, p_from[0]->size().y, p_from.size()));

		if (data_size() == 0) {
			reset();
			return;
		}

		Size2i fms = feature_map_size();
		int fmds = feature_map_data_size();

		for (int i = 0; i < p_from.size(); ++i) {
			const Ref<MLPPMatrix> &r = p_from[i];

			ERR_CONTINUE(!r.is_valid());
			ERR_CONTINUE(r->size() != fms);

			int start_index = calculate_feature_map_index(i);

			const real_t *from_ptr = r->ptr();
			for (int j = 0; j < fmds; j++) {
				_data[start_index + j] = from_ptr[j];
			}
		}
	}

	_FORCE_INLINE_ void set_from_mlpp_vectors_array(const Array &p_from) {
		if (p_from.size() == 0) {
			reset();
			return;
		}

		Ref<MLPPVector> v0 = p_from[0];

		if (!v0.is_valid()) {
			reset();
			return;
		}

		resize(Size3i(v0->size(), p_from.size(), 1));

		if (data_size() == 0) {
			reset();
			return;
		}

		for (int i = 0; i < p_from.size(); ++i) {
			Ref<MLPPVector> r = p_from[i];

			ERR_CONTINUE(!r.is_valid());
			ERR_CONTINUE(r->size() != _size.x);

			int start_index = i * _size.x;

			const real_t *from_ptr = r->ptr();
			for (int j = 0; j < _size.x; j++) {
				_data[start_index + j] = from_ptr[j];
			}
		}
	}
	_FORCE_INLINE_ void set_from_mlpp_matrices_array(const Array &p_from) {
		if (p_from.size() == 0) {
			reset();
			return;
		}

		Ref<MLPPMatrix> v0 = p_from[0];

		if (!v0.is_valid()) {
			reset();
			return;
		}

		resize(Size3i(v0->size().x, v0->size().y, p_from.size()));

		if (data_size() == 0) {
			reset();
			return;
		}

		Size2i fms = feature_map_size();
		int fmds = feature_map_data_size();

		for (int i = 0; i < p_from.size(); ++i) {
			Ref<MLPPMatrix> r = p_from[i];

			ERR_CONTINUE(!r.is_valid());
			ERR_CONTINUE(r->size() != fms);

			int start_index = calculate_feature_map_index(i);

			const real_t *from_ptr = r->ptr();
			for (int j = 0; j < fmds; j++) {
				_data[start_index + j] = from_ptr[j];
			}
		}
	}
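	// A hedged construction sketch using the setters above (variable names are illustrative): an
	// Array of equally sized MLPPMatrix feature maps becomes one tensor whose z size equals the
	// Array's length.
	//
	//   Array maps; // assumed to hold MLPPMatrix refs of identical size
	//   Ref<MLPPTensor3> t;
	//   t.instance();
	//   t->set_from_mlpp_matrices_array(maps);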
	_FORCE_INLINE_ bool is_equal_approx(const Ref<MLPPTensor3> &p_with, real_t tolerance = static_cast<real_t>(CMP_EPSILON)) const {
		ERR_FAIL_COND_V(!p_with.is_valid(), false);

		if (unlikely(this == p_with.ptr())) {
			return true;
		}

		if (_size != p_with->size()) {
			return false;
		}

		int ds = data_size();

		for (int i = 0; i < ds; ++i) {
			if (!Math::is_equal_approx(_data[i], p_with->_data[i], tolerance)) {
				return false;
			}
		}

		return true;
	}

	String to_string();

	_FORCE_INLINE_ MLPPTensor3() {
		_data = NULL;
	}

	_FORCE_INLINE_ MLPPTensor3(const MLPPMatrix &p_from) {
		_data = NULL;

		Size2i mat_size = p_from.size();
		resize(Size3i(mat_size.x, mat_size.y, 1));

		int ds = p_from.data_size();
		const real_t *ptr = p_from.ptr();

		for (int i = 0; i < ds; ++i) {
			_data[i] = ptr[i];
		}
	}

	MLPPTensor3(const Array &p_from) {
		_data = NULL;

		set_from_mlpp_matrices_array(p_from);
	}

	_FORCE_INLINE_ ~MLPPTensor3() {
		if (_data) {
			reset();
		}
	}

	// TODO: These are temporary.
	std::vector<real_t> to_flat_std_vector() const;
	void set_from_std_vectors(const std::vector<std::vector<std::vector<real_t>>> &p_from);
	std::vector<std::vector<std::vector<real_t>>> to_std_vector();
	MLPPTensor3(const std::vector<std::vector<std::vector<real_t>>> &p_from);

protected:
	static void _bind_methods();

protected:
	Size3i _size;
	real_t *_data;
};

VARIANT_ENUM_CAST(MLPPTensor3::ImageChannelFlags);

#endif