ailia_tflite  1.2.4.0
Public Member Functions | Protected Member Functions | Protected Attributes | List of all members
ailiaTFLite.AiliaTFLiteModel Class Reference
Inheritance diagram for ailiaTFLite.AiliaTFLiteModel:
Inheritance graph
[legend]
Collaboration diagram for ailiaTFLite.AiliaTFLiteModel:
Collaboration graph
[legend]

Public Member Functions

void Close ()
 Destroys network objects. More...
 
virtual void Dispose ()
 Release resources. More...
 
bool OpenMem (byte[] tflite_model_buf, Int32 env_id, Int32 memory_mode, UInt32 flags)
 Creates network objects from memory. More...
 
bool OpenFile (string tflite_model_path, Int32 env_id, Int32 memory_mode, UInt32 flags)
 Create a network object from a model file. More...
 
bool SelectDevice (ref string device_list, bool reference_only=false)
 Select device. More...
 
bool AllocateTensors ()
 Ensure the internal buffer. More...
 
bool Predict ()
 Perform inference. More...
 
bool GetInputTensorInfo (ref Int32[] shape, ref IntPtr buffer, ref sbyte tensor_type, ref float[] quant_scale, ref Int64[] quant_zero_point, ref Int32 quant_axis, Int32 input_index=0)
 Get the input tensor. More...
 
bool GetOutputTensorInfo (ref Int32[] shape, ref IntPtr buffer, ref sbyte tensor_type, ref float[] quant_scale, ref Int64[] quant_zero_point, ref Int32 quant_axis, Int32 output_index=0)
 Get the output tensor. More...
 
bool GetInputTensorShape (ref int[] shape, int input_index)
 Get the shape of input tensor. More...
 
bool GetOutputTensorShape (ref int[] shape, int output_index)
 Get the shape of output tensor. More...
 
bool SetInputTensorData (float[] input_data, int input_index)
 Set the data of input tensor. More...
 
bool GetOutputTensorData (float[] output_data, int output_index)
 Get the data of output tensor. More...
 
bool SetProfileMode (int profile_mode)
 Enable profile mode. More...
 
string GetSummary ()
 Obtain network information and profile results. More...
 

Protected Member Functions

virtual void Dispose (bool disposing)
 

Protected Attributes

IntPtr instance = IntPtr.Zero
 
bool logging = true
 

Member Function Documentation

◆ AllocateTensors()

bool ailiaTFLite.AiliaTFLiteModel.AllocateTensors ( )
inline

Ensure the internal buffer.

Returns
If this function is successful, it returns true , or false otherwise.
249  {
250  int status = AiliaTFLite.ailiaTFLiteAllocateTensors(instance);
251  if(!CheckStatus(status, "ailiaTFLiteAllocateTensors")) return false;
252  return true;
253  }
IntPtr instance
Definition: AiliaTFLiteModel.cs:25

◆ Close()

void ailiaTFLite.AiliaTFLiteModel.Close ( )
inline

Destroys network objects.

Destroys and initializes the network object.

63  {
64  CloseInstance();
65  ReleaseMem();
66  }

◆ Dispose() [1/2]

virtual void ailiaTFLite.AiliaTFLiteModel.Dispose ( )
inlinevirtual

Release resources.

76  {
77  Dispose(true);
78  }
virtual void Dispose()
Release resources.
Definition: AiliaTFLiteModel.cs:75

◆ Dispose() [2/2]

virtual void ailiaTFLite.AiliaTFLiteModel.Dispose ( bool  disposing)
inlineprotectedvirtual
81  {
82  if (disposing){
83  // release managed resource
84  }
85  Close(); // release unmanaged resource
86  }
void Close()
Destroys network objects.
Definition: AiliaTFLiteModel.cs:63

◆ GetInputTensorInfo()

bool ailiaTFLite.AiliaTFLiteModel.GetInputTensorInfo ( ref Int32[]  shape,
ref IntPtr  buffer,
ref sbyte  tensor_type,
ref float[]  quant_scale,
ref Int64[]  quant_zero_point,
ref Int32  quant_axis,
Int32  input_index = 0 
)
inline

Get the input tensor.

Parameters
shapeInput Shape
bufferPointer to input buffer
tensor_typeType of input buffer
quant_scaleQuantization scale
quant_zero_pointQuantization zero point
quant_axisQuantization axis
input_indexInput tensor index
Returns
If this function is successful, it returns true , or false otherwise.
303  {
304  Int32 status;
305  Int32 num_of_input_tensor=-1;
306  Int32 tensor_index=-1;
307  Int32 tensor_dim=-1;
308 
309  status = AiliaTFLite.ailiaTFLiteGetNumberOfInputs(instance, ref num_of_input_tensor);
310  if(!CheckStatus(status, "ailiaTFLiteGetNumberOfInputs")) return false;
311  if(input_index >= num_of_input_tensor) return false;
312  status = AiliaTFLite.ailiaTFLiteGetInputTensorIndex(instance, ref tensor_index, input_index);
313  if(!CheckStatus(status, "ailiaTFLiteGetInputTensorIndex")) return false;
314  status = AiliaTFLite.ailiaTFLiteGetTensorDimension(instance, ref tensor_dim, tensor_index);
315  shape = new Int32[tensor_dim];
316  if(!CheckStatus(status, "ailiaTFLiteGetTensorDimension")) return false;
317  status = AiliaTFLite.ailiaTFLiteGetTensorShape(instance, shape, tensor_index);
318  if(!CheckStatus(status, "ailiaTFLiteGetTensorShape")) return false;
319  status = AiliaTFLite.ailiaTFLiteGetTensorBuffer(instance, ref buffer, tensor_index);
320  if(!CheckStatus(status, "ailiaTFLiteGetTensorBuffer")) return false;
321  status = AiliaTFLite.ailiaTFLiteGetTensorType(instance, ref tensor_type, tensor_index);
322  if(!CheckStatus(status, "ailiaTFLiteGetTensorType")) return false;
323 
324  // quantization params
325  if (tensor_type != AiliaTFLite.AILIA_TFLITE_TENSOR_TYPE_FLOAT32){
326  Int32 quant_count = 0;
327  status = AiliaTFLite.ailiaTFLiteGetTensorQuantizationCount(instance, ref quant_count, tensor_index);
328  if(!CheckStatus(status, "ailiaTFLiteGetTensorQuantizationCount")) return false;
329  quant_scale = new float[quant_count];
330  status = AiliaTFLite.ailiaTFLiteGetTensorQuantizationScale(instance, quant_scale, tensor_index);
331  if(!CheckStatus(status, "ailiaTFLiteGetTensorQuantizationScale")) return false;
332  quant_zero_point = new Int64[quant_count];
333  status = AiliaTFLite.ailiaTFLiteGetTensorQuantizationZeroPoint(instance, quant_zero_point, tensor_index);
334  if(!CheckStatus(status, "ailiaTFLiteGetTensorQuantizationZeroPoint")) return false;
335  status = AiliaTFLite.ailiaTFLiteGetTensorQuantizationQuantizedDimension(instance, ref quant_axis, tensor_index);
336  if(!CheckStatus(status, "ailiaTFLiteGetTensorQuantizationQuantizedDimension")) return false;
337  }
338 
339  //log("input num_of_input_tensor:"+num_of_input_tensor+", index:"+tensor_index+", dim:"+tensor_dim+", shape:"+shape[0]+","+shape[1]+","+shape[2]+","+shape[3]+", quant_zero_point:"+quant_zero_point[0]+", quant_scale:"+quant_scale[0]+", tensor_type:"+tensor_type);
340  return true;
341  }

◆ GetInputTensorShape()

bool ailiaTFLite.AiliaTFLiteModel.GetInputTensorShape ( ref int[]  shape,
int  input_index 
)
inline

Get the shape of input tensor.

Parameters
shapeInput Shape
input_indexInput tensor index
Returns
If this function is successful, it returns true , or false otherwise.
428  {
429  Int32 status;
430  Int32 num_of_input_tensor=-1;
431  Int32 tensor_index=-1;
432  Int32 tensor_dim=-1;
433 
434  status = AiliaTFLite.ailiaTFLiteGetNumberOfInputs(instance, ref num_of_input_tensor);
435  if(!CheckStatus(status, "ailiaTFLiteGetNumberOfInputs")) return false;
436  if(input_index >= num_of_input_tensor) return false;
437  status = AiliaTFLite.ailiaTFLiteGetInputTensorIndex(instance, ref tensor_index, input_index);
438  if(!CheckStatus(status, "ailiaTFLiteGetInputTensorIndex")) return false;
439  status = AiliaTFLite.ailiaTFLiteGetTensorDimension(instance, ref tensor_dim, tensor_index);
440  shape = new Int32[tensor_dim];
441  if(!CheckStatus(status, "ailiaTFLiteGetTensorDimension")) return false;
442  status = AiliaTFLite.ailiaTFLiteGetTensorShape(instance, shape, tensor_index);
443  if(!CheckStatus(status, "ailiaTFLiteGetTensorShape")) return false;
444 
445  return true;
446  }

◆ GetOutputTensorData()

bool ailiaTFLite.AiliaTFLiteModel.GetOutputTensorData ( float[]  output_data,
int  output_index 
)
inline

Get the data of output tensor.

Parameters
output_dataOutput Data
output_indexIndex of output tensor
Returns
If this function is successful, it returns true , or false otherwise.
538  {
539  Int32 [] output_shape = null;
540  IntPtr output_buffer = IntPtr.Zero;
541  sbyte output_tensor_type = 0;
542  float [] output_quant_scale = null;
543  Int64 [] output_quant_zero_point = null;
544  Int32 output_quant_axis = 0;
545 
546  GetOutputTensorInfo(ref output_shape, ref output_buffer, ref output_tensor_type,
547  ref output_quant_scale, ref output_quant_zero_point, ref output_quant_axis, output_index
548  );
549 
550  int output_size = output_data.Length;
551  if (output_tensor_type == AiliaTFLite.AILIA_TFLITE_TENSOR_TYPE_FLOAT32){
552  Marshal.Copy(output_buffer, output_data, 0, output_size);
553  }else{
554  byte[] buf = new byte[output_size];
555  Marshal.Copy(output_buffer, buf, 0, output_size);
556  dequant(output_data, buf, output_quant_scale[0], output_quant_zero_point[0], output_tensor_type);
557  }
558  return true;
559  }
bool GetOutputTensorInfo(ref Int32[] shape, ref IntPtr buffer, ref sbyte tensor_type, ref float[] quant_scale, ref Int64[] quant_zero_point, ref Int32 quant_axis, Int32 output_index=0)
Get the output tensor.
Definition: AiliaTFLiteModel.cs:368

◆ GetOutputTensorInfo()

bool ailiaTFLite.AiliaTFLiteModel.GetOutputTensorInfo ( ref Int32[]  shape,
ref IntPtr  buffer,
ref sbyte  tensor_type,
ref float[]  quant_scale,
ref Int64[]  quant_zero_point,
ref Int32  quant_axis,
Int32  output_index = 0 
)
inline

Get the output tensor.

Parameters
shapeOutput Shape
bufferPointer to output buffer
tensor_typeType of output buffer
quant_scaleQuantization scale
quant_zero_pointQuantization zero point
quant_axisQuantization axis
output_indexIndex of output tensor
Returns
If this function is successful, it returns true , or false otherwise.
371  {
372  Int32 status;
373  Int32 num_of_output_tensor=-1;
374  Int32 tensor_index=-1;
375  Int32 tensor_dim=-1;
376 
377  status = AiliaTFLite.ailiaTFLiteGetNumberOfOutputs(instance, ref num_of_output_tensor);
378  if(!CheckStatus(status, "ailiaTFLiteGetNumberOfOutputs")) return false;
379  if(output_index >= num_of_output_tensor) return false;
380 
381  status = AiliaTFLite.ailiaTFLiteGetOutputTensorIndex(instance, ref tensor_index, output_index);
382  if(!CheckStatus(status, "ailiaTFLiteGetOutputTensorIndex")) return false;
383  status = AiliaTFLite.ailiaTFLiteGetTensorDimension(instance, ref tensor_dim, tensor_index);
384  shape = new Int32[tensor_dim];
385  if(!CheckStatus(status, "ailiaTFLiteGetTensorDimension")) return false;
386  status = AiliaTFLite.ailiaTFLiteGetTensorShape(instance, shape, tensor_index);
387  if(!CheckStatus(status, "ailiaTFLiteGetTensorShape")) return false;
388  status = AiliaTFLite.ailiaTFLiteGetTensorBuffer(instance, ref buffer, tensor_index);
389  if(!CheckStatus(status, "ailiaTFLiteGetTensorBuffer")) return false;
390  status = AiliaTFLite.ailiaTFLiteGetTensorType(instance, ref tensor_type, tensor_index);
391  if(!CheckStatus(status, "ailiaTFLiteGetTensorType")) return false;
392 
393  // quantization params
394  if (tensor_type != AiliaTFLite.AILIA_TFLITE_TENSOR_TYPE_FLOAT32){
395  Int32 quant_count = 0;
396  status = AiliaTFLite.ailiaTFLiteGetTensorQuantizationCount(instance, ref quant_count, tensor_index);
397  if(!CheckStatus(status, "ailiaTFLiteGetTensorQuantizationCount")) return false;
398  quant_scale = new float[quant_count];
399  status = AiliaTFLite.ailiaTFLiteGetTensorQuantizationScale(instance, quant_scale, tensor_index);
400  if(!CheckStatus(status, "ailiaTFLiteGetTensorQuantizationScale")) return false;
401  quant_zero_point = new Int64[quant_count];
402  status = AiliaTFLite.ailiaTFLiteGetTensorQuantizationZeroPoint(instance, quant_zero_point, tensor_index);
403  if(!CheckStatus(status, "ailiaTFLiteGetTensorQuantizationZeroPoint")) return false;
404  status = AiliaTFLite.ailiaTFLiteGetTensorQuantizationQuantizedDimension(instance, ref quant_axis, tensor_index);
405  if(!CheckStatus(status, "ailiaTFLiteGetTensorQuantizationQuantizedDimension")) return false;
406  }
407 
408  //log("output num_of_output_tensor:"+num_of_output_tensor+", index:"+tensor_index+", dim:"+tensor_dim+", shape:"+shape[0]+","+shape[1]+","+shape[2]+", quant_count:"+quant_count+", quant_scale:"+quant_scale[0]+", quant_zero_point:"+quant_zero_point[0]+", quant_axis:"+quant_axis+", tensor_type:"+tensor_type);
409  return true;
410  }

◆ GetOutputTensorShape()

bool ailiaTFLite.AiliaTFLiteModel.GetOutputTensorShape ( ref int[]  shape,
int  output_index 
)
inline

Get the shape of output tensor.

Parameters
shapeOutput Shape
output_indexIndex of output tensor
Returns
If this function is successful, it returns true , or false otherwise.
464  {
465  Int32 status;
466  Int32 num_of_output_tensor=-1;
467  Int32 tensor_index=-1;
468  Int32 tensor_dim=-1;
469 
470  status = AiliaTFLite.ailiaTFLiteGetNumberOfOutputs(instance, ref num_of_output_tensor);
471  if(!CheckStatus(status, "ailiaTFLiteGetNumberOfOutputs")) return false;
472  if(output_index >= num_of_output_tensor) return false;
473 
474  status = AiliaTFLite.ailiaTFLiteGetOutputTensorIndex(instance, ref tensor_index, output_index);
475  if(!CheckStatus(status, "ailiaTFLiteGetOutputTensorIndex")) return false;
476  status = AiliaTFLite.ailiaTFLiteGetTensorDimension(instance, ref tensor_dim, tensor_index);
477  shape = new Int32[tensor_dim];
478  if(!CheckStatus(status, "ailiaTFLiteGetTensorDimension")) return false;
479  status = AiliaTFLite.ailiaTFLiteGetTensorShape(instance, shape, tensor_index);
480  if(!CheckStatus(status, "ailiaTFLiteGetTensorShape")) return false;
481 
482  return true;
483  }

◆ GetSummary()

string ailiaTFLite.AiliaTFLiteModel.GetSummary ( )
inline

Obtain network information and profile results.

Returns
Returns an ASCII string displaying the network information and profile results on success, or null on failure. Obtains a string containing network information and profile results.
659  {
660  if (instance == null){
661  return "";
662  }
663  UInt64 buffer_size = 0;
664  int status = AiliaTFLite.ailiaTFLiteGetSummaryLength(instance, ref buffer_size);
665  if(!CheckStatus(status, "ailiaTFLiteGetSummaryLength")) return null;
666  byte[] buffer = new byte[buffer_size];
667  status = AiliaTFLite.ailiaTFLiteGetSummary(instance, buffer, buffer_size);
668  if(!CheckStatus(status, "ailiaTFLiteGetSummary")) return null;
669  return System.Text.Encoding.ASCII.GetString(buffer);
670  }

◆ OpenFile()

bool ailiaTFLite.AiliaTFLiteModel.OpenFile ( string  tflite_model_path,
Int32  env_id,
Int32  memory_mode,
UInt32  flags 
)
inline

Create a network object from a model file.

Parameters
tflite_model_pathPathname of the tflite file (MBCS or UTF16)
env_idExecution environment used for calculation (AILIA_TFLITE_ENV_NAPI_*)
memory_modeMemory mode (AILIA_TFLITE_MEMORY_MODE_*)
flagsFlag (logical sum of AILIA_TFLITE_FLAG_*)
Returns
If this function is successful, it returns true, or false otherwise. Creates a network object from a model file.
168  {
169  if(tflite_model_path == null){
170  log("tflite_model_path is empty.");
171  return false;
172  }
173  byte[] data = ReadFile(tflite_model_path);
174 
175  return OpenMem(data, env_id, memory_mode, flags);
176  }
bool OpenMem(byte[] tflite_model_buf, Int32 env_id, Int32 memory_mode, UInt32 flags)
Creates network objects from memory.
Definition: AiliaTFLiteModel.cs:122

◆ OpenMem()

bool ailiaTFLite.AiliaTFLiteModel.OpenMem ( byte[]  tflite_model_buf,
Int32  env_id,
Int32  memory_mode,
UInt32  flags 
)
inline

Creates network objects from memory.

Parameters
tflite_model_bufPointer to data in tflite file
env_idExecution environment used for calculation (AILIA_TFLITE_ENV_NAPI_*)
memory_modeMemory mode (AILIA_TFLITE_MEMORY_MODE_*)
flagsFlag (logical sum of AILIA_TFLITE_FLAG_*)
Returns
If this function is successful, it returns true, or false otherwise. Creates network objects from memory.
122  {
123  Int32 status;
124 
125  if(tflite_model_buf == null || tflite_model_buf.Length == 0){
126  log("tflite_model_buf is empty.");
127  return false;
128  }
129  CloseInstance();
130  AiliaTFLiteLicense.CheckAndDownloadLicense();
131 
132  // fix the address
133  model_mem_handle = GCHandle.Alloc(tflite_model_buf, GCHandleType.Pinned);
134  b_model_mem_handle = true;
135 
136  status = AiliaTFLite.ailiaTFLiteCreate(ref instance, tflite_model_buf, (UInt32)tflite_model_buf.Length,
137  IntPtr.Zero, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero,
138  env_id, memory_mode, flags
139  );
140  if(!CheckStatus(status, "ailiaTFLiteCreate")) return false;
141 
142  return true;
143  }

◆ Predict()

bool ailiaTFLite.AiliaTFLiteModel.Predict ( )
inline

Perform inference.

Returns
If this function is successful, it returns true , or false otherwise.
266  {
267  Int32 status;
268 
269  status = AiliaTFLite.ailiaTFLitePredict(instance);
270  if(!CheckStatus(status, "ailiaTFLitePredict")) return false;
271 
272  return true;
273  }

◆ SelectDevice()

bool ailiaTFLite.AiliaTFLiteModel.SelectDevice ( ref string  device_list,
bool  reference_only = false 
)
inline

Select device.

Parameters
device_listSelected device name
reference_onlyOnly use nnapi reference implementation (for debug)
Returns
If this function is successful, it returns true , or false otherwise.
200  {
201  device_list = "";
202  string all_device_list = "";
203 
204  // Get Device Count
205  UInt64 device_count = 0;
206  int status = AiliaTFLite.ailiaTFLiteGetDeviceCount(instance, ref device_count);
207  if(!CheckStatus(status, "ailiaTFLiteGetDeviceCount")) return false;
208 
209  // Get Device Name and Select require device
210  Int32 [] device_idxes = new Int32[device_count];
211  UInt64 active_device_cnt = 0;
212  for (UInt64 device_idx = 0; device_idx < device_count; device_idx++){
213  IntPtr name = IntPtr.Zero;
214  status = AiliaTFLite.ailiaTFLiteGetDeviceName(instance, (int)device_idx, ref name);
215  if(!CheckStatus(status, "ailiaTFLiteGetDeviceName")) return false;
216  if (device_list != ""){
217  device_list += " , ";
218  }
219  string device_name = Marshal.PtrToStringAnsi(name);
220  if (reference_only == false || device_name.Contains("reference")){
221  device_list += device_name;
222  device_idxes[active_device_cnt] = (Int32)device_idx;
223  active_device_cnt = active_device_cnt + 1;
224  }
225  all_device_list += device_name;
226  }
227 
228  // Apply selected device
229  if (active_device_cnt == 0){
230  device_list = all_device_list;
231  }else{
232  status = AiliaTFLite.ailiaTFLiteSelectDevices(instance, device_idxes, active_device_cnt);
233  if(!CheckStatus(status, "ailiaTFLiteSelectDevices")) return false;
234  }
235  return true;
236  }

◆ SetInputTensorData()

bool ailiaTFLite.AiliaTFLiteModel.SetInputTensorData ( float[]  input_data,
int  input_index 
)
inline

Set the data of input tensor.

Parameters
input_dataInput Data
input_indexInput tensor index
Returns
If this function is successful, it returns true , or false otherwise.
500  {
501  Int32 [] input_shape = null;
502  IntPtr input_buffer = IntPtr.Zero;
503  sbyte input_tensor_type = 0;
504  float [] input_quant_scale = null;
505  Int64 [] input_quant_zero_point = null;
506  Int32 input_quant_axis = 0;
507  GetInputTensorInfo(ref input_shape, ref input_buffer, ref input_tensor_type,
508  ref input_quant_scale, ref input_quant_zero_point, ref input_quant_axis, input_index
509  );
510  int dst_data_size = input_data.Length;
511 
512  if (input_tensor_type == AiliaTFLite.AILIA_TFLITE_TENSOR_TYPE_FLOAT32){
513  Marshal.Copy(input_data, 0, input_buffer, dst_data_size);
514  }else{
515  byte[] dst_data_ptr = new byte[dst_data_size];
516  quant(dst_data_ptr, input_data, input_quant_scale[0], input_quant_zero_point[0], input_tensor_type);
517  Marshal.Copy(dst_data_ptr, 0, input_buffer, dst_data_size);
518  }
519 
520  return true;
521  }
bool GetInputTensorInfo(ref Int32[] shape, ref IntPtr buffer, ref sbyte tensor_type, ref float[] quant_scale, ref Int64[] quant_zero_point, ref Int32 quant_axis, Int32 input_index=0)
Get the input tensor.
Definition: AiliaTFLiteModel.cs:300

◆ SetProfileMode()

bool ailiaTFLite.AiliaTFLiteModel.SetProfileMode ( int  profile_mode)
inline

Enable profile mode.

Returns
Returns true on success, false on failure. Enables profile mode. After enabling profile mode and running inference, obtain the profile results via the Summary API.
634  {
635  if (instance == null){
636  return false;
637  }
638  int status = AiliaTFLite.ailiaTFLiteSetProfileMode(instance, profile_mode);
639  if(!CheckStatus(status, "ailiaTFLiteSetProfileMode")) return false;
640  return true;
641  }

Member Data Documentation

◆ instance

IntPtr ailiaTFLite.AiliaTFLiteModel.instance = IntPtr.Zero
protected

◆ logging

bool ailiaTFLite.AiliaTFLiteModel.logging = true
protected

The documentation for this class was generated from the following file: