123792 - rpi4b tflite
Summary: Pipeline succeeded and valid report was generated.
Model Details
- model name : mobilenet_v1_0.5_224_tfs_int8.tflite
- model url : Download here
Logs Details
user.log
Warning: Permanently added '192.168.2.41' (ED25519) to the list of known hosts.
Run command : benchmark_model --graph=mobilenet_v1_0.5_224_tfs_int8.tflite --num_threads=4 --enable_op_profiling=true --report_peak_memory_footprint=true --profiling_output_csv_file=log.csv --op_profiling_output_mode=csv
INFO: Created TensorFlow Lite XNNPACK delegate for CPU.
End of command : benchmark_model
error.log
Report Details
report.json
{
"GFLOPs": null,
"accuracy": null,
"ambiant_temperature": null,
"benchmark_type": "TYPE1",
"date": "2026-04-14T22:28:06.",
"energy_efficiency": null,
"flash_size": 30688104448,
"flash_usage": 0.00326591693435571,
"inference_engine": "tflite",
"inference_latency": {
"latency_per_layers": [
{
"layer_name": " [tfl.quantize]:0",
"max": 228.041,
"mean": 228.041,
"min": 228.041,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) IGEMM:0",
"max": 1311.3999999999999,
"mean": 1311.3999999999999,
"min": 1311.3999999999999,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) DWConv:1",
"max": 366.22700000000003,
"mean": 366.22700000000003,
"min": 366.22700000000003,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) GEMM:2",
"max": 566.1650000000001,
"mean": 566.1650000000001,
"min": 566.1650000000001,
"std": 0.0
},
{
"layer_name": " Delegate/Constant Pad (ND X8):3",
"max": 297.89700000000005,
"mean": 297.89700000000005,
"min": 297.89700000000005,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) DWConv:4",
"max": 116.56700000000001,
"mean": 116.56700000000001,
"min": 116.56700000000001,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) GEMM:5",
"max": 291.938,
"mean": 291.938,
"min": 291.938,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) DWConv:6",
"max": 302.67,
"mean": 302.67,
"min": 302.67,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) GEMM:7",
"max": 532.175,
"mean": 532.175,
"min": 532.175,
"std": 0.0
},
{
"layer_name": " Delegate/Constant Pad (ND X8):8",
"max": 58.433,
"mean": 58.433,
"min": 58.433,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) DWConv:9",
"max": 49.8351,
"mean": 49.8351,
"min": 49.8351,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) GEMM:10",
"max": 279.175,
"mean": 279.175,
"min": 279.175,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) DWConv:11",
"max": 178.649,
"mean": 178.649,
"min": 178.649,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) GEMM:12",
"max": 397.959,
"mean": 397.959,
"min": 397.959,
"std": 0.0
},
{
"layer_name": " Delegate/Constant Pad (ND X8):13",
"max": 15.0928,
"mean": 15.0928,
"min": 15.0928,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) DWConv:14",
"max": 26.9588,
"mean": 26.9588,
"min": 26.9588,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) GEMM:15",
"max": 206.876,
"mean": 206.876,
"min": 206.876,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) DWConv:16",
"max": 87.3299,
"mean": 87.3299,
"min": 87.3299,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) GEMM:17",
"max": 504.351,
"mean": 504.351,
"min": 504.351,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) DWConv:18",
"max": 58.7423,
"mean": 58.7423,
"min": 58.7423,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) GEMM:19",
"max": 518.897,
"mean": 518.897,
"min": 518.897,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) DWConv:20",
"max": 46.5155,
"mean": 46.5155,
"min": 46.5155,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) GEMM:21",
"max": 709.866,
"mean": 709.866,
"min": 709.866,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) DWConv:22",
"max": 240.52599999999998,
"mean": 240.52599999999998,
"min": 240.52599999999998,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) GEMM:23",
"max": 637.2370000000001,
"mean": 637.2370000000001,
"min": 637.2370000000001,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) DWConv:24",
"max": 136.206,
"mean": 136.206,
"min": 136.206,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) GEMM:25",
"max": 687.5980000000001,
"mean": 687.5980000000001,
"min": 687.5980000000001,
"std": 0.0
},
{
"layer_name": " Delegate/Constant Pad (ND X8):26",
"max": 102.67999999999999,
"mean": 102.67999999999999,
"min": 102.67999999999999,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) DWConv:27",
"max": 62.0619,
"mean": 62.0619,
"min": 62.0619,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) GEMM:28",
"max": 272.155,
"mean": 272.155,
"min": 272.155,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) DWConv:29",
"max": 67.19590000000001,
"mean": 67.19590000000001,
"min": 67.19590000000001,
"std": 0.0
},
{
"layer_name": " Delegate/Convolution (NHWC QC8) GEMM:30",
"max": 634.6800000000001,
"mean": 634.6800000000001,
"min": 634.6800000000001,
"std": 0.0
},
{
"layer_name": " Delegate/Global Average Pooling (NWC QS8):31",
"max": 8.03093,
"mean": 8.03093,
"min": 8.03093,
"std": 0.0
},
{
"layer_name": " Delegate/Fully Connected (NC QS8) GEMM:32",
"max": 21.6907,
"mean": 21.6907,
"min": 21.6907,
"std": 0.0
},
{
"layer_name": " [StatefulPartitionedCall:01]:34",
"max": 7.030930000000001,
"mean": 7.030930000000001,
"min": 7.030930000000001,
"std": 0.0
},
{
"layer_name": " Delegate/Convert (NC QS8 F32):0",
"max": 1.35052,
"mean": 1.35052,
"min": 1.35052,
"std": 0.0
}
],
"max": 58519.0,
"mean": 10030.2,
"min": 6382.0,
"std": 6282.0,
"troughput": null
},
"load_accelerator": null,
"load_cpu": 76,
"model_file_name": "mobilenet_v1_0.5_224_tfs_int8.tflite",
"model_size": 1002248,
"nb_inference": 97,
"nb_parameters_model": null,
"postprocess_time": {
"max": null,
"mean": null,
"min": null,
"std": null
},
"power_consumption": null,
"preprocess_time": {
"max": null,
"mean": null,
"min": null,
"std": null
},
"ram_peak_usage": 0.31646254214607344,
"ram_size": 3976114176,
"target": "rpi4b",
"target_id": "0000019c716362ba976cf0240430d8609d2545907baf474a564f4d508e331fdf",
"temperature": null,
"version": 0,
"version_tag": ""
}