...
# predict latency
result = {}
for model in input_model_list:
    latency = predictor.predict(model, model_type)  # in unit of ms
    result[os.path.basename(model)] = latency
    logging.result(f'[RESULT] predict latency for {os.path.basename(model)}: {latency} ms')
return result
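The CLI code above ultimately goes through nn-Meter's Python API, which can also be called directly. A minimal sketch, assuming the "cortexA76cpu_tflite21" predictor and the model path as illustrative placeholders:

# Sketch: calling nn-Meter's Python API directly instead of the CLI.
# The predictor name and model path are illustrative, not taken from this run.
from nn_meter import load_latency_predictor

predictor = load_latency_predictor("cortexA76cpu_tflite21")            # hardware-specific predictor
latency = predictor.predict("path/to/model.onnx", model_type="onnx")  # latency in ms
print(f"predicted latency: {latency} ms")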
#!/bin/bash
# Iterate over every file in the current directory
for file in *
do
    # Check whether the file name ends with "_finegrained2.pkl"
    if [[ $file == *_finegrained2.pkl ]]
    then
        # Replace "_finegrained2.pkl" in the file name with ".pkl"
        new_name=${file/_finegrained2.pkl/.pkl}
        # Rename the file
        echo "$new_name"
        mv "$file" "$new_name"
    fi
done
import tensorflow as tf
from tensorflow.keras.applications.resnet50 import ResNet50
from tensorflow.python.framework.convert_to_constants import convert_variables_to_constants_v2

# Load the model
model = ResNet50(weights='imagenet')
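The `convert_variables_to_constants_v2` call below takes a ConcreteFunction (`full_model`), whose construction is not shown above. A minimal sketch of the usual way to obtain it from the Keras model, assuming the standard tf.function tracing pattern:

# Sketch: wrap the Keras model in a tf.function and trace it into a ConcreteFunction,
# using the model's own input shape and dtype. This defines the `full_model` used below.
full_model = tf.function(lambda x: model(x))
full_model = full_model.get_concrete_function(
    tf.TensorSpec(model.inputs[0].shape, model.inputs[0].dtype))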
# Get frozen ConcreteFunction
frozen_func = convert_variables_to_constants_v2(full_model)
frozen_func.graph.as_graph_def()

layers = [op.name for op in frozen_func.graph.get_operations()]
print("-" * 50)
print("Frozen model layers: ")
for layer in layers:
    print(layer)

print("-" * 50)
print("Frozen model inputs: ")
print(frozen_func.inputs)
print("Frozen model outputs: ")
print(frozen_func.outputs)

# Save frozen graph from frozen ConcreteFunction to hard drive
tf.io.write_graph(graph_or_graph_def=frozen_func.graph,
                  logdir="./frozen_models",
                  name="frozen_graph.pb",
                  as_text=False)
# Convert the model to TensorFlow Lite format and save it as a .tflite file
converter = tf.lite.TFLiteConverter.from_keras_model(model)
tflite_model = converter.convert()
with open('resnet50.tflite', 'wb') as f:
    f.write(tflite_model)
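Before pushing the file to the device, the converted model can be sanity-checked locally with the TFLite Python interpreter. A minimal sketch; the inspection shown is only illustrative:

# Sketch: load the converted .tflite file and print its input/output tensor details
# to confirm the conversion produced a usable model.
interpreter = tf.lite.Interpreter(model_path='resnet50.tflite')
interpreter.allocate_tensors()
print(interpreter.get_input_details())
print(interpreter.get_output_details())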
STARTING!
Log parameter values verbosely: [0]
Min num runs: [50]
Num threads: [4]
Min warmup runs: [30]
Graph: [/mnt/sdcard/tflite_models/resnet50.tflite]
#threads used for CPU inference: [4]
Loaded model /mnt/sdcard/tflite_models/resnet50.tflite
INFO: Initialized TensorFlow Lite runtime.
INFO: Created TensorFlow Lite XNNPACK delegate for CPU.
INFO: Replacing 75 node(s) with delegate (TfLiteXNNPackDelegate) node, yielding 1 partitions.
The input model file size (MB): 102.161
Initialized session in 98.471ms.
Running benchmark for at least 30 iterations and at least 0.5 seconds but terminate if exceeding 150 seconds.
count=30 first=104448 curr=87126 min=86737 max=104448 avg=88622.5 std=3079

Running benchmark for at least 50 iterations and at least 1 seconds but terminate if exceeding 150 seconds.
count=50 first=87163 curr=89038 min=86939 max=93704 avg=88199.2 std=1353

Inference timings in us: Init: 98471, First inference: 104448, Warmup (avg): 88622.5, Inference (avg): 88199.2
Note: as the benchmark tool itself affects memory footprint, the following is only APPROXIMATE to the actual memory footprint of the model at runtime. Take the information at your discretion.
Memory footprint delta from the start of the tool (MB): init=134.562 overall=208.699
(nn-Meter) Start latency prediction ...
(nn-Meter) Empty shape information with Constant_339
(nn-Meter) Empty shape information with Shape_340
(nn-Meter) Empty shape information with Gather_341
(nn-Meter) Empty shape information with Constant_342
(nn-Meter) Empty shape information with Constant_343
(nn-Meter) Empty shape information with Unsqueeze_344
(nn-Meter) Empty shape information with Unsqueeze_345
(nn-Meter) Empty shape information with Unsqueeze_346
(nn-Meter) Empty shape information with Concat_347
(nn-Meter) Empty shape information with Reshape_348
(nn-Meter) Empty shape information with Constant_350
(nn-Meter) Empty shape information with Shape_351
(nn-Meter) Empty shape information with Gather_352
(nn-Meter) Empty shape information with Constant_353
(nn-Meter) Empty shape information with Constant_354
...
(nn-Meter) Empty shape information with Unsqueeze_scores
Traceback (most recent call last):
  File "/root/anaconda3/envs/nnmeter_tflite/bin/nn-meter", line 8, in <module>
    sys.exit(nn_meter_cli())
  File "/root/anaconda3/envs/nnmeter_tflite/lib/python3.8/site-packages/nn_meter/utils/nn_meter_cli/interface.py", line 266, in nn_meter_cli
    args.func(args)
  File "/root/anaconda3/envs/nnmeter_tflite/lib/python3.8/site-packages/nn_meter/utils/nn_meter_cli/predictor.py", line 56, in apply_latency_predictor_cli
    latency = predictor.predict(model, model_type) # in unit of ms
  File "/root/anaconda3/envs/nnmeter_tflite/lib/python3.8/site-packages/nn_meter/predictor/nn_meter_predictor.py", line 111, in predict
    self.kd.load_graph(graph)
  File "/root/anaconda3/envs/nnmeter_tflite/lib/python3.8/site-packages/nn_meter/kernel_detector/kernel_detector.py", line 19, in load_graph
    new_graph = convert_nodes(graph)
  File "/root/anaconda3/envs/nnmeter_tflite/lib/python3.8/site-packages/nn_meter/kernel_detector/utils/ir_tools.py", line 14, in convert_nodes
    type = node["attr"]["type"]
KeyError: 'type'
import onnx

# Path to the ONNX model file
model_file = "/root/workspace/nn-Meter/workspace/models/mobilenetv3small_0.onnx"
model = onnx.load(model_file)
op_types = set()
for node in model.graph.node:
    op_types.add(node.op_type)
op_types = list(op_types)
for op_type in op_types:
    print(op_type)
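The "Empty shape information" warnings above indicate that the Shape/Gather/Unsqueeze nodes in this ONNX graph carry no tensor shapes. One diagnostic step worth trying is ONNX shape inference; a sketch, not a guaranteed fix for the nn-Meter KeyError (the "_shaped.onnx" output name is just an example):

# Sketch: run ONNX shape inference and write out an annotated copy of the model.
# Whether this resolves nn-Meter's empty-shape warnings depends on the model.
import onnx
from onnx import shape_inference

inferred = shape_inference.infer_shapes(onnx.load(model_file))
onnx.save(inferred, model_file.replace(".onnx", "_shaped.onnx"))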
deb http://mirrors.aliyun.com/ubuntu/ focal main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ focal main restricted universe multiverse

deb http://mirrors.aliyun.com/ubuntu/ focal-security main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ focal-security main restricted universe multiverse

deb http://mirrors.aliyun.com/ubuntu/ focal-updates main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ focal-updates main restricted universe multiverse

# deb http://mirrors.aliyun.com/ubuntu/ focal-proposed main restricted universe multiverse
# deb-src http://mirrors.aliyun.com/ubuntu/ focal-proposed main restricted universe multiverse

deb http://mirrors.aliyun.com/ubuntu/ focal-backports main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ focal-backports main restricted universe multiverse
# Source code repositories are commented out by default; uncomment them if needed
deb http://mirrors.ustc.edu.cn/ubuntu/ focal main restricted universe multiverse
# deb-src https://mirrors.ustc.edu.cn/ubuntu/ focal main restricted universe multiverse

deb http://mirrors.ustc.edu.cn/ubuntu/ focal-security main restricted universe multiverse
# deb-src https://mirrors.ustc.edu.cn/ubuntu/ focal-security main restricted universe multiverse

deb http://mirrors.ustc.edu.cn/ubuntu/ focal-updates main restricted universe multiverse
# deb-src https://mirrors.ustc.edu.cn/ubuntu/ focal-updates main restricted universe multiverse

deb http://mirrors.ustc.edu.cn/ubuntu/ focal-backports main restricted universe multiverse
# deb-src https://mirrors.ustc.edu.cn/ubuntu/ focal-backports main restricted universe multiverse

# Pre-release (proposed) repository; enabling it is not recommended
# deb https://mirrors.ustc.edu.cn/ubuntu/ focal-proposed main restricted universe multiverse
# deb-src https://mirrors.ustc.edu.cn/ubuntu/ focal-proposed main restricted universe multiverse