{"id":52960,"date":"2025-08-11T21:59:18","date_gmt":"2025-08-11T13:59:18","guid":{"rendered":"https:\/\/www.wsisp.com\/helps\/52960.html"},"modified":"2025-08-11T21:59:18","modified_gmt":"2025-08-11T13:59:18","slug":"autudl%e5%8d%8e%e4%b8%ba%e6%98%87%e8%85%be%e7%b3%bb%e5%88%97npu%e7%ae%80%e4%bb%8b%e5%92%8c%e9%83%a8%e7%bd%b2%e6%8e%a8%e7%90%86yolo11-yolov8-yolov5%e5%88%86%e5%89%b2%e6%a8%a1%e5%9e%8b","status":"publish","type":"post","link":"https:\/\/www.wsisp.com\/helps\/52960.html","title":{"rendered":"Autudl\u534e\u4e3a\u6607\u817e\u7cfb\u5217NPU\u7b80\u4ecb\u548c\u90e8\u7f72\u63a8\u7406yolo11 yolov8 yolov5\u5206\u5272\u6a21\u578b"},"content":{"rendered":"<h3>0.\u914d\u7f6eAutudl<\/h3>\n<p>\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u4e0b\u9762\u56fe\u7247\u662f\u6211\u6240\u79df\u7684\u6607\u817e\u5361\u548c\u5177\u4f53\u73af\u5883\u7248\u672c&#xff0c;\u592a\u5177\u4f53\u7684\u5c31\u4e0d\u8bf4\u4e86&#xff0c;\u6709\u9700\u8981\u7684\u8bdd\u6211\u5355\u72ec\u51fa\u4e00\u671fAutudl\u79df\u663e\u5361\u7684\u6559\u7a0b&#xff0c;\u4e3b\u8981\u662f\u4e3a\u4e86\u5b66\u4e60\u6607\u817e\u73af\u5883\u5982\u4f55\u8fd0\u884cYolo\u7cfb\u5217\u6a21\u578b\u3002<\/p>\n<p><img decoding=\"async\" src=\"https:\/\/www.wsisp.com\/helps\/wp-content\/uploads\/2025\/08\/20250811135915-6899f7339d14e.png\" \/><\/p>\n<h3>0.1\u534e\u4e3a\u6607\u817e\u82af\u7247&#xff08;Ascend&#xff09;\u7b80\u4ecb<\/h3>\n<h4>1.Ascend 310&#xff08;\u00a0\u8fb9\u7f18\u63a8\u7406 SoC&#xff09;<\/h4>\n<p>\u00a0 \u00a0 \u00a0\u5728\u4ec5 8\u202fW \u7684\u529f\u8017\u4e0b&#xff0c;\u652f\u6301 FP16 \u8fbe 8\u202fTOPS&#xff0c;INT8 \u8fbe 16\u202fTOPS\u3002 \u00a0 \u00a0 \u00a0 \u652f\u6301\u591a\u901a\u9053\u9ad8\u6e05\u89c6\u9891\u5904\u7406&#xff1a;\u6700\u9ad8\u53ef\u5b9e\u73b0 16 \u8def 1080p H.264\/H.265 \u89e3\u7801\u30011 \u8def 1080p \u89c6\u9891\u7f16\u7801&#xff0c;\u4ee5\u53ca JPEG\/PNG \u7f16\u89e3\u7801\u529f\u80fd\u3002 \u00a0 \u00a0 \u00a0 \u00a0 \u5185\u542b 2 \u4e2a Da\u202fVinci Max AI \u6838\u5fc3\u30018 
\u4e2a ARM Cortex\u2011A55 CPU \u6838\u5fc3\u548c 8\u202fMB \u7247\u4e0a\u7f13\u5b58\u3002<\/p>\n<h4>2.Ascend 910&#xff08;\u6570\u636e\u4e2d\u5fc3\u7ea7\u8bad\u7ec3 NPU&#xff09;<\/h4>\n<p>\u00a0 \u00a0 \u00a0 \u00a0 \u4e8e 2019 \u5e74\u53d1\u5e03&#xff0c;\u4ee3\u53f7 Ascend\u2011Max&#xff0c;\u57fa\u4e8e Da\u202fVinci \u67b6\u6784&#xff0c;\u62e5\u6709 32 \u4e2a Da\u202fVinci Max AI \u6838\u5fc3&#xff0c;FP16 \u8fbe 256\u202fTFLOPS&#xff0c;INT8 \u8fbe 512\u202fTOPS\u3002 \u00a0 \u00a0 \u00a0 \u00a0 \u914d\u5907\u9ad8\u5e26\u5bbd\u4e92\u8054&#xff08;NoC Mesh 1024 \u4f4d&#xff0c;HBM2E \u5e26\u5bbd 1.2\u202fTB\/s&#xff0c;350\u202fW \u529f\u8017&#xff09;<\/p>\n<h4>3.Ascend 910C&#xff08;2025 \u5e74\u4e3b\u6253\u4ea7\u54c1&#xff09;<\/h4>\n<p>\u00a0 \u00a0 \u00a0 \u00a0 \u57fa\u4e8e SMIC \u7684 7\u202fnm (N&#043;2) \u5de5\u827a&#xff0c;\u91c7\u7528\u53cc 910B Die \u5c01\u88c5\u8bbe\u8ba1&#xff0c;\u62e5\u6709\u7ea6 53 \u4ebf\u6676\u4f53\u7ba1\u3002 \u00a0 \u00a0 \u00a0 \u00a0 \u5b9e\u73b0 FP16 \u7ea6 800\u202fTFLOPS \u6027\u80fd&#xff0c;\u642d\u8f7d\u9ad8\u8fbe 128\u202fGB \u7684 HBM3 \u5b58\u50a8&#xff08;\u9886\u5148\u4e8e NVIDIA H100 \u7684 80\u202fGB&#xff09;\u3002\u63a8\u6d4b\u6027\u80fd\u7ea6\u4e3a NVIDIA H100 \u63a8\u7406\u6027\u80fd\u7684 60%<\/p>\n<h3>0.2MindSpore\u6846\u67b6<\/h3>\n<p>\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0MindSpore \u662f\u534e\u4e3a\u81ea\u4e3b\u7814\u53d1\u7684\u5168\u573a\u666f AI \u8ba1\u7b97\u6846\u67b6&#xff0c;\u8986\u76d6\u4e91\u3001\u8fb9\u3001\u7aef\u591a\u79cd\u90e8\u7f72\u73af\u5883\u3002\u5411\u4e0a\u652f\u6301\u81ea\u7136\u8bed\u8a00\u5904\u7406\u3001\u8ba1\u7b97\u673a\u89c6\u89c9\u3001\u79d1\u5b66\u8ba1\u7b97\u7b49\u591a\u7c7b AI \u5e94\u7528&#xff0c;\u5411\u4e0b\u901a\u8fc7 CANN \u5bf9\u63a5\u6607\u817e AI \u5904\u7406\u5668&#xff0c;\u5b9e\u73b0\u8f6f\u786c\u4ef6\u534f\u540c\u4f18\u5316\u3002 \u00a0 \u00a0 \u00a0 \u00a0 
\u00a0\u63d0\u4f9b\u7aef\u5230\u7aef\u7684\u6a21\u578b\u5f00\u53d1\u80fd\u529b&#xff0c;\u5305\u62ec\u6570\u636e\u5904\u7406\u3001\u6a21\u578b\u6784\u5efa\u3001\u8bad\u7ec3\u3001\u63a8\u7406\u4e0e\u90e8\u7f72&#xff0c;\u5185\u7f6e\u81ea\u52a8\u5e76\u884c\u3001\u7b97\u5b50\u878d\u5408\u3001\u6df7\u5408\u7cbe\u5ea6\u7b49\u4f18\u5316\u673a\u5236&#xff0c;\u663e\u8457\u63d0\u5347\u6027\u80fd\u4e0e\u7b97\u529b\u5229\u7528\u7387\u3002\u5176\u529f\u80fd\u5b9a\u4f4d\u7c7b\u4f3c\u4e8e TensorFlow \u6216 PyTorch&#xff0c;\u5728\u6607\u817e\u751f\u6001\u4e2d\u627f\u62c5\u6838\u5fc3 AI \u6846\u67b6\u7684\u89d2\u8272\u3002<\/p>\n<h3>0.3CANN\u8ba1\u7b97\u67b6\u6784&#xff08;Compute Architecture for Neural Networks&#xff09;<\/h3>\n<p>\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u534e\u4e3a\u9762\u5411AI\u573a\u666f\u63a8\u51fa\u7684\u5f02\u6784\u8ba1\u7b97\u67b6\u6784 CANN&#xff0c;\u4f5c\u4e3a\u6607\u817eAI\u5904\u7406\u5668\u7684\u8f6f\u4ef6\u9a71\u52a8\u4e0e\u7b97\u5b50\u52a0\u901f\u5e73\u53f0&#xff0c;\u5411\u4e0a\u517c\u5bb9\u591a\u79cd\u4e3b\u6d41AI\u6846\u67b6&#xff08;\u5982 MindSpore\u3001PyTorch\u3001TensorFlow \u7b49&#xff09;&#xff0c;\u5411\u4e0b\u5bf9\u63a5\u6607\u817e\u7cfb\u5217AI\u5904\u7406\u5668\u7684\u7b97\u5b50\u5e93\u3001\u8fd0\u884c\u65f6\u4e0e\u8c03\u5ea6\u5c42&#xff0c;\u53d1\u6325\u627f\u4e0a\u542f\u4e0b\u7684\u5173\u952e\u4f5c\u7528\u3002CANN \u5c4f\u853d\u4e86\u5e95\u5c42\u786c\u4ef6\u5dee\u5f02&#xff0c;\u4f7f\u5f00\u53d1\u8005\u80fd\u591f\u5728\u4e0d\u5173\u5fc3\u786c\u4ef6\u7ec6\u8282\u7684\u60c5\u51b5\u4e0b\u65e0\u7f1d\u4f7f\u7528\u4e3b\u6d41\u6df1\u5ea6\u5b66\u4e60\u6846\u67b6\u8fdb\u884c\u6a21\u578b\u5f00\u53d1\u4e0e\u90e8\u7f72\u3002<\/p>\n<p>\u00a0 \u00a0 \u00a0 \u00a0 \u540c\u65f6&#xff0c;CANN 
\u9488\u5bf9\u591a\u6837\u5316\u7684\u5e94\u7528\u573a\u666f\u63d0\u4f9b\u591a\u5c42\u6b21\u7f16\u7a0b\u63a5\u53e3&#xff08;\u5982\u7b97\u5b50\u7ea7\u3001\u56fe\u7ea7\u3001\u5e94\u7528\u7ea7&#xff09;&#xff0c;\u652f\u6301\u7528\u6237\u5feb\u901f\u6784\u5efa\u548c\u4f18\u5316\u57fa\u4e8e\u6607\u817e\u5e73\u53f0\u7684AI\u5e94\u7528\u4e0e\u4e1a\u52a1&#xff0c;\u663e\u8457\u63d0\u5347\u6a21\u578b\u7684\u6267\u884c\u6548\u7387\u4e0e\u786c\u4ef6\u7b97\u529b\u5229\u7528\u7387&#xff0c;\u5176\u529f\u80fd\u5b9a\u4f4d\u7c7b\u4f3c\u4e8e NVIDIA \u7684 CUDA \u5e73\u53f0\u3002<\/p>\n<p>\u603b\u7ed3&#xff1a;CANN\u7528\u6765\u5c4f\u853d\u5e95\u5c42\u786c\u4ef6\u5dee\u5f02&#xff0c;\u4f7f\u5f97\u7528\u6237\u80fd\u591f\u65e0\u7f1d\u4f7f\u7528Pytorch\u7b49\u4e3b\u6d41\u6df1\u5ea6\u5b66\u4e60\u6846\u67b6\u8fdb\u884c\u5f00\u53d1<\/p>\n<p> \u5de5\u4f5c\u6d41\u7a0b&#xff1a;<br \/>\n ONNX<br \/>\n \u2192<br \/>\n  ATC \u8f6c\u6362<br \/>\n \u2192<br \/>\n  ACLruntime \u8c03\u7528<br \/>\n \u2192 \u63a8\u7406\u7ed3\u679c<\/p>\n<h2>1\u914d\u7f6e\u73af\u5883<\/h2>\n<h3>1.1\u521b\u5efa\u4e00\u4e2ayolo\u73af\u5883\u5e76\u8fdb\u5165<\/h3>\n<p> conda create -n yolo11 python&#061;3.10<br \/>\nconda activate yolo11 <\/p>\n<h3>\u00a01.2\u5b89\u88c5yolo\u6240\u9700\u8981\u7684\u5e93<\/h3>\n<p> pip install ultralytics <\/p>\n<h3>1.3\u5b89\u88c5ais_bench\u63a8\u7406\u5de5\u5177&#xff0c;\u548caclruntime<\/h3>\n<p>\u6839\u636e\u81ea\u5df1\u4f7f\u7528\u7684python\u3001\u7248\u672c\u7cfb\u7edf\u548c\u67b6\u6784\u9009\u62e9\u5177\u4f53\u7684whl\u6587\u4ef6<\/p>\n<p><span class=\"link-card-box\"><span class=\"link-title\">tools: Ascend tools &#8211; Gitee.com<\/span><span class=\"link-link\"><img decoding=\"async\" alt=\" \" class=\"link-link-icon\" src=\"https:\/\/www.wsisp.com\/helps\/wp-content\/uploads\/2025\/08\/20250811135916-6899f734a1f62.png\" \/>https:\/\/gitee.com\/ascend\/tools\/tree\/master\/ais-bench_workload\/tool\/ais_bench#%E4%B8%8B%E8%BD%BDwhl%E5%8C%85%E5%AE%89%E8%A3%85<\/span><\/span><img 
decoding=\"async\" src=\"https:\/\/www.wsisp.com\/helps\/wp-content\/uploads\/2025\/08\/20250811135916-6899f734d23df.png\" \/><\/p>\n<p>\u00a0\u4e0b\u8f7d\u5230\u672c\u5730\u540e&#xff0c;\u5b89\u88c5\u4e24\u4e2awhl\u6587\u4ef6<\/p>\n<h4>1.3.1\u5b89\u88c5ais_bench<\/h4>\n<p>pip install ais_bench-0.0.2-py3-none-any.whl -i https:\/\/mirrors.tuna.tsinghua.edu.cn\/pypi\/web\/simple <\/p>\n<h4>1.3.2\u5b89\u88c5aclruntime<\/h4>\n<p>pip install aclruntime-0.0.2-cp310-cp310-linux_aarch64.whl <\/p>\n<p>\u540e\u9762\u542f\u52a8\u53ef\u80fd\u4f1a\u62a5\u4e00\u4e9b\u9519\u8bef&#xff0c;\u8ba9ai\u89e3\u51b3\u4e00\u4e0b&#xff0c;\u5f88\u597d\u89e3\u51b3<\/p>\n<p>NEW_LIB&#061;~\/autodl-tmp\/libstdcpp_arm64\/usr\/lib\/aarch64-linux-gnu\/libstdc&#043;&#043;.so.6<br \/>\nln -sf \/usr\/lib\/aarch64-linux-gnu\/libstdc&#043;&#043;.so.6 \/root\/miniconda3\/lib\/libstdc&#043;&#043;.so.6 <\/p>\n<h2>2\u51c6\u5907\u6e90\u7801\u548c\u6a21\u578b<\/h2>\n<p>\u6a21\u578b\u53ef\u4ee5\u662f\u81ea\u5df1\u7684&#xff0c;\u4e5f\u53ef\u4ee5\u4ece\u5b98\u7f51\u4e0a\u4e0b\u8f7d<\/p>\n<h3>2.1\u4e0b\u8f7d\u6e90\u7801<\/h3>\n<p>https:\/\/github.com\/ultralytics\/ultralytics<\/p>\n<h3>2.2\u4e0b\u8f7d\u6a21\u578b\u6743\u91cd<\/h3>\n<p>Ultralytics YOLO11 &#8211; Ultralytics YOLO \u6587\u6863<\/p>\n<h2>3\u8f6c\u6362\u6a21\u578b<\/h2>\n<h3>3.1\u5c06.pt\u683c\u5f0f\u8f6c\u6362\u4e3a.ONNX\u683c\u5f0f<\/h3>\n<p>from ultralytics import YOLO<\/p>\n<p># Load a model<br \/>\nmodel &#061; YOLO(r&#034;\/home\/ultralytics-main\/runs\/train\/yolo11n\/weights\/best.pt&#034;)<\/p>\n<p># Export onnx,\u4e00\u5b9a\u8981\u8bbe\u7f6eopset&#061;11 \u4e0d\u7136\u4f1a\u62a5\u9519<br \/>\nmodel.export(format&#061;&#034;onnx&#034;,opset&#061;11) <\/p>\n<h4>3.1.1\u67e5\u770bONNX\u7f51\u7edc\u7ed3\u6784\u548c\u8f93\u5165\u8f93\u51fashape<\/h4>\n<p>Netron<\/p>\n<p>\u8fdb\u5165\u7f51\u7ad9\u540e&#xff0c;\u4e0a\u4f20ONNX\u6a21\u578b\u6587\u4ef6<\/p>\n<p><img decoding=\"async\" 
src=\"https:\/\/www.wsisp.com\/helps\/wp-content\/uploads\/2025\/08\/20250811135917-6899f7357e31f.png\" \/><\/p>\n<h3>3.2\u5c06.ONNX\u683c\u5f0f\u8f6c\u6362\u4e3a.om\u683c\u5f0f\u00a0<\/h3>\n<p>atc \\\\<br \/>\n  &#8211;model&#061;\/root\/autodl-tmp\/ultralytics-main\/runs\/train\/exp4\/weights\/best.onnx \\\\<br \/>\n  &#8211;framework&#061;5 \\\\<br \/>\n  &#8211;output&#061;yolo11s \\\\<br \/>\n  &#8211;input_format&#061;NCHW \\\\<br \/>\n  &#8211;input_shape&#061;&#034;images:1,3,640,640&#034; \\\\<br \/>\n  &#8211;soc_version&#061;Ascend910B2 <\/p>\n<p>\u53c2\u6570\u5206\u522b\u662f&#xff1a; &#8211;model \u9700\u8981\u8f6c\u6362\u6a21\u578b\u7684\u8def\u5f84<\/p>\n<p>&#8211;framework \u6846\u67b6&#xff0c;ONNX\u5c31\u75285<\/p>\n<p>&#8211;output \u00a0\u8f93\u51fa.om\u6587\u4ef6\u7684\u540d\u5b57<\/p>\n<p>&#8211;input_shape \u6a21\u578b\u7684\u8f93\u5165shape&#xff0c;640 640\u662f\u8bad\u7ec3\u65f6\u56fe\u7247\u7684\u5927\u5c0f&#xff0c;\u6839\u636e\u9700\u8981\u4fee\u6539<\/p>\n<p>&#8211;soc_version NPU\u7684\u578b\u53f7<\/p>\n<p>\u6839\u636e\u4e0b\u9762\u7684\u6307\u4ee4\u67e5\u8be2npu\u578b\u53f7\u540e&#xff0c;\u66ff\u6362910B2<\/p>\n<p>npu-smi info   #\u7528\u8fd9\u4e2a\u6307\u4ee4\u53ef\u4ee5\u67e5\u8be2npu\u578b\u53f7 <\/p>\n<h3>4.\u63a8\u7406\u6e90\u7801<\/h3>\n<p>\u5728ultralytics\u8def\u5f84\u4e0b\u521b\u5efarun_seg.py<\/p>\n<p>\u5c06\u4e0b\u8ff0\u4ee3\u7801\u590d\u5236\u8fdb\u53bb<\/p>\n<p>import argparse<br \/>\nimport time<br \/>\nimport cv2<br \/>\nimport numpy as np<br \/>\nimport os<\/p>\n<p>from ais_bench.infer.interface import InferSession<\/p>\n<p>class YOLO:<br \/>\n    &#034;&#034;&#034;YOLO segmentation model class for handling inference&#034;&#034;&#034;<\/p>\n<p>    def __init__(self, om_model, imgsz&#061;(640, 640), device_id&#061;0, model_ndtype&#061;np.single, mode&#061;&#034;static&#034;, postprocess_type&#061;&#034;v8&#034;, aipp&#061;False):<br \/>\n        &#034;&#034;&#034;<br \/>\n        Initialization.<\/p>\n<p>        Args:<br \/>\n 
           om_model (str): Path to the om model.<br \/>\n        &#034;&#034;&#034;<\/p>\n<p>        # \u6784\u5efaais_bench\u63a8\u7406\u5f15\u64ce<br \/>\n        self.session &#061; InferSession(device_id&#061;device_id, model_path&#061;om_model)<\/p>\n<p>        # Numpy dtype: support both FP32(np.single) and FP16(np.half) om model<br \/>\n        self.ndtype &#061; model_ndtype<br \/>\n        self.mode &#061; mode<br \/>\n        self.postprocess_type &#061; postprocess_type<br \/>\n        self.aipp &#061; aipp <\/p>\n<p>        self.model_height, self.model_width &#061; imgsz[0], imgsz[1]  # \u56fe\u50cfresize\u5927\u5c0f<\/p>\n<p>    def __call__(self, im0, conf_threshold&#061;0.4, iou_threshold&#061;0.45):<br \/>\n        &#034;&#034;&#034;<br \/>\n        The whole pipeline: pre-process -&gt; inference -&gt; post-process.<\/p>\n<p>        Args:<br \/>\n            im0 (Numpy.ndarray): original input image.<br \/>\n            conf_threshold (float): confidence threshold for filtering predictions.<br \/>\n            iou_threshold (float): iou threshold for NMS.<\/p>\n<p>        Returns:<br \/>\n            boxes (List): list of bounding boxes.<br \/>\n        &#034;&#034;&#034;<br \/>\n        # \u524d\u5904\u7406Pre-process<br \/>\n        t1 &#061; time.time()<br \/>\n        im, ratio, (pad_w, pad_h) &#061; self.preprocess(im0)<br \/>\n        pre_time &#061; round(time.time() &#8211; t1, 3)<\/p>\n<p>        # \u63a8\u7406 inference<br \/>\n        t2 &#061; time.time()<br \/>\n        preds &#061; self.session.infer([im], mode&#061;self.mode)  # mode\u6709\u52a8\u6001&#034;dymshape&#034;\u548c\u9759\u6001&#034;static&#034;\u7b49<br \/>\n        det_time &#061; round(time.time() &#8211; t2, 3)<\/p>\n<p>        # \u540e\u5904\u7406Post-process<br \/>\n        t3 &#061; time.time()<br \/>\n        if self.postprocess_type &#061;&#061; &#034;v5&#034;:<br \/>\n            boxes, segments, masks &#061; self.postprocess_v5(preds,<br \/>\n                    
                im0&#061;im0,<br \/>\n                                    ratio&#061;ratio,<br \/>\n                                    pad_w&#061;pad_w,<br \/>\n                                    pad_h&#061;pad_h,<br \/>\n                                    conf_threshold&#061;conf_threshold,<br \/>\n                                    iou_threshold&#061;iou_threshold,<br \/>\n                                    )<\/p>\n<p>        elif self.postprocess_type &#061;&#061; &#034;v8&#034;:<br \/>\n            boxes, segments, masks &#061; self.postprocess_v8(preds,<br \/>\n                                    im0&#061;im0,<br \/>\n                                    ratio&#061;ratio,<br \/>\n                                    pad_w&#061;pad_w,<br \/>\n                                    pad_h&#061;pad_h,<br \/>\n                                    conf_threshold&#061;conf_threshold,<br \/>\n                                    iou_threshold&#061;iou_threshold,<br \/>\n                                    )<\/p>\n<p>        else:<br \/>\n            boxes, segments, masks &#061; [], [], []<\/p>\n<p>        post_time &#061; round(time.time() &#8211; t3, 3)<\/p>\n<p>        return boxes, segments, masks, (pre_time, det_time, post_time)<\/p>\n<p>    # \u524d\u5904\u7406&#xff0c;\u5305\u62ec&#xff1a;resize, pad, \u5176\u4e2dHWC to CHW&#xff0c;BGR to RGB&#xff0c;\u5f52\u4e00\u5316&#xff0c;\u589e\u52a0\u7ef4\u5ea6CHW -&gt; BCHW\u53ef\u9009\u62e9\u662f\u5426\u5f00\u542fAIPP\u52a0\u901f\u5904\u7406<br \/>\n    def preprocess(self, img):<br \/>\n        &#034;&#034;&#034;<br \/>\n        Pre-processes the input image.<\/p>\n<p>        Args:<br \/>\n            img (Numpy.ndarray): image about to be processed.<\/p>\n<p>        Returns:<br \/>\n            img_process (Numpy.ndarray): image preprocessed for inference.<br \/>\n            ratio (tuple): width, height ratios in letterbox.<br \/>\n            pad_w (float): width padding in letterbox.<br \/>\n            pad_h 
(float): height padding in letterbox.<br \/>\n        &#034;&#034;&#034;<br \/>\n        # Resize and pad input image using letterbox() (Borrowed from Ultralytics)<br \/>\n        shape &#061; img.shape[:2]  # original image shape<br \/>\n        new_shape &#061; (self.model_height, self.model_width)<br \/>\n        r &#061; min(new_shape[0] \/ shape[0], new_shape[1] \/ shape[1])<br \/>\n        ratio &#061; r, r<br \/>\n        new_unpad &#061; int(round(shape[1] * r)), int(round(shape[0] * r))<br \/>\n        pad_w, pad_h &#061; (new_shape[1] &#8211; new_unpad[0]) \/ 2, (new_shape[0] &#8211; new_unpad[1]) \/ 2  # wh padding<br \/>\n        if shape[::-1] !&#061; new_unpad:  # resize<br \/>\n            img &#061; cv2.resize(img, new_unpad, interpolation&#061;cv2.INTER_LINEAR)<\/p>\n<p>        top, bottom &#061; int(round(pad_h &#8211; 0.1)), int(round(pad_h &#043; 0.1))<br \/>\n        left, right &#061; int(round(pad_w &#8211; 0.1)), int(round(pad_w &#043; 0.1))<br \/>\n        img &#061; cv2.copyMakeBorder(img, top, bottom, left, right, cv2.BORDER_CONSTANT, value&#061;(114, 114, 114))  # \u586b\u5145<\/p>\n<p>        # \u662f\u5426\u5f00\u542faipp\u52a0\u901f\u9884\u5904\u7406&#xff0c;\u9700atc\u4e2d\u5b8c\u6210<br \/>\n        if self.aipp:<br \/>\n            return img, ratio, (pad_w, pad_h)<\/p>\n<p>        # Transforms: HWC to CHW -&gt; BGR to RGB -&gt; div(255) -&gt; contiguous -&gt; add axis(optional)<br \/>\n        img &#061; np.ascontiguousarray(np.einsum(&#039;HWC-&gt;CHW&#039;, img)[::-1], dtype&#061;self.ndtype) \/ 255.0<br \/>\n        img_process &#061; img[None] if len(img.shape) &#061;&#061; 3 else img<br \/>\n        return img_process, ratio, (pad_w, pad_h)<\/p>\n<p>    # YOLOv5\/6\/7\u901a\u7528\u540e\u5904\u7406&#xff0c;\u5305\u62ec&#xff1a;\u9608\u503c\u8fc7\u6ee4\u4e0eNMS&#043;masks\u5904\u7406<br \/>\n    def postprocess_v5(self, preds, im0, ratio, pad_w, pad_h, conf_threshold, iou_threshold, nm&#061;32):<br \/>\n        
&#034;&#034;&#034;<br \/>\n        Post-process the prediction.<\/p>\n<p>        Args:<br \/>\n            preds (Numpy.ndarray): predictions come from ort.session.run().<br \/>\n            im0 (Numpy.ndarray): [h, w, c] original input image.<br \/>\n            ratio (tuple): width, height ratios in letterbox.<br \/>\n            pad_w (float): width padding in letterbox.<br \/>\n            pad_h (float): height padding in letterbox.<br \/>\n            conf_threshold (float): conf threshold.<br \/>\n            iou_threshold (float): iou threshold.<br \/>\n            nm (int): the number of masks.<\/p>\n<p>        Returns:<br \/>\n            boxes (List): list of bounding boxes.<br \/>\n            segments (List): list of segments.<br \/>\n            masks (np.ndarray): [N, H, W], output masks.<br \/>\n        &#034;&#034;&#034;<br \/>\n        # (Batch_size, Num_anchors, xywh_score_conf_cls), v5\u548cv6_1.0\u7684[&#8230;, 4]\u662f\u7f6e\u4fe1\u5ea6\u5206\u6570&#xff0c;v8v9\u91c7\u7528\u7c7b\u522b\u91cc\u9762\u6700\u5927\u7684\u6982\u7387\u4f5c\u4e3a\u7f6e\u4fe1\u5ea6score<br \/>\n        x, protos &#061; preds[0], preds[1]  # \u4e0ebbox\u533a\u522b&#xff1a;Two outputs: \u68c0\u6d4b\u5934\u7684\u8f93\u51fa(1, 8400*3, 117), \u5206\u5272\u5934\u7684\u8f93\u51fa(1, 32, 160, 160)<\/p>\n<p>        # Predictions filtering by conf-threshold<br \/>\n        x &#061; x[x[&#8230;, 4] &gt; conf_threshold]<\/p>\n<p>        # Create a new matrix which merge these(box, score, cls, nm) into one<br \/>\n        # For more details about &#096;numpy.c_()&#096;: https:\/\/numpy.org\/doc\/1.26\/reference\/generated\/numpy.c_.html<br \/>\n        x &#061; np.c_[x[&#8230;, :4], x[&#8230;, 4], np.argmax(x[&#8230;, 5:-nm], axis&#061;-1), x[&#8230;, -nm:]]<\/p>\n<p>        # NMS filtering<br \/>\n        # \u7ecf\u8fc7NMS\u540e\u7684\u503c, np.array([[x, y, w, h, conf, cls, nm], &#8230;]), shape&#061;(-1, 4 &#043; 1 &#043; 1 &#043; 32)<br \/>\n        x &#061; x[cv2.dnn.NMSBoxes(x[:, 
:4], x[:, 4], conf_threshold, iou_threshold)]<\/p>\n<p>        # \u91cd\u65b0\u7f29\u653e\u8fb9\u754c\u6846&#xff0c;\u4e3a\u753b\u56fe\u505a\u51c6\u5907<br \/>\n        if len(x) &gt; 0:<br \/>\n            # Bounding boxes format change: cxcywh -&gt; xyxy<br \/>\n            x[&#8230;, [0, 1]] -&#061; x[&#8230;, [2, 3]] \/ 2<br \/>\n            x[&#8230;, [2, 3]] &#043;&#061; x[&#8230;, [0, 1]]<\/p>\n<p>            # Rescales bounding boxes from model shape(model_height, model_width) to the shape of original image<br \/>\n            x[&#8230;, :4] -&#061; [pad_w, pad_h, pad_w, pad_h]<br \/>\n            x[&#8230;, :4] \/&#061; min(ratio)<\/p>\n<p>            # Bounding boxes boundary clamp<br \/>\n            x[&#8230;, [0, 2]] &#061; x[:, [0, 2]].clip(0, im0.shape[1])<br \/>\n            x[&#8230;, [1, 3]] &#061; x[:, [1, 3]].clip(0, im0.shape[0])<\/p>\n<p>            # \u4e0ebbox\u533a\u522b&#xff1a;\u589e\u52a0masks\u5904\u7406<br \/>\n            # Process masks<br \/>\n            masks &#061; self.process_mask(protos[0], x[:, 6:], x[:, :4], im0.shape)<br \/>\n            # Masks -&gt; Segments(contours)<br \/>\n            segments &#061; self.masks2segments(masks)<\/p>\n<p>            return x[&#8230;, :6], segments, masks  # boxes, segments, masks<br \/>\n        else:<br \/>\n            return [], [], []<\/p>\n<p>    def postprocess_v8(self, preds, im0, ratio, pad_w, pad_h, conf_threshold, iou_threshold):<br \/>\n        x, protos &#061; preds[0], preds[1]  # x: (1, 37, 8400), protos: (1, 32, 160, 160)<\/p>\n<p>        # \u7edf\u4e00\u4e3a (8400, 37)<br \/>\n        if x.ndim &#061;&#061; 3:<br \/>\n            if x.shape[1] &lt; x.shape[2]:    # (1, 37, 8400)<br \/>\n                x &#061; np.einsum(&#039;bcn-&gt;bnc&#039;, x)[0]  # -&gt; (8400, 37)<br \/>\n            else:                          # (1, 8400, 37)<br \/>\n                x &#061; x[0]<br \/>\n        else:<br \/>\n            raise ValueError(f&#039;unexpected pred0 shape: 
{x.shape}&#039;)<\/p>\n<p>        # \u52a8\u6001\u786e\u5b9a nm&#xff08;\u6b64\u6a21\u578b\u5c31\u662f 32&#xff09;<br \/>\n        nm &#061; protos.shape[1] if (protos.ndim &#061;&#061; 4 and protos.shape[1] in (16, 32, 64)) else 32<\/p>\n<p>        # \u53d6\u7c7b\u522b\u5206\u652f\u5e76\u6309\u9700\u505a sigmoid<br \/>\n        cls_blob &#061; x[:, 4:-nm]  # \u7ef4\u5ea6\u5e94\u4e3a (8400, num_classes)&#xff1b;\u4f60\u7684\u6a21\u578b num_classes&#061;1<br \/>\n        if cls_blob.size and (cls_blob.max() &gt; 1 or cls_blob.min() &lt; 0):<br \/>\n            cls_scores &#061; 1.0 \/ (1.0 &#043; np.exp(-cls_blob))<br \/>\n        else:<br \/>\n            cls_scores &#061; cls_blob<\/p>\n<p>        if cls_scores.size &#061;&#061; 0:<br \/>\n            return [], [], []<\/p>\n<p>        scores &#061; cls_scores.max(axis&#061;-1)<br \/>\n        clses  &#061; cls_scores.argmax(axis&#061;-1)<\/p>\n<p>        keep &#061; scores &gt; conf_threshold<br \/>\n        if not np.any(keep):<br \/>\n            return [], [], []<\/p>\n<p>        # \u62fc\u63a5\u4e3a\u7edf\u4e00\u6570\u7ec4<br \/>\n        x &#061; np.c_[x[keep, :4], scores[keep], clses[keep], x[keep, -nm:]]  # [cx,cy,w,h,score,cls, 32-vec]<\/p>\n<p>        # &#061;&#061;&#061; NMS \u524d\u5148\u628a cx,cy,w,h -&gt; x,y,w,h&#xff08;\u5de6\u4e0a&#043;\u5bbd\u9ad8&#xff09;&#061;&#061;&#061;<br \/>\n        tlwh &#061; np.c_[x[:, 0] &#8211; x[:, 2] \/ 2,<br \/>\n                    x[:, 1] &#8211; x[:, 3] \/ 2,<br \/>\n                    x[:, 2],<br \/>\n                    x[:, 3]]<\/p>\n<p>        idxs &#061; cv2.dnn.NMSBoxes(tlwh.tolist(), x[:, 4].astype(float).tolist(),<br \/>\n                                conf_threshold, iou_threshold)<br \/>\n        if len(idxs) &#061;&#061; 0:<br \/>\n            return [], [], []<br \/>\n        idxs &#061; np.array(idxs).reshape(-1)<br \/>\n        x &#061; x[idxs]<\/p>\n<p>        # \u8f6c\u56de xyxy \u5e76\u6620\u5c04\u56de\u539f\u56fe<br \/>\n        x[&#8230;, [0, 
1]] -&#061; x[&#8230;, [2, 3]] \/ 2<br \/>\n        x[&#8230;, [2, 3]] &#043;&#061; x[&#8230;, [0, 1]]<\/p>\n<p>        x[:, :4] -&#061; [pad_w, pad_h, pad_w, pad_h]<br \/>\n        x[:, :4] \/&#061; min(ratio)<br \/>\n        x[:, [0, 2]] &#061; x[:, [0, 2]].clip(0, im0.shape[1])<br \/>\n        x[:, [1, 3]] &#061; x[:, [1, 3]].clip(0, im0.shape[0])<\/p>\n<p>        # \u5904\u7406 mask<br \/>\n        masks &#061; self.process_mask(protos[0], x[:, 6:], x[:, :4], im0.shape)<br \/>\n        segments &#061; self.masks2segments(masks)<br \/>\n        return x[:, :6], segments, masks<\/p>\n<p>    &#064;staticmethod<br \/>\n    def masks2segments(masks):<br \/>\n        &#034;&#034;&#034;<br \/>\n        It takes a list of masks(n,h,w) and returns a list of segments(n,xy) (Borrowed from<br \/>\n        https:\/\/github.com\/ultralytics\/ultralytics\/blob\/465df3024f44fa97d4fad9986530d5a13cdabdca\/ultralytics\/utils\/ops.py#L750)<\/p>\n<p>        Args:<br \/>\n            masks (numpy.ndarray): the output of the model, which is a tensor of shape (batch_size, 160, 160).<\/p>\n<p>        Returns:<br \/>\n            segments (List): list of segment masks.<br \/>\n        &#034;&#034;&#034;<br \/>\n        segments &#061; []<br \/>\n        for x in masks.astype(&#039;uint8&#039;):<br \/>\n            c &#061; cv2.findContours(x, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[0]  # CHAIN_APPROX_SIMPLE  \u8be5\u51fd\u6570\u7528\u4e8e\u67e5\u627e\u4e8c\u503c\u56fe\u50cf\u4e2d\u7684\u8f6e\u5ed3\u3002<br \/>\n            if c:<br \/>\n                # \u8fd9\u6bb5\u4ee3\u7801\u7684\u76ee\u7684\u662f\u627e\u5230\u56fe\u50cfx\u4e2d\u7684\u6700\u5916\u5c42\u8f6e\u5ed3&#xff0c;\u5e76\u4ece\u4e2d\u9009\u62e9\u6700\u957f\u7684\u8f6e\u5ed3&#xff0c;\u7136\u540e\u5c06\u5176\u8f6c\u6362\u4e3aNumPy\u6570\u7ec4\u7684\u5f62\u5f0f\u3002<br \/>\n                c &#061; np.array(c[np.array([len(x) for x in c]).argmax()]).reshape(-1, 2)<br \/>\n            else:<br \/>\n                c &#061; 
np.zeros((0, 2))  # no segments found<br \/>\n            segments.append(c.astype(&#039;float32&#039;))<br \/>\n        return segments<\/p>\n<p>    def process_mask(self, protos, masks_in, bboxes, im0_shape):<br \/>\n        c, mh, mw &#061; protos.shape<br \/>\n        masks &#061; np.matmul(masks_in, protos.reshape((c, -1))).reshape((-1, mh, mw))  # [n, mh, mw]<\/p>\n<p>        # \u2605 \u5148 sigmoid&#xff0c;\u628a\u7ebf\u6027\u53e0\u52a0\u7684 logits \u8f6c\u6210\u6982\u7387<br \/>\n        masks &#061; 1.0 \/ (1.0 &#043; np.exp(-masks))<\/p>\n<p>        masks &#061; np.transpose(masks, (1, 2, 0))                  # -&gt; [mh, mw, n]<br \/>\n        masks &#061; np.ascontiguousarray(masks)<br \/>\n        masks &#061; self.scale_mask(masks, im0_shape)               # -&gt; [H, W, n]<br \/>\n        masks &#061; np.einsum(&#039;HWN -&gt; NHW&#039;, masks)                  # -&gt; [n, H, W]<br \/>\n        masks &#061; self.crop_mask(masks, bboxes)                   # \u88c1\u526a\u5230\u6846\u5185<br \/>\n        return masks &gt; 0.5<\/p>\n<p>    &#064;staticmethod<br \/>\n    def scale_mask(masks, im0_shape, ratio_pad&#061;None):<br \/>\n        &#034;&#034;&#034;<br \/>\n        Takes a mask, and resizes it to the original image size. 
(Borrowed from<br \/>\n        https:\/\/github.com\/ultralytics\/ultralytics\/blob\/465df3024f44fa97d4fad9986530d5a13cdabdca\/ultralytics\/utils\/ops.py#L305)<\/p>\n<p>        Args:<br \/>\n            masks (np.ndarray): resized and padded masks\/images, [h, w, num]\/[h, w, 3].<br \/>\n            im0_shape (tuple): the original image shape.<br \/>\n            ratio_pad (tuple): the ratio of the padding to the original image.<\/p>\n<p>        Returns:<br \/>\n            masks (np.ndarray): The masks that are being returned.<br \/>\n        &#034;&#034;&#034;<br \/>\n        im1_shape &#061; masks.shape[:2]<br \/>\n        if ratio_pad is None:  # calculate from im0_shape<br \/>\n            gain &#061; min(im1_shape[0] \/ im0_shape[0], im1_shape[1] \/ im0_shape[1])  # gain  &#061; old \/ new<br \/>\n            pad &#061; (im1_shape[1] &#8211; im0_shape[1] * gain) \/ 2, (im1_shape[0] &#8211; im0_shape[0] * gain) \/ 2  # wh padding<br \/>\n        else:<br \/>\n            pad &#061; ratio_pad[1]<\/p>\n<p>        # Calculate tlbr of mask<br \/>\n        top, left &#061; int(round(pad[1] &#8211; 0.1)), int(round(pad[0] &#8211; 0.1))  # y, x<br \/>\n        bottom, right &#061; int(round(im1_shape[0] &#8211; pad[1] &#043; 0.1)), int(round(im1_shape[1] &#8211; pad[0] &#043; 0.1))<br \/>\n        if len(masks.shape) &lt; 2:<br \/>\n            raise ValueError(f&#039;&#034;len of masks shape&#034; should be 2 or 3, but got {len(masks.shape)}&#039;)<br \/>\n        masks &#061; masks[top:bottom, left:right]<br \/>\n        masks &#061; cv2.resize(masks, (im0_shape[1], im0_shape[0]),<br \/>\n                           interpolation&#061;cv2.INTER_LINEAR)  # INTER_CUBIC would be better<br \/>\n        if len(masks.shape) &#061;&#061; 2:<br \/>\n            masks &#061; masks[:, :, None]<br \/>\n        return masks<\/p>\n<p>    &#064;staticmethod<br \/>\n    def crop_mask(masks, boxes):<br \/>\n        &#034;&#034;&#034;<br \/>\n        It takes a mask and a bounding 
box, and returns a mask that is cropped to the bounding box. (Borrowed from<br \/>\n        https:\/\/github.com\/ultralytics\/ultralytics\/blob\/465df3024f44fa97d4fad9986530d5a13cdabdca\/ultralytics\/utils\/ops.py#L599)<\/p>\n<p>        Args:<br \/>\n            masks (Numpy.ndarray): [n, h, w] tensor of masks.<br \/>\n            boxes (Numpy.ndarray): [n, 4] tensor of bbox coordinates in relative point form.<\/p>\n<p>        Returns:<br \/>\n            (Numpy.ndarray): The masks are being cropped to the bounding box.<br \/>\n        &#034;&#034;&#034;<br \/>\n        n, h, w &#061; masks.shape<br \/>\n        x1, y1, x2, y2 &#061; np.split(boxes[:, :, None], 4, 1)<br \/>\n        r &#061; np.arange(w, dtype&#061;x1.dtype)[None, None, :]<br \/>\n        c &#061; np.arange(h, dtype&#061;x1.dtype)[None, :, None]<br \/>\n        return masks * ((r &gt;&#061; x1) * (r &lt; x2) * (c &gt;&#061; y1) * (c &lt; y2))<\/p>\n<p>if __name__ &#061;&#061; &#039;__main__&#039;:<br \/>\n    # Create an argument parser to handle command-line arguments<br \/>\n    parser &#061; argparse.ArgumentParser()<br \/>\n    parser.add_argument(&#039;&#8211;seg_model&#039;, type&#061;str, default&#061;r&#034;yolov8s-seg.om&#034;, help&#061;&#039;Path to OM model&#039;)<br \/>\n    parser.add_argument(&#039;&#8211;source&#039;, type&#061;str, default&#061;r&#039;images&#039;, help&#061;&#039;Path to input image&#039;)<br \/>\n    parser.add_argument(&#039;&#8211;out_path&#039;, type&#061;str, default&#061;r&#039;results&#039;, help&#061;&#039;\u7ed3\u679c\u4fdd\u5b58\u6587\u4ef6\u5939&#039;)<br \/>\n    parser.add_argument(&#039;&#8211;imgsz_seg&#039;, type&#061;tuple, default&#061;(640, 640), help&#061;&#039;Image input size&#039;)<br \/>\n    parser.add_argument(&#039;&#8211;classes&#039;, type&#061;list, default&#061;[&#039;person&#039;, &#039;bicycle&#039;, &#039;car&#039;, &#039;motorcycle&#039;, &#039;airplane&#039;, &#039;bus&#039;, &#039;train&#039;, &#039;truck&#039;, 
&#039;boat&#039;, &#039;traffic light&#039;,<br \/>\n            &#039;fire hydrant&#039;, &#039;stop sign&#039;, &#039;parking meter&#039;, &#039;bench&#039;, &#039;bird&#039;, &#039;cat&#039;, &#039;dog&#039;, &#039;horse&#039;, &#039;sheep&#039;, &#039;cow&#039;,<br \/>\n              &#039;elephant&#039;, &#039;bear&#039;, &#039;zebra&#039;, &#039;giraffe&#039;, &#039;backpack&#039;, &#039;umbrella&#039;, &#039;handbag&#039;, &#039;tie&#039;, &#039;suitcase&#039;, &#039;frisbee&#039;,<br \/>\n                &#039;skis&#039;, &#039;snowboard&#039;, &#039;sports ball&#039;, &#039;kite&#039;, &#039;baseball bat&#039;, &#039;baseball glove&#039;, &#039;skateboard&#039;, &#039;surfboard&#039;,<br \/>\n                  &#039;tennis racket&#039;, &#039;bottle&#039;, &#039;wine glass&#039;, &#039;cup&#039;, &#039;fork&#039;, &#039;knife&#039;, &#039;spoon&#039;, &#039;bowl&#039;, &#039;banana&#039;, &#039;apple&#039;, &#039;sandwich&#039;,<br \/>\n                    &#039;orange&#039;, &#039;broccoli&#039;, &#039;carrot&#039;, &#039;hot dog&#039;, &#039;pizza&#039;, &#039;donut&#039;, &#039;cake&#039;, &#039;chair&#039;, &#039;couch&#039;, &#039;potted plant&#039;, &#039;bed&#039;,<br \/>\n                      &#039;dining table&#039;, &#039;toilet&#039;, &#039;tv&#039;, &#039;laptop&#039;, &#039;mouse&#039;, &#039;remote&#039;, &#039;keyboard&#039;, &#039;cell phone&#039;, &#039;microwave&#039;, &#039;oven&#039;,<br \/>\n                        &#039;toaster&#039;, &#039;sink&#039;, &#039;refrigerator&#039;, &#039;book&#039;, &#039;clock&#039;, &#039;vase&#039;, &#039;scissors&#039;, &#039;teddy bear&#039;, &#039;hair drier&#039;, &#039;toothbrush&#039;], help&#061;&#039;\u7c7b\u522b&#039;)<\/p>\n<p>    parser.add_argument(&#039;&#8211;conf&#039;, type&#061;float, default&#061;0.7, help&#061;&#039;Confidence threshold&#039;)<br \/>\n    parser.add_argument(&#039;&#8211;iou&#039;, type&#061;float, default&#061;0.7, help&#061;&#039;NMS IoU threshold&#039;)<br \/>\n  
  parser.add_argument(&#039;&#8211;device_id&#039;, type&#061;int, default&#061;0, help&#061;&#039;device id&#039;)<br \/>\n    parser.add_argument(&#039;&#8211;mode&#039;, default&#061;&#039;static&#039;, help&#061;&#039;om\u662f\u52a8\u6001dymshape\u6216\u9759\u6001static&#039;)<br \/>\n    parser.add_argument(&#039;&#8211;model_ndtype&#039;, default&#061;np.single, help&#061;&#039;om\u662ffp32\u6216fp16&#039;)<br \/>\n    parser.add_argument(&#039;&#8211;postprocess_type&#039;, type&#061;str, default&#061;&#039;v8&#039;, help&#061;&#039;\u540e\u5904\u7406\u65b9\u5f0f, \u5bf9\u5e94v5\/v8\u4e24\u79cd\u540e\u5904\u7406&#039;)<br \/>\n    parser.add_argument(&#039;&#8211;aipp&#039;, default&#061;False, action&#061;&#039;store_true&#039;, help&#061;&#039;\u662f\u5426\u5f00\u542faipp\u52a0\u901fYOLO\u9884\u5904\u7406, \u9700atc\u4e2d\u5b8c\u6210om\u96c6\u6210&#039;)<br \/>\n    args &#061; parser.parse_args()<\/p>\n<p>    # \u521b\u5efa\u7ed3\u679c\u4fdd\u5b58\u6587\u4ef6\u5939<br \/>\n    if not os.path.exists(args.out_path):<br \/>\n        os.mkdir(args.out_path)<\/p>\n<p>    print(&#039;\u5f00\u59cb\u8fd0\u884c&#xff1a;&#039;)<br \/>\n    # Build model<br \/>\n    seg_model &#061; YOLO(args.seg_model, args.imgsz_seg, args.device_id, args.model_ndtype, args.mode, args.postprocess_type, args.aipp)<br \/>\n    color_palette &#061; np.random.uniform(0, 255, size&#061;(len(args.classes), 3))  # \u4e3a\u6bcf\u4e2a\u7c7b\u522b\u751f\u6210\u8c03\u8272\u677f<\/p>\n<p>    for i, img_name in enumerate(os.listdir(args.source)):<br \/>\n        try:<br \/>\n            t1 &#061; time.time()<br \/>\n            # Read image by OpenCV<br \/>\n            img &#061; cv2.imread(os.path.join(args.source, img_name))<\/p>\n<p>            # \u68c0\u6d4bInference<br \/>\n            boxes, segments, _ , (pre_time, det_time, post_time) &#061; seg_model(img, conf_threshold&#061;args.conf, iou_threshold&#061;args.iou)<br \/>\n            print(&#039;{}\/{} 
&#061;&#061;&gt;\u603b\u8017\u65f6\u95f4: {:.3f}s, \u5176\u4e2d, \u9884\u5904\u7406: {:.3f}s, \u63a8\u7406: {:.3f}s, \u540e\u5904\u7406: {:.3f}s, \u8bc6\u522b{}\u4e2a\u76ee\u6807&#039;.format(i&#043;1, len(os.listdir(args.source)), time.time() &#8211; t1, pre_time, det_time, post_time, len(boxes)))<\/p>\n<p>            # Draw rectangles and polygons<br \/>\n            im_canvas &#061; img.copy()<br \/>\n            # \u5728\u7ed8\u5236\u5faa\u73af\u91cc\u52a0\u5065\u58ee\u6027\u5224\u65ad<br \/>\n            for (*box, conf, cls_), segment in zip(boxes, segments):<br \/>\n                # segment \u53ef\u80fd\u4e3a\u7a7a\u6216\u5f88\u77ed&#xff1b;\u4e5f\u53ef\u80fd\u662f float&#xff0c;\u9700\u8981\u8f6c int32<br \/>\n                if segment is None or len(segment) &lt; 3:<br \/>\n                    continue<br \/>\n                seg &#061; np.round(segment).astype(np.int32).reshape(-1, 1, 2)  # -&gt; Nx1x2, int32<\/p>\n<p>                # \u5148\u753b\u8fb9&#xff0c;\u518d\u586b\u5145<br \/>\n                cv2.polylines(img, [seg], True, (255, 255, 255), 2)<br \/>\n                cv2.fillPoly(im_canvas, [seg], (255, 0, 0))<\/p>\n<p>                # \u753b bbox \u548c\u6807\u7b7e<br \/>\n                x1, y1, x2, y2 &#061; map(int, box[:4])<br \/>\n                cls_i &#061; int(cls_)<br \/>\n                cv2.rectangle(img, (x1, y1), (x2, y2), color_palette[cls_i], 1, cv2.LINE_AA)<br \/>\n                label &#061; args.classes[cls_i] if 0 &lt;&#061; cls_i &lt; len(args.classes) else f&#039;cls{cls_i}&#039;<br \/>\n                cv2.putText(img, f&#039;{label}: {conf:.3f}&#039;, (x1, max(0, y1 &#8211; 9)),<br \/>\n                            cv2.FONT_HERSHEY_SIMPLEX, 0.7, color_palette[cls_i], 2, cv2.LINE_AA)<\/p>\n<p>            # Mix image<br \/>\n            img &#061; cv2.addWeighted(im_canvas, 0.3, img, 0.7, 0)<\/p>\n<p>            cv2.imwrite(os.path.join(args.out_path, img_name), img)<\/p>\n<p>        except Exception as e:<br \/>\n        
    print(e)<\/p>\n<h3>5.\u542f\u52a8\u547d\u4ee4\u00a0<\/h3>\n<p>python ~\/autodl-tmp\/ultralytics-main\/run_seg.py   \\\\<br \/>\n&#8211;seg_model \/root\/autodl-tmp\/ultralytics-main\/yolo11n-seg1.om   \\\\<br \/>\n&#8211;source \/root\/autodl-tmp\/ultralytics-main\/test   \\\\<br \/>\n&#8211;classes [person]  \\\\<br \/>\n&#8211;postprocess_type v8 <\/p>\n<p>&#8211;classes \u662f\u6a21\u578b\u68c0\u6d4b\u6807\u7b7e\u540d<\/p>\n<p>&#8211;source \u63a8\u7406\u6587\u4ef6\u6240\u5728\u6587\u4ef6\u5939<\/p>\n<p>&#8211;det_model om\u6a21\u578b\u53c2\u6570\u8def\u5f84<\/p>\n<p>&#8211;postprocess_type yolov8 v9 v11\u90fd\u7528v8&#xff0c;\u4f7f\u7528\u7684yolov5\u7684\u8bdd\u6539\u4e3ayolov5<\/p>\n<p>\u00a0<\/p>\n","protected":false},"excerpt":{"rendered":"<p>\u6587\u7ae0\u6d4f\u89c8\u9605\u8bfb351\u6b21\u3002\u672c\u6587\u4ecb\u7ecd\u4e86\u5728\u6607\u817eAI\u5904\u7406\u5668\u4e0a\u90e8\u7f72YOLO\u7cfb\u5217\u6a21\u578b\u7684\u5b8c\u6574\u6d41\u7a0b\u3002\u9996\u5148\u6982\u8ff0\u4e86\u534e\u4e3a\u6607\u817e\u82af\u7247\uff08Ascend 
310\/910\/910C\uff09\u548cMindSpore\u6846\u67b6\u7684\u7279\u6027\uff0c\u4ee5\u53caCANN\u8ba1\u7b97\u67b6\u6784\u7684\u4f5c\u7528\u3002\u7136\u540e\u8be6\u7ec6\u8bf4\u660e\u4e86\u73af\u5883\u914d\u7f6e\u6b65\u9aa4\uff1a1)\u521b\u5efaconda\u73af\u5883\u5e76\u5b89\u88c5\u4f9d\u8d56\u5e93\uff1b2)\u51c6\u5907YOLO\u6e90\u7801\u548c\u6a21\u578b\u6743\u91cd\uff1b3)\u6a21\u578b\u8f6c\u6362\u8fc7\u7a0b\uff08.pt\u2192.onnx\u2192.om\uff09\uff1b4)\u7f16\u5199\u63a8\u7406\u4ee3\u7801\uff0c\u5305\u62ec\u9884\u5904\u7406\u3001\u63a8\u7406\u548c\u540e\u5904\u7406\u6a21\u5757\uff1b5)\u8fd0\u884c\u63a8\u7406\u547d\u4ee4\u3002\u91cd\u70b9\u4ecb\u7ecd\u4e86\u5982\u4f55\u5229\u7528ais_bench\u5de5\u5177\u8fdb\u884c\u9ad8\u6027\u80fd\u63a8\u7406\uff0c\u5e76\u5904\u7406\u4e86\u53ef\u80fd\u51fa\u73b0\u7684\u73af\u5883\u914d\u7f6e\u95ee\u9898\u3002\u8be5\u65b9\u6848\u5b9e\u73b0\u4e86YOLO\u6a21\u578b\u5728\u6607\u817e<\/p>\n","protected":false},"author":2,"featured_media":52956,"comment_status":"open","ping_status":"open","sticky":false,"template":"","format":"standard","meta":{"footnotes":""},"categories":[1],"tags":[156,50,3829,86,1665,427,523],"topic":[],"class_list":["post-52960","post","type-post","status-publish","format-standard","has-post-thumbnail","hentry","category-server","tag-yolo","tag-50","tag-3829","tag-86","tag-1665","tag-427","tag-523"],"yoast_head":"<!-- This site is optimized with the Yoast SEO plugin v20.3 - https:\/\/yoast.com\/wordpress\/plugins\/seo\/ -->\n<title>Autudl\u534e\u4e3a\u6607\u817e\u7cfb\u5217NPU\u7b80\u4ecb\u548c\u90e8\u7f72\u63a8\u7406yolo11 yolov8 yolov5\u5206\u5272\u6a21\u578b - \u7f51\u7855\u4e92\u8054\u5e2e\u52a9\u4e2d\u5fc3<\/title>\n<meta name=\"robots\" content=\"index, follow, max-snippet:-1, max-image-preview:large, max-video-preview:-1\" \/>\n<link rel=\"canonical\" href=\"https:\/\/www.wsisp.com\/helps\/52960.html\" \/>\n<meta property=\"og:locale\" content=\"zh_CN\" \/>\n<meta property=\"og:type\" content=\"article\" \/>\n<meta 
property=\"og:title\" content=\"Autudl\u534e\u4e3a\u6607\u817e\u7cfb\u5217NPU\u7b80\u4ecb\u548c\u90e8\u7f72\u63a8\u7406yolo11 yolov8 yolov5\u5206\u5272\u6a21\u578b - \u7f51\u7855\u4e92\u8054\u5e2e\u52a9\u4e2d\u5fc3\" \/>\n<meta property=\"og:description\" content=\"\u6587\u7ae0\u6d4f\u89c8\u9605\u8bfb351\u6b21\u3002\u672c\u6587\u4ecb\u7ecd\u4e86\u5728\u6607\u817eAI\u5904\u7406\u5668\u4e0a\u90e8\u7f72YOLO\u7cfb\u5217\u6a21\u578b\u7684\u5b8c\u6574\u6d41\u7a0b\u3002\u9996\u5148\u6982\u8ff0\u4e86\u534e\u4e3a\u6607\u817e\u82af\u7247\uff08Ascend 310\/910\/910C\uff09\u548cMindSpore\u6846\u67b6\u7684\u7279\u6027\uff0c\u4ee5\u53caCANN\u8ba1\u7b97\u67b6\u6784\u7684\u4f5c\u7528\u3002\u7136\u540e\u8be6\u7ec6\u8bf4\u660e\u4e86\u73af\u5883\u914d\u7f6e\u6b65\u9aa4\uff1a1)\u521b\u5efaconda\u73af\u5883\u5e76\u5b89\u88c5\u4f9d\u8d56\u5e93\uff1b2)\u51c6\u5907YOLO\u6e90\u7801\u548c\u6a21\u578b\u6743\u91cd\uff1b3)\u6a21\u578b\u8f6c\u6362\u8fc7\u7a0b\uff08.pt\u2192.onnx\u2192.om\uff09\uff1b4)\u7f16\u5199\u63a8\u7406\u4ee3\u7801\uff0c\u5305\u62ec\u9884\u5904\u7406\u3001\u63a8\u7406\u548c\u540e\u5904\u7406\u6a21\u5757\uff1b5)\u8fd0\u884c\u63a8\u7406\u547d\u4ee4\u3002\u91cd\u70b9\u4ecb\u7ecd\u4e86\u5982\u4f55\u5229\u7528ais_bench\u5de5\u5177\u8fdb\u884c\u9ad8\u6027\u80fd\u63a8\u7406\uff0c\u5e76\u5904\u7406\u4e86\u53ef\u80fd\u51fa\u73b0\u7684\u73af\u5883\u914d\u7f6e\u95ee\u9898\u3002\u8be5\u65b9\u6848\u5b9e\u73b0\u4e86YOLO\u6a21\u578b\u5728\u6607\u817e\" \/>\n<meta property=\"og:url\" content=\"https:\/\/www.wsisp.com\/helps\/52960.html\" \/>\n<meta property=\"og:site_name\" content=\"\u7f51\u7855\u4e92\u8054\u5e2e\u52a9\u4e2d\u5fc3\" \/>\n<meta property=\"article:published_time\" content=\"2025-08-11T13:59:18+00:00\" \/>\n<meta property=\"og:image\" content=\"https:\/\/www.wsisp.com\/helps\/wp-content\/uploads\/2025\/08\/20250811135915-6899f7339d14e.png\" \/>\n<meta name=\"author\" content=\"admin\" \/>\n<meta name=\"twitter:card\" content=\"summary_large_image\" \/>\n<meta 
name=\"twitter:label1\" content=\"\u4f5c\u8005\" \/>\n\t<meta name=\"twitter:data1\" content=\"admin\" \/>\n\t<meta name=\"twitter:label2\" content=\"\u9884\u8ba1\u9605\u8bfb\u65f6\u95f4\" \/>\n\t<meta name=\"twitter:data2\" content=\"13 \u5206\" \/>\n<script type=\"application\/ld+json\" class=\"yoast-schema-graph\">{\"@context\":\"https:\/\/schema.org\",\"@graph\":[{\"@type\":\"WebPage\",\"@id\":\"https:\/\/www.wsisp.com\/helps\/52960.html\",\"url\":\"https:\/\/www.wsisp.com\/helps\/52960.html\",\"name\":\"Autudl\u534e\u4e3a\u6607\u817e\u7cfb\u5217NPU\u7b80\u4ecb\u548c\u90e8\u7f72\u63a8\u7406yolo11 yolov8 yolov5\u5206\u5272\u6a21\u578b - \u7f51\u7855\u4e92\u8054\u5e2e\u52a9\u4e2d\u5fc3\",\"isPartOf\":{\"@id\":\"https:\/\/www.wsisp.com\/helps\/#website\"},\"datePublished\":\"2025-08-11T13:59:18+00:00\",\"dateModified\":\"2025-08-11T13:59:18+00:00\",\"author\":{\"@id\":\"https:\/\/www.wsisp.com\/helps\/#\/schema\/person\/358e386c577a3ab51c4493330a20ad41\"},\"breadcrumb\":{\"@id\":\"https:\/\/www.wsisp.com\/helps\/52960.html#breadcrumb\"},\"inLanguage\":\"zh-Hans\",\"potentialAction\":[{\"@type\":\"ReadAction\",\"target\":[\"https:\/\/www.wsisp.com\/helps\/52960.html\"]}]},{\"@type\":\"BreadcrumbList\",\"@id\":\"https:\/\/www.wsisp.com\/helps\/52960.html#breadcrumb\",\"itemListElement\":[{\"@type\":\"ListItem\",\"position\":1,\"name\":\"\u9996\u9875\",\"item\":\"https:\/\/www.wsisp.com\/helps\"},{\"@type\":\"ListItem\",\"position\":2,\"name\":\"Autudl\u534e\u4e3a\u6607\u817e\u7cfb\u5217NPU\u7b80\u4ecb\u548c\u90e8\u7f72\u63a8\u7406yolo11 yolov8 
yolov5\u5206\u5272\u6a21\u578b\"}]},{\"@type\":\"WebSite\",\"@id\":\"https:\/\/www.wsisp.com\/helps\/#website\",\"url\":\"https:\/\/www.wsisp.com\/helps\/\",\"name\":\"\u7f51\u7855\u4e92\u8054\u5e2e\u52a9\u4e2d\u5fc3\",\"description\":\"\u9999\u6e2f\u670d\u52a1\u5668_\u9999\u6e2f\u4e91\u670d\u52a1\u5668\u8d44\u8baf_\u670d\u52a1\u5668\u5e2e\u52a9\u6587\u6863_\u670d\u52a1\u5668\u6559\u7a0b\",\"potentialAction\":[{\"@type\":\"SearchAction\",\"target\":{\"@type\":\"EntryPoint\",\"urlTemplate\":\"https:\/\/www.wsisp.com\/helps\/?s={search_term_string}\"},\"query-input\":\"required name=search_term_string\"}],\"inLanguage\":\"zh-Hans\"},{\"@type\":\"Person\",\"@id\":\"https:\/\/www.wsisp.com\/helps\/#\/schema\/person\/358e386c577a3ab51c4493330a20ad41\",\"name\":\"admin\",\"image\":{\"@type\":\"ImageObject\",\"inLanguage\":\"zh-Hans\",\"@id\":\"https:\/\/www.wsisp.com\/helps\/#\/schema\/person\/image\/\",\"url\":\"https:\/\/gravatar.wp-china-yes.net\/avatar\/?s=96&d=mystery\",\"contentUrl\":\"https:\/\/gravatar.wp-china-yes.net\/avatar\/?s=96&d=mystery\",\"caption\":\"admin\"},\"sameAs\":[\"http:\/\/wp.wsisp.com\"],\"url\":\"https:\/\/www.wsisp.com\/helps\/author\/admin\"}]}<\/script>\n<!-- \/ Yoast SEO plugin. 
-->","yoast_head_json":{"title":"Autudl\u534e\u4e3a\u6607\u817e\u7cfb\u5217NPU\u7b80\u4ecb\u548c\u90e8\u7f72\u63a8\u7406yolo11 yolov8 yolov5\u5206\u5272\u6a21\u578b - \u7f51\u7855\u4e92\u8054\u5e2e\u52a9\u4e2d\u5fc3","robots":{"index":"index","follow":"follow","max-snippet":"max-snippet:-1","max-image-preview":"max-image-preview:large","max-video-preview":"max-video-preview:-1"},"canonical":"https:\/\/www.wsisp.com\/helps\/52960.html","og_locale":"zh_CN","og_type":"article","og_title":"Autudl\u534e\u4e3a\u6607\u817e\u7cfb\u5217NPU\u7b80\u4ecb\u548c\u90e8\u7f72\u63a8\u7406yolo11 yolov8 yolov5\u5206\u5272\u6a21\u578b - \u7f51\u7855\u4e92\u8054\u5e2e\u52a9\u4e2d\u5fc3","og_description":"\u6587\u7ae0\u6d4f\u89c8\u9605\u8bfb351\u6b21\u3002\u672c\u6587\u4ecb\u7ecd\u4e86\u5728\u6607\u817eAI\u5904\u7406\u5668\u4e0a\u90e8\u7f72YOLO\u7cfb\u5217\u6a21\u578b\u7684\u5b8c\u6574\u6d41\u7a0b\u3002\u9996\u5148\u6982\u8ff0\u4e86\u534e\u4e3a\u6607\u817e\u82af\u7247\uff08Ascend 310\/910\/910C\uff09\u548cMindSpore\u6846\u67b6\u7684\u7279\u6027\uff0c\u4ee5\u53caCANN\u8ba1\u7b97\u67b6\u6784\u7684\u4f5c\u7528\u3002\u7136\u540e\u8be6\u7ec6\u8bf4\u660e\u4e86\u73af\u5883\u914d\u7f6e\u6b65\u9aa4\uff1a1)\u521b\u5efaconda\u73af\u5883\u5e76\u5b89\u88c5\u4f9d\u8d56\u5e93\uff1b2)\u51c6\u5907YOLO\u6e90\u7801\u548c\u6a21\u578b\u6743\u91cd\uff1b3)\u6a21\u578b\u8f6c\u6362\u8fc7\u7a0b\uff08.pt\u2192.onnx\u2192.om\uff09\uff1b4)\u7f16\u5199\u63a8\u7406\u4ee3\u7801\uff0c\u5305\u62ec\u9884\u5904\u7406\u3001\u63a8\u7406\u548c\u540e\u5904\u7406\u6a21\u5757\uff1b5)\u8fd0\u884c\u63a8\u7406\u547d\u4ee4\u3002\u91cd\u70b9\u4ecb\u7ecd\u4e86\u5982\u4f55\u5229\u7528ais_bench\u5de5\u5177\u8fdb\u884c\u9ad8\u6027\u80fd\u63a8\u7406\uff0c\u5e76\u5904\u7406\u4e86\u53ef\u80fd\u51fa\u73b0\u7684\u73af\u5883\u914d\u7f6e\u95ee\u9898\u3002\u8be5\u65b9\u6848\u5b9e\u73b0\u4e86YOLO\u6a21\u578b\u5728\u6607\u817e","og_url":"https:\/\/www.wsisp.com\/helps\/52960.html","og_site_name":"\u7f51\u7855\u4e92\u8054\u5e2e\u52a9\u4e2d\u5fc3","
article_published_time":"2025-08-11T13:59:18+00:00","og_image":[{"url":"https:\/\/www.wsisp.com\/helps\/wp-content\/uploads\/2025\/08\/20250811135915-6899f7339d14e.png"}],"author":"admin","twitter_card":"summary_large_image","twitter_misc":{"\u4f5c\u8005":"admin","\u9884\u8ba1\u9605\u8bfb\u65f6\u95f4":"13 \u5206"},"schema":{"@context":"https:\/\/schema.org","@graph":[{"@type":"WebPage","@id":"https:\/\/www.wsisp.com\/helps\/52960.html","url":"https:\/\/www.wsisp.com\/helps\/52960.html","name":"Autudl\u534e\u4e3a\u6607\u817e\u7cfb\u5217NPU\u7b80\u4ecb\u548c\u90e8\u7f72\u63a8\u7406yolo11 yolov8 yolov5\u5206\u5272\u6a21\u578b - \u7f51\u7855\u4e92\u8054\u5e2e\u52a9\u4e2d\u5fc3","isPartOf":{"@id":"https:\/\/www.wsisp.com\/helps\/#website"},"datePublished":"2025-08-11T13:59:18+00:00","dateModified":"2025-08-11T13:59:18+00:00","author":{"@id":"https:\/\/www.wsisp.com\/helps\/#\/schema\/person\/358e386c577a3ab51c4493330a20ad41"},"breadcrumb":{"@id":"https:\/\/www.wsisp.com\/helps\/52960.html#breadcrumb"},"inLanguage":"zh-Hans","potentialAction":[{"@type":"ReadAction","target":["https:\/\/www.wsisp.com\/helps\/52960.html"]}]},{"@type":"BreadcrumbList","@id":"https:\/\/www.wsisp.com\/helps\/52960.html#breadcrumb","itemListElement":[{"@type":"ListItem","position":1,"name":"\u9996\u9875","item":"https:\/\/www.wsisp.com\/helps"},{"@type":"ListItem","position":2,"name":"Autudl\u534e\u4e3a\u6607\u817e\u7cfb\u5217NPU\u7b80\u4ecb\u548c\u90e8\u7f72\u63a8\u7406yolo11 yolov8 
yolov5\u5206\u5272\u6a21\u578b"}]},{"@type":"WebSite","@id":"https:\/\/www.wsisp.com\/helps\/#website","url":"https:\/\/www.wsisp.com\/helps\/","name":"\u7f51\u7855\u4e92\u8054\u5e2e\u52a9\u4e2d\u5fc3","description":"\u9999\u6e2f\u670d\u52a1\u5668_\u9999\u6e2f\u4e91\u670d\u52a1\u5668\u8d44\u8baf_\u670d\u52a1\u5668\u5e2e\u52a9\u6587\u6863_\u670d\u52a1\u5668\u6559\u7a0b","potentialAction":[{"@type":"SearchAction","target":{"@type":"EntryPoint","urlTemplate":"https:\/\/www.wsisp.com\/helps\/?s={search_term_string}"},"query-input":"required name=search_term_string"}],"inLanguage":"zh-Hans"},{"@type":"Person","@id":"https:\/\/www.wsisp.com\/helps\/#\/schema\/person\/358e386c577a3ab51c4493330a20ad41","name":"admin","image":{"@type":"ImageObject","inLanguage":"zh-Hans","@id":"https:\/\/www.wsisp.com\/helps\/#\/schema\/person\/image\/","url":"https:\/\/gravatar.wp-china-yes.net\/avatar\/?s=96&d=mystery","contentUrl":"https:\/\/gravatar.wp-china-yes.net\/avatar\/?s=96&d=mystery","caption":"admin"},"sameAs":["http:\/\/wp.wsisp.com"],"url":"https:\/\/www.wsisp.com\/helps\/author\/admin"}]}},"_links":{"self":[{"href":"https:\/\/www.wsisp.com\/helps\/wp-json\/wp\/v2\/posts\/52960","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/www.wsisp.com\/helps\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/www.wsisp.com\/helps\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/www.wsisp.com\/helps\/wp-json\/wp\/v2\/users\/2"}],"replies":[{"embeddable":true,"href":"https:\/\/www.wsisp.com\/helps\/wp-json\/wp\/v2\/comments?post=52960"}],"version-history":[{"count":0,"href":"https:\/\/www.wsisp.com\/helps\/wp-json\/wp\/v2\/posts\/52960\/revisions"}],"wp:featuredmedia":[{"embeddable":true,"href":"https:\/\/www.wsisp.com\/helps\/wp-json\/wp\/v2\/media\/52956"}],"wp:attachment":[{"href":"https:\/\/www.wsisp.com\/helps\/wp-json\/wp\/v2\/media?parent=52960"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/www.wsisp.com\/
helps\/wp-json\/wp\/v2\/categories?post=52960"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/www.wsisp.com\/helps\/wp-json\/wp\/v2\/tags?post=52960"},{"taxonomy":"topic","embeddable":true,"href":"https:\/\/www.wsisp.com\/helps\/wp-json\/wp\/v2\/topic?post=52960"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}