天天看点

61、Window11+Clion+MinGW32编译MNN使用

基本思想:使用MinGW32编译一下MNN进行推理使用

一、下载代码

D:\>git clone https://github.com/alibaba/MNN.git
Cloning into 'MNN'...
remote: Enumerating objects: 21338, done.
remote: Counting objects: 100% (3153/3153), done.
remote: Compressing objects: 100% (1451/1451), done.
remote: Total 21338 (delta 1711), reused 3016 (delta 1670), pack-reused 18185
Receiving objects: 100% (21338/21338), 163.96 MiB | 473.00 KiB/s, done.

Resolving deltas: 100% (14511/14511), done.
Updating files: 100% (3709/3709), done.

D:\>cd MNN

D:\MNN>mkdir buildMinGW

D:\MNN>cd buildMinGW      

二、进行编译,因为我使用的MinGW32编译器,所以需要修改一下 (20220308新版本之后,可以直接编译,不用修改了)D:\MNN\source\backend\cpu\x86_x64\cmakelists.txt,否则编译的静态库在MinGW32使用过程中会崩溃。将下列对应的红框下的options的-DMNN_X86_USE_ASM都去掉

61、Window11+Clion+MinGW32编译MNN使用

开始编译

D:\MNN\buildMinGW>cmake -G"MinGW Makefiles" -DCMAKE_INSTALL_PREFIX=%cd%/install -DMNN_SEP_BUILD=OFF -DNATIVE_LIBRARY_OUTPUT=. -DNATIVE_INCLUDE_OUTPUT=. -DMNN_BUILD_SHARED_LIBS=OFF ..

D:\MNN\buildMinGW>mingw32-make -j8
Scanning dependencies of target MNN_CL
Scanning dependencies of target MNNCV
Scanning dependencies of target MNNCPU
Scanning dependencies of target MNNX8664
Scanning dependencies of target MNNAVXFMA
Scanning dependencies of target MNNAVX
[  0%] Building CXX object CMakeFiles/MNNCore.dir/source/core/Interpreter.cpp.obj
[  0%] Building CXX object CMakeFiles/MNNCV.dir/source/cv/ImageProcess.cpp.obj
[  0%] Building CXX object CMakeFiles/MNNX8664.dir/source/backend/cpu/x86_x64/AVX2Backend.cpp.obj
[  1%] Building CXX object CMakeFiles/MNNAVXFMA.dir/source/backend/cpu/x86_x64/avxfma/GemmAVX2FMA.cpp.obj
[  1%] Building CXX object CMakeFiles/MNNAVX.dir/source/backend/cpu/x86_x64/avx/GemmAVX2.cpp.obj
[  1%] Building CXX object source/backend/opencl/CMakeFiles/MNN_CL.dir/core/BufferConvertor.cpp.obj
[  1%] Building CXX object CMakeFiles/MNNCPU.dir/source/backend/cpu/CPUArgMax.cpp.obj
[  1%] Building CXX object CMakeFiles/MNNCV.dir/source/cv/Matrix_CV.cpp.obj

[ 99%] Building CXX object CMakeFiles/aoa_nlu_decoder2.out.dir/tools/cpp/revertMNNModel.cpp.obj
[ 99%] Built target aoa_nlu_decoder1.out
[ 99%] Built target testTrain.out
[ 99%] Built target MNNV2Basic.out
[ 99%] Linking CXX executable aoa_nlu_encoder.out.exe
[ 99%] Built target aoa_nlu_encoder.out
[100%] Linking CXX executable timeProfile.out.exe
[100%] Built target timeProfile.out
[100%] Linking CXX executable aoa_nlu_decoder2.out.exe
[100%] Built target aoa_nlu_decoder2.out

D:\MNN\buildMinGW>mingw32-make install

Install the project...
-- Install configuration: "Release"
-- Installing: D:/MNN/buildMinGW/install/include/MNN/MNNDefine.h
-- Installing: D:/MNN/buildMinGW/install/include/MNN/Interpreter.hpp
-- Installing: D:/MNN/buildMinGW/install/include/MNN/HalideRuntime.h
-- Installing: D:/MNN/buildMinGW/install/include/MNN/Tensor.hpp
-- Installing: D:/MNN/buildMinGW/install/include/MNN/ErrorCode.hpp
-- Installing: D:/MNN/buildMinGW/install/include/MNN/ImageProcess.hpp
-- Installing: D:/MNN/buildMinGW/install/include/MNN/Matrix.h
-- Installing: D:/MNN/buildMinGW/install/include/MNN/Rect.h
-- Installing: D:/MNN/buildMinGW/install/include/MNN/MNNForwardType.h
-- Installing: D:/MNN/buildMinGW/install/include/MNN/AutoTime.hpp
-- Installing: D:/MNN/buildMinGW/install/include/MNN/MNNSharedContext.h
-- Installing: D:/MNN/buildMinGW/install/include/MNN/expr/Expr.hpp
-- Installing: D:/MNN/buildMinGW/install/include/MNN/expr/ExprCreator.hpp
-- Installing: D:/MNN/buildMinGW/install/include/MNN/expr/MathOp.hpp
-- Installing: D:/MNN/buildMinGW/install/include/MNN/expr/NeuralNetWorkOp.hpp
-- Installing: D:/MNN/buildMinGW/install/include/MNN/expr/Optimizer.hpp
-- Installing: D:/MNN/buildMinGW/install/include/MNN/expr/Executor.hpp
-- Installing: D:/MNN/buildMinGW/install/include/MNN/expr/Module.hpp
-- Up-to-date: D:/MNN/buildMinGW/install/include/MNN/expr/NeuralNetWorkOp.hpp
-- Installing: D:/MNN/buildMinGW/install/include/MNN/expr/ExecutorScope.hpp
-- Installing: D:/MNN/buildMinGW/install/include/MNN/expr/Scope.hpp
-- Installing: D:/MNN/buildMinGW/install/lib/libMNN.a      

若要同时生成 MNN 模型转换工具,需要在 cmake 配置时额外开启转换器选项:

D:\MNN\buildMinGW>cmake -G"MinGW Makefiles" -DCMAKE_INSTALL_PREFIX=%cd%/install -DMNN_SEP_BUILD=OFF -DNATIVE_LIBRARY_OUTPUT=. -DNATIVE_INCLUDE_OUTPUT=. -DMNN_BUILD_SHARED_LIBS=OFF -DMNN_BUILD_CONVERTER=ON ..

三、测试一下

cmakelists.txt

# Minimal consumer project that links the statically built MNN library
# produced by the MinGW32 build above (libMNN.a + installed headers).
cmake_minimum_required(VERSION 3.16)
project(untitled9)

set(CMAKE_CXX_STANDARD 11)
# Headers copied from D:/MNN/buildMinGW/install/include into <project>/include.
include_directories(${CMAKE_SOURCE_DIR}/include)

# Declare the prebuilt static archive as an IMPORTED target so it can be
# linked like a normal CMake target.
add_library(libMNN STATIC IMPORTED)
set_target_properties(libMNN PROPERTIES IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/lib/libMNN.a)

add_executable(untitled9 main.cpp)

target_link_libraries(untitled9 libMNN)
# Alternative link line: force the whole archive in (useful if registered
# backends/ops are dropped by the linker) and add zlib + libm.
#target_link_libraries(untitled9
#        -Wl,--whole-archive
#        libMNN
#        -Wl,--no-whole-archive
#        z m
#        )      
#include <iostream>
#include <memory>

#include <MNN/Interpreter.hpp>
#include <MNN/ImageProcess.hpp>

using namespace std;

using namespace MNN;

// Demo: load a .mnn model, feed an all-ones input, run inference on CPU and
// print the raw output scores.
int main()
{
    // Create the interpreter from a serialized model file.
    auto net = std::shared_ptr<MNN::Interpreter>(MNN::Interpreter::createFromFile("F:\\untitled9\\model-87_384_sim.mnn"));
    cout << "Interpreter created" << endl;

    ScheduleConfig config;
    config.numThread = 8;
    config.type = MNN_FORWARD_CPU;

    MNN::BackendConfig backendConfig;
    // memory / power / precision are the backend's memory, power and
    // precision preferences (cast to the corresponding enum values below).
    int precision  = 2;
    int power      = 0;
    int memory     = 0;
    backendConfig.precision = (MNN::BackendConfig::PrecisionMode)precision;
    backendConfig.power = (MNN::BackendConfig::PowerMode) power;
    backendConfig.memory = (MNN::BackendConfig::MemoryMode) memory;
    config.backendConfig = &backendConfig;

    // Create the inference session.
    auto session = net->createSession(config);
    cout << "session created" << endl;

    auto inTensor = net->getSessionInput(session, nullptr);
    // Fix: the original called getSessionInput() here, which returned the
    // input tensor again instead of the output tensor.
    auto outTensor = net->getSessionOutput(session, nullptr);
    (void)outTensor; // fetched again after runSession() below

    // Host-side staging tensor in CAFFE (NCHW) layout. Tensor::create()
    // returns a raw owning pointer, so wrap it to avoid the original leak.
    std::unique_ptr<MNN::Tensor> hostInput(
        MNN::Tensor::create<float>(inTensor->shape(), nullptr, MNN::Tensor::CAFFE));

    // Fill the input with a constant 1.0f pattern.
    for (int i = 0; i < hostInput->elementSize(); i++) {
        hostInput->host<float>()[i] = 1.f;
    }
    inTensor->copyFromHostTensor(hostInput.get());

    // Run inference.
    net->runSession(session);
    auto output = net->getSessionOutput(session, nullptr);

    // Copy the device-side output into a host tensor before reading it.
    MNN::Tensor score_host(output, output->getDimensionType());
    output->copyToHostTensor(&score_host);

    auto score_ptr = score_host.host<float>();
    // Print scores; the condition intentionally matches the original layout
    // (first row holds 6 values, subsequent rows 5 — see the captured log).
    for (int i = 0; i < score_host.elementSize(); ++i) {
        float score = score_ptr[i];
        if (i % 5 != 0 || i == 0) {
            std::cout << score << " ,";
        } else {
            std::cout << score << std::endl;
        }
    }

    return 0;
}
session created
1.02838 ,0.0331761 ,1.20096 ,0.465227 ,0.0782377 ,0.32542
0.266992 ,0.626033 ,-1.99909 ,1.51457 ,-1.55075
-1.02152 ,1.86524 ,-0.728613 ,-0.827451 ,2.82252
2.0388 ,-1.62309 ,-0.837221 ,1.10814 ,0.417276
0.0954745 ,0.550863 ,-2.34457 ,0.324847 ,1.90036
0.0733785 ,-0.607764 ,-0.915544 ,0.458481 ,-0.445877
-0.617788 ,0.0879285 ,-1.10013 ,-0.0815222 ,-1.22926
0.223109 ,-1.53992 ,-2.24226 ,-0.0978978 ,
Process finished with exit code 0      

继续阅读