A modified CMakeLists for PaddleOCR's cpp_infer demo
cmake_minimum_required(VERSION 3.14)

list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake)
include(get_platform)
include(CPM)
include(import_library)

project(ppocr LANGUAGES CXX CUDA)

message(STATUS "CMAKE_SOURCE_DIR: ${CMAKE_SOURCE_DIR}")
message(STATUS "TARGET_PLATFORM_WINDOWS: ${TARGET_PLATFORM_WINDOWS}")

option(WITH_MKL "Compile demo with MKL/OpenBlas support, default use MKL." ON)
option(WITH_GPU "Compile demo with GPU/CPU, default use CPU." ON)
option(WITH_STATIC_LIB "Compile demo with static/shared library, default use static." ON)
option(WITH_TENSORRT "Compile demo with TensorRT." ON)

SET(PADDLE_LIB "D:\\data\\lib\\paddle_inference_v2.6_GPU" CACHE PATH "Location of libraries")
SET(OPENCV_DIR "D:\\data\\lib\\opencv\\4.5.5\\build\\x64\\vc15\\lib" CACHE PATH "Location of libraries")
SET(CUDA_LIB "C:\\Program Files\\NVIDIA GPU Computing Toolkit\\CUDA\\v12.0\\lib\\x64" CACHE PATH "Location of libraries")
SET(CUDNN_LIB "C:\\Program Files\\NVIDIA GPU Computing Toolkit\\CUDA\\v12.0\\lib\\x64" CACHE PATH "Location of libraries")
SET(TENSORRT_DIR "D:\\data\\lib\\TensorRT-8.6.1.6" CACHE PATH "Compile demo with TensorRT")
set(OpenCV_DIR "D:\\data\\lib\\opencv\\4.5.5\\build\\x64\\vc15\\lib")

set(DEMO_NAME "ppocr")
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY $<1:${CMAKE_SOURCE_DIR}/bin>)
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY $<1:${CMAKE_SOURCE_DIR}/bin>)
#find_package(CUDA REQUIRED)
#get_filename_component(CUDA_LIB_DIR ${CUDA_LIBRARIES} DIRECTORY)

CPMAddPackage("gh:gflags/gflags#v2.2.2")
CPMAddPackage("gh:LDOUBLEV/AutoLog#ba37c37b7d20607e5f675b57a6aa1202c60593e4")
CPMAddPackage("gh:google/glog#v0.7.0")
if (TARGET_PLATFORM_WINDOWS)
    CPMAddPackage("gh:tronkko/dirent#1.24")
endif ()

find_package(OpenCV REQUIRED)

set(PADDLE_LIB "D:\\data\\lib\\paddle_inference_v2.6_GPU")
#message(STATUS "AutoLog_SOURCE_DIR: ${AutoLog_SOURCE_DIR}")

list(APPEND PROJECT_INCLUDE_DIRS
        "${PADDLE_LIB}/paddle/include"
        ${AutoLog_SOURCE_DIR})
list(APPEND PROJECT_LIB_DIR
        "${PADDLE_LIB}/paddle/lib")
list(APPEND PROJECT_LIBRARIES
        gflags
        glog)

if (TARGET_PLATFORM_WINDOWS)
    list(APPEND PROJECT_INCLUDE_DIRS
            ${dirent_SOURCE_DIR}/include)
endif ()
macro(safe_set_static_flag)
    foreach (flag_var
            CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
            CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO)
        if (${flag_var} MATCHES "/MD")
            string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
        endif (${flag_var} MATCHES "/MD")
    endforeach (flag_var)
endmacro()
if (WITH_MKL)
    ADD_DEFINITIONS(-DUSE_MKL)
endif ()

message(STATUS "PADDLE_LIB: ${PADDLE_LIB}")
if (NOT DEFINED PADDLE_LIB)
    message(FATAL_ERROR "please set PADDLE_LIB with -DPADDLE_LIB=/path/paddle/lib")
endif ()
if (NOT DEFINED OPENCV_DIR)
    message(FATAL_ERROR "please set OPENCV_DIR with -DOPENCV_DIR=/path/opencv")
endif ()

if (WIN32)
    include_directories("${PADDLE_LIB}/paddle/include")
    link_directories("${PADDLE_LIB}/paddle/lib")
    find_package(OpenCV REQUIRED PATHS ${OPENCV_DIR}/build/ NO_DEFAULT_PATH)
else ()
    find_package(OpenCV REQUIRED PATHS ${OPENCV_DIR}/share/OpenCV NO_DEFAULT_PATH)
    include_directories("${PADDLE_LIB}/paddle/include")
    link_directories("${PADDLE_LIB}/paddle/lib")
endif ()
include_directories(${OpenCV_INCLUDE_DIRS})

if (WIN32)
    add_definitions("/DGOOGLE_GLOG_DLL_DECL=")
    if (WITH_MKL)
        set(FLAG_OPENMP "/openmp")
    endif ()
    set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} /bigobj /MTd ${FLAG_OPENMP}")
    set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} /bigobj /MT ${FLAG_OPENMP}")
    set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /bigobj /MTd ${FLAG_OPENMP}")
    set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} /bigobj /MT ${FLAG_OPENMP}")
    if (WITH_STATIC_LIB)
        safe_set_static_flag()
        add_definitions(-DSTATIC_LIB)
    endif ()
    message("cmake c debug flags " ${CMAKE_C_FLAGS_DEBUG})
    message("cmake c release flags " ${CMAKE_C_FLAGS_RELEASE})
    message("cmake cxx debug flags " ${CMAKE_CXX_FLAGS_DEBUG})
    message("cmake cxx release flags " ${CMAKE_CXX_FLAGS_RELEASE})
else ()
    if (WITH_MKL)
        set(FLAG_OPENMP "-fopenmp")
    endif ()
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O3 ${FLAG_OPENMP} -std=c++11")
    set(CMAKE_STATIC_LIBRARY_PREFIX "")
    message("cmake cxx flags " ${CMAKE_CXX_FLAGS})
endif ()
if (WITH_GPU)
    # if (NOT DEFINED CUDA_LIB OR ${CUDA_LIB} STREQUAL "")
    #     message(FATAL_ERROR "please set CUDA_LIB with -DCUDA_LIB=/path/cuda-8.0/lib64")
    # endif()
    if (NOT WIN32)
        if (NOT DEFINED CUDNN_LIB)
            message(FATAL_ERROR "please set CUDNN_LIB with -DCUDNN_LIB=/path/cudnn_v7.4/cuda/lib64")
        endif ()
    endif (NOT WIN32)
endif ()

include_directories("${PADDLE_LIB}/third_party/install/protobuf/include")
include_directories("${PADDLE_LIB}/third_party/install/glog/include")
include_directories("${PADDLE_LIB}/third_party/install/gflags/include")
include_directories("${PADDLE_LIB}/third_party/install/xxhash/include")
include_directories("${PADDLE_LIB}/third_party/install/zlib/include")
include_directories("${PADDLE_LIB}/third_party/install/onnxruntime/include")
include_directories("${PADDLE_LIB}/third_party/install/paddle2onnx/include")
include_directories("${PADDLE_LIB}/third_party/boost")
include_directories("${PADDLE_LIB}/third_party/eigen3")
include_directories("${CMAKE_SOURCE_DIR}/")

if (NOT WIN32)
    if (WITH_TENSORRT AND WITH_GPU)
        include_directories("${TENSORRT_DIR}/include")
        link_directories("${TENSORRT_DIR}/lib")
    endif ()
endif (NOT WIN32)

link_directories("${PADDLE_LIB}/third_party/install/zlib/lib")
link_directories("${PADDLE_LIB}/third_party/install/protobuf/lib")
link_directories("${PADDLE_LIB}/third_party/install/glog/lib")
link_directories("${PADDLE_LIB}/third_party/install/gflags/lib")
link_directories("${PADDLE_LIB}/third_party/install/xxhash/lib")
link_directories("${PADDLE_LIB}/third_party/install/onnxruntime/lib")
link_directories("${PADDLE_LIB}/third_party/install/paddle2onnx/lib")
link_directories("${PADDLE_LIB}/paddle/lib")

if (WITH_MKL)
    include_directories("${PADDLE_LIB}/third_party/install/mklml/include")
    if (WIN32)
        set(MATH_LIB ${PADDLE_LIB}/third_party/install/mklml/lib/mklml.lib
                ${PADDLE_LIB}/third_party/install/mklml/lib/libiomp5md.lib)
    else ()
        set(MATH_LIB ${PADDLE_LIB}/third_party/install/mklml/lib/libmklml_intel${CMAKE_SHARED_LIBRARY_SUFFIX}
                ${PADDLE_LIB}/third_party/install/mklml/lib/libiomp5${CMAKE_SHARED_LIBRARY_SUFFIX})
        execute_process(COMMAND cp -r ${PADDLE_LIB}/third_party/install/mklml/lib/libmklml_intel${CMAKE_SHARED_LIBRARY_SUFFIX} /usr/lib)
    endif ()
    set(MKLDNN_PATH "${PADDLE_LIB}/third_party/install/mkldnn")
    if (EXISTS ${MKLDNN_PATH})
        include_directories("${MKLDNN_PATH}/include")
        if (WIN32)
            set(MKLDNN_LIB ${MKLDNN_PATH}/lib/mkldnn.lib)
        else ()
            set(MKLDNN_LIB ${MKLDNN_PATH}/lib/libmkldnn.so.0)
        endif ()
    endif ()
else ()
    if (WIN32)
        set(MATH_LIB ${PADDLE_LIB}/third_party/install/openblas/lib/openblas${CMAKE_STATIC_LIBRARY_SUFFIX})
    else ()
        set(MATH_LIB ${PADDLE_LIB}/third_party/install/openblas/lib/libopenblas${CMAKE_STATIC_LIBRARY_SUFFIX})
    endif ()
endif ()
# Note: libpaddle_inference_api.so/a must be listed before libpaddle_inference.so/a
if (WITH_STATIC_LIB)
    if (WIN32)
        set(DEPS
                ${PADDLE_LIB}/paddle/lib/paddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX})
    else ()
        set(DEPS
                ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_STATIC_LIBRARY_SUFFIX})
    endif ()
else ()
    if (WIN32)
        set(DEPS
                ${PADDLE_LIB}/paddle/lib/paddle_inference${CMAKE_SHARED_LIBRARY_SUFFIX})
    else ()
        set(DEPS
                ${PADDLE_LIB}/paddle/lib/libpaddle_inference${CMAKE_SHARED_LIBRARY_SUFFIX})
    endif ()
endif (WITH_STATIC_LIB)

if (NOT WIN32)
    set(DEPS ${DEPS}
            ${MATH_LIB} ${MKLDNN_LIB}
            glog gflags protobuf z xxhash
            )
    if (EXISTS "${PADDLE_LIB}/third_party/install/snappystream/lib")
        set(DEPS ${DEPS} snappystream)
    endif ()
    if (EXISTS "${PADDLE_LIB}/third_party/install/snappy/lib")
        set(DEPS ${DEPS} snappy)
    endif ()
else ()
    set(DEPS ${DEPS}
            ${MATH_LIB} ${MKLDNN_LIB}
            glog gflags_static libprotobuf xxhash)
    set(DEPS ${DEPS} libcmt shlwapi)
    if (EXISTS "${PADDLE_LIB}/third_party/install/snappy/lib")
        set(DEPS ${DEPS} snappy)
    endif ()
    if (EXISTS "${PADDLE_LIB}/third_party/install/snappystream/lib")
        set(DEPS ${DEPS} snappystream)
    endif ()
endif (NOT WIN32)

if (WITH_GPU)
    if (NOT WIN32)
        if (WITH_TENSORRT)
            set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/libnvinfer${CMAKE_SHARED_LIBRARY_SUFFIX})
            set(DEPS ${DEPS} ${TENSORRT_DIR}/lib/libnvinfer_plugin${CMAKE_SHARED_LIBRARY_SUFFIX})
        endif ()
        set(DEPS ${DEPS} ${CUDA_LIB}/libcudart${CMAKE_SHARED_LIBRARY_SUFFIX})
        set(DEPS ${DEPS} ${CUDNN_LIB}/libcudnn${CMAKE_SHARED_LIBRARY_SUFFIX})
    else ()
        set(DEPS ${DEPS} ${CUDA_LIB}/cudart${CMAKE_STATIC_LIBRARY_SUFFIX})
        set(DEPS ${DEPS} ${CUDA_LIB}/cublas${CMAKE_STATIC_LIBRARY_SUFFIX})
        set(DEPS ${DEPS} ${CUDNN_LIB}/cudnn${CMAKE_STATIC_LIBRARY_SUFFIX})
    endif ()
endif ()

if (NOT WIN32)
    set(EXTERNAL_LIB "-ldl -lrt -lgomp -lz -lm -lpthread")
    set(DEPS ${DEPS} ${EXTERNAL_LIB})
endif ()

set(DEPS ${DEPS} ${OpenCV_LIBS})

include(FetchContent)
#include(external-cmake/auto-log.cmake)
#include_directories(${FETCHCONTENT_BASE_DIR}/extern_autolog-src)

include_directories(${PROJECT_INCLUDE_DIRS})
link_directories(${PROJECT_LIB_DIR})
link_libraries(${PROJECT_LIBRARIES})

message(STATUS "Project lib dirs: \n${PROJECT_LIB_DIR}\n")
message(STATUS "Project include dirs: \n${PROJECT_INCLUDE_DIRS}\n")
message(STATUS "Project depends libs: \n${PROJECT_LIBRARIES}\n")

AUX_SOURCE_DIRECTORY(./src SRCS)
add_executable(${DEMO_NAME} ${SRCS})
target_link_libraries(${DEMO_NAME} ${DEPS})

if (WIN32 AND WITH_MKL)
    add_custom_command(TARGET ${DEMO_NAME} POST_BUILD
            COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/mklml/lib/mklml.dll ./mklml.dll
            COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/mklml/lib/libiomp5md.dll ./libiomp5md.dll
            COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/mkldnn/lib/mkldnn.dll ./mkldnn.dll
            COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/mklml/lib/mklml.dll ./release/mklml.dll
            COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/mklml/lib/libiomp5md.dll ./release/libiomp5md.dll
            COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PADDLE_LIB}/third_party/install/mkldnn/lib/mkldnn.dll ./release/mkldnn.dll
            )
endif ()

# copy third party dll
add_custom_command(TARGET ${PROJECT_NAME} POST_BUILD
        COMMAND ${CMAKE_COMMAND} -E copy
        $<TARGET_RUNTIME_DLLS:${PROJECT_NAME}>
        $<TARGET_FILE_DIR:${PROJECT_NAME}>
        COMMAND_EXPAND_LISTS)
A cmake folder was added to hold the .cmake helper modules.
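Those helper modules are not included in the gist, so their contents here are assumptions: cmake/CPM.cmake would be the vendored script from github.com/cpm-cmake/CPM.cmake, and cmake/get_platform.cmake presumably just maps CMAKE_SYSTEM_NAME onto the TARGET_PLATFORM_WINDOWS flag queried by the CMakeLists above. A minimal sketch of such a module, under that assumption, might look like:

# cmake/get_platform.cmake -- hypothetical sketch, not the gist author's actual module
# Map CMAKE_SYSTEM_NAME onto the TARGET_PLATFORM_* flags used by the main CMakeLists.
if (CMAKE_SYSTEM_NAME STREQUAL "Windows")
    set(TARGET_PLATFORM_WINDOWS ON)
elseif (CMAKE_SYSTEM_NAME STREQUAL "Linux")
    set(TARGET_PLATFORM_LINUX ON)
endif ()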