Skip to content

Yolov7 #274

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 10 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
*.bmp
*.plan
.clang-format
.vscode

# Byte-compiled / optimized / DLL files
__pycache__/
Expand Down Expand Up @@ -141,3 +142,10 @@ dmypy.json

# Pyre type checker
.pyre/

# yolov7 things
/yolov7
data/
data/*
csrc/yolov7/jetson/output.mp4
*.trt
74 changes: 74 additions & 0 deletions csrc/yolov7/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
## YOLOv7 tiny End2End using TensorRT

YOLOv7-tiny accelerated with TensorRT, based on [WongKinYiu/yolov7.git](https://github.com/WongKinYiu/yolov7.git)

### Export ONNX with NMS

Pytorch to TensorRT with NMS (and inference)

```shell
wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7-tiny.pt
```

```shell
python export.py --weights ./yolov7-tiny.pt --grid --end2end --simplify --topk-all 100 --iou-thres 0.65 --conf-thres 0.35 --img-size 640 640
```

### Export TensorRT Engine

using [Linaom1214/tensorrt-python.git](https://github.com/Linaom1214/tensorrt-python.git) to export engine

```shell
git clone https://github.com/Linaom1214/tensorrt-python.git
python ./tensorrt-python/export.py -o yolov7-tiny.onnx -e yolov7-tiny-nms.trt -p fp16
```

or export by `trtexec` tools.

Usage:

```shell
/usr/src/tensorrt/bin/trtexec \
--onnx=yolov7-tiny.onnx \
--saveEngine=yolov7-tiny-nms.trt \
--fp16
```

If running on a Jetson device, you can set the `--memPoolSize` flag to a lower value:

```shell
/usr/src/tensorrt/bin/trtexec \
--onnx=yolov7-tiny.onnx \
--saveEngine=yolov7-tiny-nms.trt \
--fp16 \
--memPoolSize=workspace:1024MiB
```

### Inference with C++

You can run inference with C++ in [`csrc/detect/end2end`](https://github.com/nypyp/YOLOv8-TensorRT/blob/main/csrc/detect/end2end)

#### Build:

Please set your own library paths in [`CMakeLists.txt`](detect/end2end/CMakeLists.txt) and modify `CLASS_NAMES` and `COLORS` in [`main.cpp`](detect/end2end/main.cpp).

```shell
export root=${PWD}
cd csrc/detect/end2end
mkdir -p build && cd build
cmake ..
make
mv yolov7 ${root}
cd ${root}
```

Usage:

```shell
# infer image
./yolov7 yolov7-tiny-nms.trt data/bus.jpg
# infer images
./yolov7 yolov7-tiny-nms.trt data
# infer video
./yolov7 yolov7-tiny-nms.trt data/test.mp4 # the video path
```
85 changes: 85 additions & 0 deletions csrc/yolov7/detect/end2end/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
# CMAKE_CUDA_ARCHITECTURES is honored from 3.18 and target_link_directories()
# exists from 3.13; the original declared 3.12, below both requirements.
cmake_minimum_required(VERSION 3.18)

set(CMAKE_CUDA_ARCHITECTURES 60 61 62 70 72 75 86 89 90)
set(CMAKE_CUDA_COMPILER /usr/local/cuda/bin/nvcc)

project(yolov7 LANGUAGES CXX CUDA)

# Request C++14 via the standard knobs only.  The original additionally
# appended "-std=c++14 -O3" to CMAKE_CXX_FLAGS, which duplicates the
# standard flag and overrides user-supplied options; -O3 is already
# provided by the Release configuration selected below.
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

# Default to an optimized build, but respect a user-chosen build type
# instead of unconditionally forcing Release.
if(NOT CMAKE_BUILD_TYPE)
  set(CMAKE_BUILD_TYPE Release)
endif()

# option() signature is (<name> <help-text> [value]); the original passed
# OFF as the help text, leaving the value to default to OFF implicitly.
option(CUDA_USE_STATIC_CUDA_RUNTIME "Use the static CUDA runtime" OFF)

list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
include(Function)  # provides print_var()

# CUDA -- NOTE(review): the FindCUDA module is deprecated; it is kept
# because the CUDA_LIBRARIES / CUDA_INCLUDE_DIRS variables below rely on it.
find_package(CUDA REQUIRED)
print_var(CUDA_LIBRARIES)
print_var(CUDA_INCLUDE_DIRS)
get_filename_component(CUDA_LIB_DIR ${CUDA_LIBRARIES} DIRECTORY)
print_var(CUDA_LIB_DIR)

# OpenCV
find_package(OpenCV 4 REQUIRED)
print_var(OpenCV_LIBS)
print_var(OpenCV_LIBRARIES)
print_var(OpenCV_INCLUDE_DIRS)

# TensorRT (resolved by cmake/FindTensorRT.cmake)
find_package(TensorRT REQUIRED)
print_var(TensorRT_LIBRARIES)
print_var(TensorRT_INCLUDE_DIRS)
print_var(TensorRT_LIB_DIR)
# TensorRT 10 changed several APIs; the sources switch on -DTRT_10.
if(TensorRT_VERSION_MAJOR GREATER_EQUAL 10)
  message(STATUS "Build with -DTRT_10")
  add_definitions(-DTRT_10)
endif()

list(APPEND ALL_INCLUDE_DIRS
  ${CUDA_INCLUDE_DIRS}
  ${OpenCV_INCLUDE_DIRS}
  ${TensorRT_INCLUDE_DIRS}
  ${CMAKE_CURRENT_SOURCE_DIR}/include
)

list(APPEND ALL_LIBS
  ${CUDA_LIBRARIES}
  ${OpenCV_LIBRARIES}
  ${TensorRT_LIBRARIES}
)

list(APPEND ALL_LIB_DIRS
  ${CUDA_LIB_DIR}
  ${TensorRT_LIB_DIR}
)

print_var(ALL_INCLUDE_DIRS)
print_var(ALL_LIBS)
print_var(ALL_LIB_DIRS)

# Headers are listed alongside main.cpp only so IDE generators display
# them in the project tree; they do not affect compilation.
add_executable(${PROJECT_NAME}
  ${CMAKE_CURRENT_SOURCE_DIR}/main.cpp
  ${CMAKE_CURRENT_SOURCE_DIR}/include/yolov8.hpp
  ${CMAKE_CURRENT_SOURCE_DIR}/include/common.hpp
)

target_include_directories(${PROJECT_NAME} PUBLIC ${ALL_INCLUDE_DIRS})

target_link_directories(${PROJECT_NAME} PUBLIC ${ALL_LIB_DIRS})

target_link_libraries(${PROJECT_NAME} PRIVATE ${ALL_LIBS})
138 changes: 138 additions & 0 deletions csrc/yolov7/detect/end2end/cmake/FindTensorRT.cmake
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
# This module defines the following variables:
#
# ::
#
# TensorRT_INCLUDE_DIRS
# TensorRT_LIBRARIES
# TensorRT_LIB_DIR
# TensorRT_FOUND
#
# ::
#
# TensorRT_VERSION_STRING - version (x.y.z)
# TensorRT_VERSION_MAJOR - major version (x)
# TensorRT_VERSION_MINOR - minor version (y)
# TensorRT_VERSION_PATCH - patch version (z)
#
# Hints
# ^^^^^
# A user may set ``TensorRT_ROOT`` to an installation root to tell this module where to look.
#
# Accumulate search specifications.  Each list entry is the NAME of a
# variable holding find_* arguments; it is dereferenced below with the
# double expansion ${${search}}.
set(_TensorRT_SEARCHES)

# A user-supplied TensorRT_ROOT is searched exclusively (NO_DEFAULT_PATH).
if(TensorRT_ROOT)
set(_TensorRT_SEARCH_ROOT PATHS ${TensorRT_ROOT} NO_DEFAULT_PATH)
list(APPEND _TensorRT_SEARCHES _TensorRT_SEARCH_ROOT)
endif()

# Fall back to the common system prefix.
set(_TensorRT_SEARCH_NORMAL
PATHS "/usr"
)
list(APPEND _TensorRT_SEARCHES _TensorRT_SEARCH_NORMAL)

# Locate the public header directory.  find_path caches its result, so
# later loop iterations are no-ops once TensorRT_INCLUDE_DIR is set.
foreach(search ${_TensorRT_SEARCHES})
find_path(TensorRT_INCLUDE_DIR NAMES NvInfer.h ${${search}} PATH_SUFFIXES include)
endforeach()

# Locate the core nvinfer library and record its directory in
# TensorRT_LIB_DIR so consumers can pass it to target_link_directories().
if(NOT TensorRT_LIBRARY)
foreach(search ${_TensorRT_SEARCHES})
find_library(TensorRT_LIBRARY NAMES nvinfer ${${search}} PATH_SUFFIXES lib)
if(NOT TensorRT_LIB_DIR)
get_filename_component(TensorRT_LIB_DIR ${TensorRT_LIBRARY} DIRECTORY)
endif ()
endforeach()
endif()

# The plugin library is optional; its absence does not fail the module.
if(NOT TensorRT_nvinfer_plugin_LIBRARY)
foreach(search ${_TensorRT_SEARCHES})
find_library(TensorRT_nvinfer_plugin_LIBRARY NAMES nvinfer_plugin ${${search}} PATH_SUFFIXES lib)
endforeach()
endif()

mark_as_advanced(TensorRT_INCLUDE_DIR)

# Parse the version macros.  Newer TensorRT releases define them in
# NvInferVersion.h; older releases keep them in NvInfer.h itself.
if(TensorRT_INCLUDE_DIR AND EXISTS "${TensorRT_INCLUDE_DIR}/NvInfer.h")
if(EXISTS "${TensorRT_INCLUDE_DIR}/NvInferVersion.h")
set(_VersionSearchFile "${TensorRT_INCLUDE_DIR}/NvInferVersion.h")
else ()
set(_VersionSearchFile "${TensorRT_INCLUDE_DIR}/NvInfer.h")
endif ()
file(STRINGS "${_VersionSearchFile}" TensorRT_MAJOR REGEX "^#define NV_TENSORRT_MAJOR [0-9]+.*$")
file(STRINGS "${_VersionSearchFile}" TensorRT_MINOR REGEX "^#define NV_TENSORRT_MINOR [0-9]+.*$")
file(STRINGS "${_VersionSearchFile}" TensorRT_PATCH REGEX "^#define NV_TENSORRT_PATCH [0-9]+.*$")

# Reduce each "#define ..." line to its numeric component.
string(REGEX REPLACE "^#define NV_TENSORRT_MAJOR ([0-9]+).*$" "\\1" TensorRT_VERSION_MAJOR "${TensorRT_MAJOR}")
string(REGEX REPLACE "^#define NV_TENSORRT_MINOR ([0-9]+).*$" "\\1" TensorRT_VERSION_MINOR "${TensorRT_MINOR}")
string(REGEX REPLACE "^#define NV_TENSORRT_PATCH ([0-9]+).*$" "\\1" TensorRT_VERSION_PATCH "${TensorRT_PATCH}")
set(TensorRT_VERSION_STRING "${TensorRT_VERSION_MAJOR}.${TensorRT_VERSION_MINOR}.${TensorRT_VERSION_PATCH}")
endif()

# Sets TensorRT_FOUND after validating the required variables, and
# reports the discovered version.
include(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(TensorRT REQUIRED_VARS TensorRT_LIBRARY TensorRT_INCLUDE_DIR VERSION_VAR TensorRT_VERSION_STRING)

if(TensorRT_FOUND)
set(TensorRT_INCLUDE_DIRS ${TensorRT_INCLUDE_DIR})

# Aggregate the libraries; the plugin is appended only when present.
if(NOT TensorRT_LIBRARIES)
set(TensorRT_LIBRARIES ${TensorRT_LIBRARY})
if (TensorRT_nvinfer_plugin_LIBRARY)
list(APPEND TensorRT_LIBRARIES ${TensorRT_nvinfer_plugin_LIBRARY})
endif()
endif()

# Umbrella interface target; the component targets below are linked into
# it so consumers can simply link TensorRT::TensorRT.
if(NOT TARGET TensorRT::TensorRT)
add_library(TensorRT INTERFACE IMPORTED)
add_library(TensorRT::TensorRT ALIAS TensorRT)
endif()

# Imported target for the core runtime.  On Windows the .dll is the
# runtime artifact (IMPORTED_LOCATION) and the .lib the link-time
# import library (IMPORTED_IMPLIB).
if(NOT TARGET TensorRT::nvinfer)
add_library(TensorRT::nvinfer SHARED IMPORTED)
if (WIN32)
foreach(search ${_TensorRT_SEARCHES})
find_file(TensorRT_LIBRARY_DLL
NAMES nvinfer.dll
PATHS ${${search}}
PATH_SUFFIXES bin
)
endforeach()

set_target_properties(TensorRT::nvinfer PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES "${TensorRT_INCLUDE_DIRS}"
IMPORTED_LOCATION "${TensorRT_LIBRARY_DLL}"
IMPORTED_IMPLIB "${TensorRT_LIBRARY}"
)
else()
set_target_properties(TensorRT::nvinfer PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES "${TensorRT_INCLUDE_DIRS}"
IMPORTED_LOCATION "${TensorRT_LIBRARY}"
)
endif()
target_link_libraries(TensorRT INTERFACE TensorRT::nvinfer)
endif()

# Imported target for the plugin library, defined only when it was found.
if(NOT TARGET TensorRT::nvinfer_plugin AND TensorRT_nvinfer_plugin_LIBRARY)
add_library(TensorRT::nvinfer_plugin SHARED IMPORTED)
if (WIN32)
foreach(search ${_TensorRT_SEARCHES})
find_file(TensorRT_nvinfer_plugin_LIBRARY_DLL
NAMES nvinfer_plugin.dll
PATHS ${${search}}
PATH_SUFFIXES bin
)
endforeach()

set_target_properties(TensorRT::nvinfer_plugin PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES "${TensorRT_INCLUDE_DIRS}"
IMPORTED_LOCATION "${TensorRT_nvinfer_plugin_LIBRARY_DLL}"
IMPORTED_IMPLIB "${TensorRT_nvinfer_plugin_LIBRARY}"
)
else()
set_target_properties(TensorRT::nvinfer_plugin PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES "${TensorRT_INCLUDE_DIRS}"
IMPORTED_LOCATION "${TensorRT_nvinfer_plugin_LIBRARY}"
)
endif()
target_link_libraries(TensorRT INTERFACE TensorRT::nvinfer_plugin)
endif()
endif()
14 changes: 14 additions & 0 deletions csrc/yolov7/detect/end2end/cmake/Function.cmake
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@

# print_var(<var>)
#
# Pretty-print a variable's name and value via message(STATUS),
# ensuring the printed value ends with exactly one newline.
function(print_var var)
  set(value "${${var}}")
  string(LENGTH "${value}" value_length)
  # Initialize explicitly: the original read last_char while it was
  # undefined whenever the value was empty.
  set(last_char "")
  if(value_length GREATER 0)
    math(EXPR last_index "${value_length} - 1")
    # Extract exactly ONE character.  The original passed ${last_index}
    # as the <length> argument of string(SUBSTRING), which yields the
    # empty string for single-character values (length 0) and an
    # over-long range otherwise — mis-detecting the trailing newline.
    string(SUBSTRING "${value}" ${last_index} 1 last_char)
  endif()

  # Append a newline unless the value already ends with one.
  if(NOT "${last_char}" STREQUAL "\n")
    set(value "${value}\n")
  endif()
  message(STATUS "${var}:\n ${value}")
endfunction()
Loading