This is only a brief walkthrough; many details are omitted.

  1. OS: Ubuntu 16.04
  2. Python 2.7
  3. OpenCV 3.3.0

Main contents

1. Install SSD
2. Prepare the data
3. Train

1. Install SSD

You can install it by following the official tutorial:

# 1. Get the code
git clone https://github.com/weiliu89/caffe.git
cd caffe
git checkout ssd
# 2. Build the code
# Modify Makefile.config according to your Caffe installation.
cp Makefile.config.example Makefile.config
make -j8
# Make sure to include $CAFFE_ROOT/python in your PYTHONPATH.
make py
make test -j8
# (Optional)
make runtest -j8
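Once make py succeeds, a quick way to confirm the Python interface works is to import caffe with $CAFFE_ROOT/python on the Python path. A minimal check (the path below is a placeholder for your own checkout):

import sys
# Placeholder: replace with the actual path to your caffe checkout.
sys.path.insert(0, '/path/to/caffe/python')

import caffe
print(caffe.__file__)  # should point into <caffe_root>/python/caffe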

Various errors can show up during the build (make -j8); it helps to edit Makefile.config first:

# Modified lines
INCLUDE_DIRS := $(PYTHON_INCLUDE) /usr/local/include /usr/include/hdf5/serial
LIBRARY_DIRS := $(PYTHON_LIB) /usr/local/lib /usr/lib /usr/lib/x86_64-linux-gnu/hdf5/serial
With OpenCV 3.3.0 you will also hit a number of newer incompatibilities.

Some of the SSD sources need to be patched (see the referenced blog posts):

/caffe/src/caffe/layers/video_data_layer.cpp

// Add this header
#include <opencv2/videoio.hpp>
// Add the cv namespace
using namespace cv;
// Drop the CV_ prefix from CV_CAP_PROP_FRAME_COUNT and CV_CAP_PROP_POS_FRAMES

/caffe/src/caffe/util/bbox_util.cpp

// Replace CV_RGB with cv::Scalar, but note the difference: CV_RGB takes colors in RGB order while cv::Scalar is BGR
// Replace CV_FILLED with cv::FILLED

/caffe/src/caffe/util/im_transforms.cpp

// Add the following two lines
#define CV_BGR2HSV cv::COLOR_BGR2HSV
#define CV_BGR2Lab cv::COLOR_BGR2Lab

The Makefile itself (not Makefile.config) also needs to be edited:

# Modify this line
LIBRARIES += glog gflags protobuf boost_system boost_filesystem boost_regex m hdf5_serial_hl hdf5_serial opencv_imgcodecs
# And modify this block
ifeq ($(USE_OPENCV), 1)
	LIBRARIES += opencv_core opencv_highgui opencv_imgproc opencv_videoio
	ifeq ($(OPENCV_VERSION), 3)
		LIBRARIES += glog gflags protobuf leveldb snappy lmdb boost_system boost_filesystem hdf5_hl hdf5 m opencv_core opencv_highgui opencv_imgproc opencv_imgcodecs opencv_videoio
		#LIBRARIES += opencv_imgcodecs opencv_videoio
	endif
endif

Apart from the two LIBRARIES changes above, the Makefile is identical to the stock one shipped on the ssd branch.


2. Prepare the data

Put the data under /home/data (see the official installation instructions for the exact folder layout).

cd ..
cd caffe/
./data/VOC0712/create_list.sh
./data/VOC0712/create_data.sh
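create_list.sh writes the trainval/test image lists and create_data.sh converts them into LMDB databases (linked under examples/VOC0712/ by default). As a quick sanity check that the conversion actually produced records, you can open the database with the Python lmdb package; this is only a sketch and assumes the default output path, which may differ on your setup:

import lmdb

# Assumed default output of create_data.sh; adjust to your own layout.
db_path = 'examples/VOC0712/VOC0712_trainval_lmdb'

env = lmdb.open(db_path, readonly=True, lock=False)
with env.begin() as txn:
    print('entries in {}: {}'.format(db_path, txn.stat()['entries']))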

3. Train

# If the loss goes to NaN, reduce the learning rate (https://zhuanlan.zhihu.com/p/25110930)
python examples/ssd/ssd_pascal.py
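ssd_pascal.py keeps the solver and data settings as plain variables near the top of the script, so the NaN fix is just an edit there. The snippet below only illustrates the kind of values typically changed; the exact defaults in your copy of the script may differ, and the numbers here are examples, not recommendations:

# Illustrative edits near the top of examples/ssd/ssd_pascal.py
# (variable names follow the upstream script; values are examples only).
gpus = "0"          # train on a single GPU instead of the default multi-GPU list
batch_size = 16     # lower this if the GPU runs out of memory
base_lr = 0.0001    # reduce (e.g. by 10x) if the loss goes to NaN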

Test on the image dataset

python examples/ssd/score_ssd_pascal.py

You can test detection on a single image with caffe/examples/ssd_detect.ipynb (watch the paths the notebook loads files from; if it errors out, change them to absolute paths).
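The notebook essentially loads the trained net with pycaffe, preprocesses one image to 300x300, and reads the detection_out blob. A stripped-down sketch of that flow is below; the prototxt/caffemodel paths are placeholders for whatever your own training run produced, and the preprocessing follows the usual SSD-300 setup (mean subtraction, BGR channel order):

import numpy as np
import caffe

caffe.set_mode_gpu()

# Placeholder paths: point these at the files your training run produced.
model_def = 'models/VGGNet/VOC0712/SSD_300x300/deploy.prototxt'
model_weights = 'models/VGGNet/VOC0712/SSD_300x300/VGG_VOC0712_SSD_300x300_iter_120000.caffemodel'

net = caffe.Net(model_def, model_weights, caffe.TEST)

# Usual SSD-300 preprocessing: 300x300 input, mean subtraction, BGR, channels first.
transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_transpose('data', (2, 0, 1))
transformer.set_mean('data', np.array([104, 117, 123]))
transformer.set_raw_scale('data', 255)
transformer.set_channel_swap('data', (2, 1, 0))

image = caffe.io.load_image('examples/images/fish-bike.jpg')  # any test image
net.blobs['data'].reshape(1, 3, 300, 300)
net.blobs['data'].data[...] = transformer.preprocess('data', image)

# Each detection row: [image_id, label, confidence, xmin, ymin, xmax, ymax] in relative coords.
detections = net.forward()['detection_out']
for det in detections[0, 0]:
    if det[2] >= 0.6:  # confidence threshold
        print('label %d  score %.2f  box %s' % (int(det[1]), det[2], det[3:7]))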

References:

  1. http://blog.csdn.net/u013915633/article/details/52530130
  2. http://www.jianshu.com/p/4eaedaeafcb4
  3. https://zhuanlan.zhihu.com/p/25110930
