author     palkeo    2015-07-17 13:21:06 +0200
committer  palkeo    2015-07-17 13:23:39 +0200
commit     31261aa218f5d3a88e15dbf7b6d3d1af81d47997 (patch)
tree       34fca4659d69d020cbc4dda55b7412f785af6082
download   aur-31261aa218f5d3a88e15dbf7b6d3d1af81d47997.tar.gz
Initial commit.
-rw-r--r--  .SRCINFO          47
-rw-r--r--  Makefile         636
-rw-r--r--  Makefile.config   93
-rw-r--r--  PKGBUILD          95
4 files changed, 871 insertions, 0 deletions
diff --git a/.SRCINFO b/.SRCINFO
new file mode 100644
index 000000000000..7f4ed6252f7f
--- /dev/null
+++ b/.SRCINFO
@@ -0,0 +1,47 @@
+pkgbase = apollo-git
+ pkgdesc = Framework for deep learning in Python built on top of Caffe
+ pkgver = r3125.c9e7761
+ pkgrel = 1
+ url = https://github.com/Russell91/apollo
+ arch = x86_64
+ license = custom
+ makedepends = git
+ makedepends = python2-setuptools
+ depends = cuda
+ depends = opencv
+ depends = openblas-lapack
+ depends = google-glog
+ depends = gflags
+ depends = liblmdb
+ depends = cython2
+ depends = ipython2
+ depends = python2-pillow
+ depends = python2-numpy
+ depends = python2-yaml
+ depends = python2-numpy
+ depends = python2-scipy
+ depends = python2-scikit-image
+ depends = python2-scikit-learn
+ depends = python2-matplotlib
+ depends = python2-h5py
+ depends = python2-leveldb-svn
+ depends = python2-networkx
+ depends = python2-nose
+ depends = python2-pandas
+ depends = python2-dateutil
+ depends = python2-protobuf
+ depends = python2-gflags
+ depends = python2-pandas
+ depends = boost
+ depends = boost-libs
+ depends = bc
+ conflicts = caffe
+ source = git+https://github.com/Russell91/apollo.git
+ source = Makefile.config
+ source = Makefile
+ sha256sums = SKIP
+ sha256sums = SKIP
+ sha256sums = SKIP
+
+pkgname = apollo-git
+
diff --git a/Makefile b/Makefile
new file mode 100644
index 000000000000..f70b41e6daa9
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,636 @@
+PROJECT := caffe
+
+CONFIG_FILE := Makefile.config
+# Explicitly check for the config file, otherwise make -k will proceed anyway.
+ifeq ($(wildcard $(CONFIG_FILE)),)
+$(error $(CONFIG_FILE) not found. See $(CONFIG_FILE).example.)
+endif
+include $(CONFIG_FILE)
+
+BUILD_DIR_LINK := $(BUILD_DIR)
+ifeq ($(RELEASE_BUILD_DIR),)
+ RELEASE_BUILD_DIR := .$(BUILD_DIR)_release
+endif
+ifeq ($(DEBUG_BUILD_DIR),)
+ DEBUG_BUILD_DIR := .$(BUILD_DIR)_debug
+endif
+
+DEBUG ?= 0
+ifeq ($(DEBUG), 1)
+ BUILD_DIR := $(DEBUG_BUILD_DIR)
+ OTHER_BUILD_DIR := $(RELEASE_BUILD_DIR)
+else
+ BUILD_DIR := $(RELEASE_BUILD_DIR)
+ OTHER_BUILD_DIR := $(DEBUG_BUILD_DIR)
+endif
+
+# All of the directories containing code.
+SRC_DIRS := $(shell find * -type d -exec bash -c "find {} -maxdepth 1 \
+ \( -name '*.cpp' -o -name '*.proto' \) | grep -q ." \; -print)
+
+# The target shared library name
+LIB_BUILD_DIR := $(BUILD_DIR)/lib
+STATIC_NAME := $(LIB_BUILD_DIR)/lib$(PROJECT).a
+DYNAMIC_NAME := $(LIB_BUILD_DIR)/lib$(PROJECT).so
+
+##############################
+# Get all source files
+##############################
+# CXX_SRCS are the source files excluding the test ones.
+CXX_SRCS := $(shell find src/$(PROJECT) ! -name "test_*.cpp" -name "*.cpp")
+# CU_SRCS are the cuda source files
+CU_SRCS := $(shell find src/$(PROJECT) ! -name "test_*.cu" -name "*.cu")
+# TEST_SRCS are the test source files
+TEST_MAIN_SRC := src/$(PROJECT)/test/test_caffe_main.cpp
+TEST_SRCS := $(shell find src/$(PROJECT) -name "test_*.cpp")
+TEST_SRCS := $(filter-out $(TEST_MAIN_SRC), $(TEST_SRCS))
+TEST_CU_SRCS := $(shell find src/$(PROJECT) -name "test_*.cu")
+GTEST_SRC := src/gtest/gtest-all.cpp
+# TOOL_SRCS are the source files for the tool binaries
+TOOL_SRCS := $(shell find tools -name "*.cpp")
+# EXAMPLE_SRCS are the source files for the example binaries
+EXAMPLE_SRCS := $(shell find examples -name "*.cpp")
+# BUILD_INCLUDE_DIR contains any generated header files we want to include.
+BUILD_INCLUDE_DIR := $(BUILD_DIR)/src
+# PROTO_SRCS are the protocol buffer definitions
+PROTO_SRC_DIR := src/$(PROJECT)/proto
+PROTO_SRCS := $(wildcard $(PROTO_SRC_DIR)/*.proto)
+# PROTO_BUILD_DIR will contain the .cc and obj files generated from
+# PROTO_SRCS; PROTO_BUILD_INCLUDE_DIR will contain the .h header files
+PROTO_BUILD_DIR := $(BUILD_DIR)/$(PROTO_SRC_DIR)
+PROTO_BUILD_INCLUDE_DIR := $(BUILD_INCLUDE_DIR)/$(PROJECT)/proto
+# NONGEN_CXX_SRCS includes all source/header files except those generated
+# automatically (e.g., by proto).
+NONGEN_CXX_SRCS := $(shell find \
+ src/$(PROJECT) \
+ include/$(PROJECT) \
+ python/$(PROJECT) \
+ matlab/+$(PROJECT)/private \
+ examples \
+ tools \
+ -name "*.cpp" -or -name "*.hpp" -or -name "*.cu" -or -name "*.cuh")
+LINT_SCRIPT := scripts/cpp_lint.py
+LINT_OUTPUT_DIR := $(BUILD_DIR)/.lint
+LINT_EXT := lint.txt
+LINT_OUTPUTS := $(addsuffix .$(LINT_EXT), $(addprefix $(LINT_OUTPUT_DIR)/, $(NONGEN_CXX_SRCS)))
+EMPTY_LINT_REPORT := $(BUILD_DIR)/.$(LINT_EXT)
+NONEMPTY_LINT_REPORT := $(BUILD_DIR)/$(LINT_EXT)
+# PY$(PROJECT)_SRC is the python wrapper for $(PROJECT)
+PY$(PROJECT)_SRC := python/$(PROJECT)/_$(PROJECT).cpp
+PY$(PROJECT)_SO := python/$(PROJECT)/_$(PROJECT).so
+PY$(PROJECT)_HXX := include/$(PROJECT)/python_layer.hpp
+# MAT$(PROJECT)_SRC is the mex entrance point of matlab package for $(PROJECT)
+MAT$(PROJECT)_SRC := matlab/+$(PROJECT)/private/$(PROJECT)_.cpp
+ifneq ($(MATLAB_DIR),)
+ MAT_SO_EXT := $(shell $(MATLAB_DIR)/bin/mexext)
+endif
+MAT$(PROJECT)_SO := matlab/+$(PROJECT)/private/$(PROJECT)_.$(MAT_SO_EXT)
+
+##############################
+# Derive generated files
+##############################
+# The generated files for protocol buffers
+PROTO_GEN_HEADER_SRCS := $(addprefix $(PROTO_BUILD_DIR)/, \
+ $(notdir ${PROTO_SRCS:.proto=.pb.h}))
+PROTO_GEN_HEADER := $(addprefix $(PROTO_BUILD_INCLUDE_DIR)/, \
+ $(notdir ${PROTO_SRCS:.proto=.pb.h}))
+PROTO_GEN_CC := $(addprefix $(BUILD_DIR)/, ${PROTO_SRCS:.proto=.pb.cc})
+PY_PROTO_BUILD_DIR := python/$(PROJECT)/proto
+PY_PROTO_INIT := python/$(PROJECT)/proto/__init__.py
+PROTO_GEN_PY := $(foreach file,${PROTO_SRCS:.proto=_pb2.py}, \
+ $(PY_PROTO_BUILD_DIR)/$(notdir $(file)))
+# The objects corresponding to the source files
+# These objects will be linked into the final shared library, so we
+# exclude the tool, example, and test objects.
+CXX_OBJS := $(addprefix $(BUILD_DIR)/, ${CXX_SRCS:.cpp=.o})
+CU_OBJS := $(addprefix $(BUILD_DIR)/cuda/, ${CU_SRCS:.cu=.o})
+PROTO_OBJS := ${PROTO_GEN_CC:.cc=.o}
+OBJS := $(PROTO_OBJS) $(CXX_OBJS) $(CU_OBJS)
+# tool, example, and test objects
+TOOL_OBJS := $(addprefix $(BUILD_DIR)/, ${TOOL_SRCS:.cpp=.o})
+TOOL_BUILD_DIR := $(BUILD_DIR)/tools
+TEST_CXX_BUILD_DIR := $(BUILD_DIR)/src/$(PROJECT)/test
+TEST_CU_BUILD_DIR := $(BUILD_DIR)/cuda/src/$(PROJECT)/test
+TEST_CXX_OBJS := $(addprefix $(BUILD_DIR)/, ${TEST_SRCS:.cpp=.o})
+TEST_CU_OBJS := $(addprefix $(BUILD_DIR)/cuda/, ${TEST_CU_SRCS:.cu=.o})
+TEST_OBJS := $(TEST_CXX_OBJS) $(TEST_CU_OBJS)
+GTEST_OBJ := $(addprefix $(BUILD_DIR)/, ${GTEST_SRC:.cpp=.o})
+EXAMPLE_OBJS := $(addprefix $(BUILD_DIR)/, ${EXAMPLE_SRCS:.cpp=.o})
+# Output files for automatic dependency generation
+DEPS := ${CXX_OBJS:.o=.d} ${CU_OBJS:.o=.d} ${TEST_CXX_OBJS:.o=.d} \
+ ${TEST_CU_OBJS:.o=.d} $(BUILD_DIR)/${MAT$(PROJECT)_SO:.$(MAT_SO_EXT)=.d}
+# tool, example, and test bins
+TOOL_BINS := ${TOOL_OBJS:.o=.bin}
+EXAMPLE_BINS := ${EXAMPLE_OBJS:.o=.bin}
+# symlinks to tool bins without the ".bin" extension
+TOOL_BIN_LINKS := ${TOOL_BINS:.bin=}
+# Put the test binaries in build/test for convenience.
+TEST_BIN_DIR := $(BUILD_DIR)/test
+TEST_CU_BINS := $(addsuffix .testbin,$(addprefix $(TEST_BIN_DIR)/, \
+ $(foreach obj,$(TEST_CU_OBJS),$(basename $(notdir $(obj))))))
+TEST_CXX_BINS := $(addsuffix .testbin,$(addprefix $(TEST_BIN_DIR)/, \
+ $(foreach obj,$(TEST_CXX_OBJS),$(basename $(notdir $(obj))))))
+TEST_BINS := $(TEST_CXX_BINS) $(TEST_CU_BINS)
+# TEST_ALL_BIN is the test binary that links caffe dynamically.
+TEST_ALL_BIN := $(TEST_BIN_DIR)/test_all.testbin
+
+##############################
+# Derive compiler warning dump locations
+##############################
+WARNS_EXT := warnings.txt
+CXX_WARNS := $(addprefix $(BUILD_DIR)/, ${CXX_SRCS:.cpp=.o.$(WARNS_EXT)})
+CU_WARNS := $(addprefix $(BUILD_DIR)/cuda/, ${CU_SRCS:.cu=.o.$(WARNS_EXT)})
+TOOL_WARNS := $(addprefix $(BUILD_DIR)/, ${TOOL_SRCS:.cpp=.o.$(WARNS_EXT)})
+EXAMPLE_WARNS := $(addprefix $(BUILD_DIR)/, ${EXAMPLE_SRCS:.cpp=.o.$(WARNS_EXT)})
+TEST_WARNS := $(addprefix $(BUILD_DIR)/, ${TEST_SRCS:.cpp=.o.$(WARNS_EXT)})
+TEST_CU_WARNS := $(addprefix $(BUILD_DIR)/cuda/, ${TEST_CU_SRCS:.cu=.o.$(WARNS_EXT)})
+ALL_CXX_WARNS := $(CXX_WARNS) $(TOOL_WARNS) $(EXAMPLE_WARNS) $(TEST_WARNS)
+ALL_CU_WARNS := $(CU_WARNS) $(TEST_CU_WARNS)
+ALL_WARNS := $(ALL_CXX_WARNS) $(ALL_CU_WARNS)
+
+EMPTY_WARN_REPORT := $(BUILD_DIR)/.$(WARNS_EXT)
+NONEMPTY_WARN_REPORT := $(BUILD_DIR)/$(WARNS_EXT)
+
+##############################
+# Derive include and lib directories
+##############################
+CUDA_INCLUDE_DIR := $(CUDA_DIR)/include
+
+CUDA_LIB_DIR :=
+# add <cuda>/lib64 only if it exists
+ifneq ("$(wildcard $(CUDA_DIR)/lib64)","")
+ CUDA_LIB_DIR += $(CUDA_DIR)/lib64
+endif
+CUDA_LIB_DIR += $(CUDA_DIR)/lib
+
+INCLUDE_DIRS += $(BUILD_INCLUDE_DIR) ./src ./include
+ifneq ($(CPU_ONLY), 1)
+ INCLUDE_DIRS += $(CUDA_INCLUDE_DIR)
+ LIBRARY_DIRS += $(CUDA_LIB_DIR)
+ LIBRARIES := cudart cublas curand
+endif
+LIBRARIES += glog gflags protobuf leveldb snappy \
+ lmdb boost_system hdf5_hl hdf5 m \
+ opencv_core opencv_highgui opencv_imgproc
+PYTHON_LIBRARIES := boost_python python2.7
+WARNINGS := -Wall -Wno-sign-compare
+
+##############################
+# Set build directories
+##############################
+
+DISTRIBUTE_DIR ?= distribute
+DISTRIBUTE_SUBDIRS := $(DISTRIBUTE_DIR)/bin $(DISTRIBUTE_DIR)/lib
+DIST_ALIASES := dist
+ifneq ($(strip $(DISTRIBUTE_DIR)),distribute)
+ DIST_ALIASES += distribute
+endif
+
+ALL_BUILD_DIRS := $(sort $(BUILD_DIR) $(addprefix $(BUILD_DIR)/, $(SRC_DIRS)) \
+ $(addprefix $(BUILD_DIR)/cuda/, $(SRC_DIRS)) \
+ $(LIB_BUILD_DIR) $(TEST_BIN_DIR) $(PY_PROTO_BUILD_DIR) $(LINT_OUTPUT_DIR) \
+ $(DISTRIBUTE_SUBDIRS) $(PROTO_BUILD_INCLUDE_DIR))
+
+##############################
+# Set directory for Doxygen-generated documentation
+##############################
+DOXYGEN_CONFIG_FILE ?= ./.Doxyfile
+# should be the same as OUTPUT_DIRECTORY in the .Doxyfile
+DOXYGEN_OUTPUT_DIR ?= ./doxygen
+DOXYGEN_COMMAND ?= doxygen
+# All the files that might have Doxygen documentation.
+DOXYGEN_SOURCES := $(shell find \
+ src/$(PROJECT) \
+ include/$(PROJECT) \
+ python/ \
+ matlab/ \
+ examples \
+ tools \
+ -name "*.cpp" -or -name "*.hpp" -or -name "*.cu" -or -name "*.cuh" -or \
+ -name "*.py" -or -name "*.m")
+DOXYGEN_SOURCES += $(DOXYGEN_CONFIG_FILE)
+
+
+##############################
+# Configure build
+##############################
+
+# Determine platform
+UNAME := $(shell uname -s)
+ifeq ($(UNAME), Linux)
+ LINUX := 1
+else ifeq ($(UNAME), Darwin)
+ OSX := 1
+endif
+
+# Linux
+ifeq ($(LINUX), 1)
+ CXX ?= /usr/bin/g++
+ GCCVERSION := $(shell $(CXX) -dumpversion | cut -f1,2 -d.)
+ # older versions of gcc are too dumb to build boost with -Wuninitialized
+ ifeq ($(shell echo $(GCCVERSION) \< 4.6 | bc), 1)
+ WARNINGS += -Wno-uninitialized
+ endif
+ # boost::thread is reasonably called boost_thread (compare OS X)
+ # We will also explicitly add stdc++ to the link target.
+ LIBRARIES += boost_thread stdc++
+endif
+
+# OS X:
+# clang++ instead of g++
+# libstdc++ for NVCC compatibility on OS X >= 10.9 with CUDA < 7.0
+ifeq ($(OSX), 1)
+ CXX := /usr/bin/clang++
+ ifneq ($(CPU_ONLY), 1)
+ CUDA_VERSION := $(shell $(CUDA_DIR)/bin/nvcc -V | grep -o 'release \d' | grep -o '\d')
+ ifeq ($(shell echo $(CUDA_VERSION) \< 7.0 | bc), 1)
+ CXXFLAGS += -stdlib=libstdc++
+ LINKFLAGS += -stdlib=libstdc++
+ endif
+ # clang throws this warning for cuda headers
+ WARNINGS += -Wno-unneeded-internal-declaration
+ endif
+ # gtest needs to use its own tuple to not conflict with clang
+ COMMON_FLAGS += -DGTEST_USE_OWN_TR1_TUPLE=1
+ # boost::thread is called boost_thread-mt to mark multithreading on OS X
+ LIBRARIES += boost_thread-mt
+ # we need to explicitly ask for the rpath to be obeyed
+ DYNAMIC_FLAGS := -install_name @rpath/libcaffe.so
+ ORIGIN := @loader_path
+else
+ ORIGIN := \$$ORIGIN
+endif
+
+# Custom compiler
+ifdef CUSTOM_CXX
+ CXX := $(CUSTOM_CXX)
+endif
+
+# Static linking
+ifneq (,$(findstring clang++,$(CXX)))
+ STATIC_LINK_COMMAND := -Wl,-force_load $(STATIC_NAME)
+else ifneq (,$(findstring g++,$(CXX)))
+ STATIC_LINK_COMMAND := -Wl,--whole-archive $(STATIC_NAME) -Wl,--no-whole-archive
+else
+ # The following line must not be indented with a tab, since we are not inside a target
+ $(error Cannot static link with the $(CXX) compiler)
+endif
+
+# Debugging
+ifeq ($(DEBUG), 1)
+ COMMON_FLAGS += -DDEBUG -g -O0
+ NVCCFLAGS += -G
+else
+ COMMON_FLAGS += -DNDEBUG -O2
+endif
+
+# cuDNN acceleration configuration.
+ifeq ($(USE_CUDNN), 1)
+ LIBRARIES += cudnn
+ COMMON_FLAGS += -DUSE_CUDNN
+endif
+
+# CPU-only configuration
+ifeq ($(CPU_ONLY), 1)
+ OBJS := $(PROTO_OBJS) $(CXX_OBJS)
+ TEST_OBJS := $(TEST_CXX_OBJS)
+ TEST_BINS := $(TEST_CXX_BINS)
+ ALL_WARNS := $(ALL_CXX_WARNS)
+ TEST_FILTER := --gtest_filter="-*GPU*"
+ COMMON_FLAGS += -DCPU_ONLY
+endif
+
+# Python layer support
+ifeq ($(WITH_PYTHON_LAYER), 1)
+ COMMON_FLAGS += -DWITH_PYTHON_LAYER
+ LIBRARIES += $(PYTHON_LIBRARIES)
+endif
+
+# BLAS configuration (default = ATLAS)
+BLAS ?= atlas
+ifeq ($(BLAS), mkl)
+ # MKL
+ LIBRARIES += mkl_rt
+ COMMON_FLAGS += -DUSE_MKL
+ MKL_DIR ?= /opt/intel/mkl
+ BLAS_INCLUDE ?= $(MKL_DIR)/include
+ BLAS_LIB ?= $(MKL_DIR)/lib $(MKL_DIR)/lib/intel64
+else ifeq ($(BLAS), open)
+ # OpenBLAS
+ LIBRARIES += openblas
+else
+ # ATLAS
+ ifeq ($(LINUX), 1)
+ ifeq ($(BLAS), atlas)
+ # Linux simply has cblas and atlas
+ LIBRARIES += cblas atlas
+ endif
+ else ifeq ($(OSX), 1)
+ # OS X packages atlas as the vecLib framework
+ LIBRARIES += cblas
+ # 10.10 has accelerate while 10.9 has veclib
+ XCODE_CLT_VER := $(shell pkgutil --pkg-info=com.apple.pkg.CLTools_Executables | grep -o 'version: 6')
+ ifneq (,$(findstring version: 6,$(XCODE_CLT_VER)))
+ BLAS_INCLUDE ?= /System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Headers/
+ LDFLAGS += -framework Accelerate
+ else
+ BLAS_INCLUDE ?= /System/Library/Frameworks/vecLib.framework/Versions/Current/Headers/
+ LDFLAGS += -framework vecLib
+ endif
+ endif
+endif
+INCLUDE_DIRS += $(BLAS_INCLUDE)
+LIBRARY_DIRS += $(BLAS_LIB)
+
+LIBRARY_DIRS += $(LIB_BUILD_DIR)
+
+# Automatic dependency generation (nvcc is handled separately)
+CXXFLAGS += -MMD -MP
+
+# Complete build flags.
+COMMON_FLAGS += $(foreach includedir,$(INCLUDE_DIRS),-I$(includedir))
+CXXFLAGS += -pthread -fPIC $(COMMON_FLAGS) $(WARNINGS)
+NVCCFLAGS += -ccbin=$(CXX) -Xcompiler -fPIC $(COMMON_FLAGS)
+# mex may invoke an older gcc that is too liberal with -Wuninitialized
+MATLAB_CXXFLAGS := $(CXXFLAGS) -Wno-uninitialized
+LINKFLAGS += -pthread -fPIC $(COMMON_FLAGS) $(WARNINGS)
+
+USE_PKG_CONFIG ?= 0
+ifeq ($(USE_PKG_CONFIG), 1)
+ PKG_CONFIG := $(shell pkg-config opencv --libs)
+else
+ PKG_CONFIG :=
+endif
+LDFLAGS += $(foreach librarydir,$(LIBRARY_DIRS),-L$(librarydir)) $(PKG_CONFIG) \
+ $(foreach library,$(LIBRARIES),-l$(library))
+PYTHON_LDFLAGS := $(LDFLAGS) $(foreach library,$(PYTHON_LIBRARIES),-l$(library))
+
+# 'superclean' target recursively* deletes all files ending with an extension
+# in $(SUPERCLEAN_EXTS) below. This may be useful if you've built older
+# versions of Caffe that do not place all generated files in a location known
+# to the 'clean' target.
+#
+# 'supercleanlist' will list the files to be deleted by make superclean.
+#
+# * Recursive with the exception that symbolic links are never followed, per the
+# default behavior of 'find'.
+SUPERCLEAN_EXTS := .so .a .o .bin .testbin .pb.cc .pb.h _pb2.py .cuo
+
+# Set the sub-targets of the 'everything' target.
+EVERYTHING_TARGETS := all py$(PROJECT) test warn lint
+# Only build matcaffe as part of "everything" if MATLAB_DIR is specified.
+ifneq ($(MATLAB_DIR),)
+ EVERYTHING_TARGETS += mat$(PROJECT)
+endif
+
+##############################
+# Define build targets
+##############################
+.PHONY: all test clean docs linecount lint lintclean tools examples $(DIST_ALIASES) \
+ py mat py$(PROJECT) mat$(PROJECT) proto runtest \
+ superclean supercleanlist supercleanfiles warn everything
+
+all: $(STATIC_NAME) $(DYNAMIC_NAME) tools examples
+
+everything: $(EVERYTHING_TARGETS)
+
+linecount:
+ cloc --read-lang-def=$(PROJECT).cloc \
+ src/$(PROJECT) include/$(PROJECT) tools examples \
+ python matlab
+
+lint: $(EMPTY_LINT_REPORT)
+
+lintclean:
+ @ $(RM) -r $(LINT_OUTPUT_DIR) $(EMPTY_LINT_REPORT) $(NONEMPTY_LINT_REPORT)
+
+docs: $(DOXYGEN_OUTPUT_DIR)
+ @ cd ./docs ; ln -sfn ../$(DOXYGEN_OUTPUT_DIR)/html doxygen
+
+$(DOXYGEN_OUTPUT_DIR): $(DOXYGEN_CONFIG_FILE) $(DOXYGEN_SOURCES)
+ $(DOXYGEN_COMMAND) $(DOXYGEN_CONFIG_FILE)
+
+$(EMPTY_LINT_REPORT): $(LINT_OUTPUTS) | $(BUILD_DIR)
+ @ cat $(LINT_OUTPUTS) > $@
+ @ if [ -s "$@" ]; then \
+ cat $@; \
+ mv $@ $(NONEMPTY_LINT_REPORT); \
+ echo "Found one or more lint errors."; \
+ exit 1; \
+ fi; \
+ $(RM) $(NONEMPTY_LINT_REPORT); \
+ echo "No lint errors!";
+
+$(LINT_OUTPUTS): $(LINT_OUTPUT_DIR)/%.lint.txt : % $(LINT_SCRIPT) | $(LINT_OUTPUT_DIR)
+ @ mkdir -p $(dir $@)
+ @ python $(LINT_SCRIPT) $< 2>&1 \
+ | grep -v "^Done processing " \
+ | grep -v "^Total errors found: 0" \
+ > $@ \
+ || true
+
+test: $(TEST_ALL_BIN) $(TEST_ALL_DYNLINK_BIN) $(TEST_BINS)
+
+tools: $(TOOL_BINS) $(TOOL_BIN_LINKS)
+
+examples: $(EXAMPLE_BINS)
+
+py$(PROJECT): py
+
+py: $(PY$(PROJECT)_SO) $(PROTO_GEN_PY)
+
+$(PY$(PROJECT)_SO): $(PY$(PROJECT)_SRC) $(PY$(PROJECT)_HXX) | $(DYNAMIC_NAME)
+ @ echo CXX/LD -o $@ $<
+ $(Q)$(CXX) -shared -o $@ $(PY$(PROJECT)_SRC) \
+ -o $@ $(LINKFLAGS) -l$(PROJECT) $(PYTHON_LDFLAGS) \
+ -Wl,-rpath,$(ORIGIN)/../../build/lib
+
+mat$(PROJECT): mat
+
+mat: $(MAT$(PROJECT)_SO)
+
+$(MAT$(PROJECT)_SO): $(MAT$(PROJECT)_SRC) $(STATIC_NAME)
+ @ if [ -z "$(MATLAB_DIR)" ]; then \
+ echo "MATLAB_DIR must be specified in $(CONFIG_FILE)" \
+ "to build mat$(PROJECT)."; \
+ exit 1; \
+ fi
+ @ echo MEX $<
+ $(Q)$(MATLAB_DIR)/bin/mex $(MAT$(PROJECT)_SRC) \
+ CXX="$(CXX)" \
+ CXXFLAGS="\$$CXXFLAGS $(MATLAB_CXXFLAGS)" \
+ CXXLIBS="\$$CXXLIBS $(STATIC_LINK_COMMAND) $(LDFLAGS)" -output $@
+ @ if [ -f "$(PROJECT)_.d" ]; then \
+ mv -f $(PROJECT)_.d $(BUILD_DIR)/${MAT$(PROJECT)_SO:.$(MAT_SO_EXT)=.d}; \
+ fi
+
+runtest: $(TEST_ALL_BIN)
+ $(TOOL_BUILD_DIR)/caffe
+ $(TEST_ALL_BIN) $(TEST_GPUID) --gtest_shuffle $(TEST_FILTER)
+
+pytest: py
+ cd python; python -m unittest discover -s caffe/test
+
+mattest: mat
+ cd matlab; $(MATLAB_DIR)/bin/matlab -nodisplay -r 'caffe.run_tests(), exit()'
+
+warn: $(EMPTY_WARN_REPORT)
+
+$(EMPTY_WARN_REPORT): $(ALL_WARNS) | $(BUILD_DIR)
+ @ cat $(ALL_WARNS) > $@
+ @ if [ -s "$@" ]; then \
+ cat $@; \
+ mv $@ $(NONEMPTY_WARN_REPORT); \
+ echo "Compiler produced one or more warnings."; \
+ exit 1; \
+ fi; \
+ $(RM) $(NONEMPTY_WARN_REPORT); \
+ echo "No compiler warnings!";
+
+$(ALL_WARNS): %.o.$(WARNS_EXT) : %.o
+
+$(BUILD_DIR_LINK): $(BUILD_DIR)/.linked
+
+# Create a target ".linked" in this BUILD_DIR to tell Make that the "build" link
+# is currently correct, then delete the one in the OTHER_BUILD_DIR in case it
+# exists and $(DEBUG) is toggled later.
+$(BUILD_DIR)/.linked:
+ @ mkdir -p $(BUILD_DIR)
+ @ $(RM) $(OTHER_BUILD_DIR)/.linked
+ @ $(RM) -r $(BUILD_DIR_LINK)
+ @ ln -s $(BUILD_DIR) $(BUILD_DIR_LINK)
+ @ touch $@
+
+$(ALL_BUILD_DIRS): | $(BUILD_DIR_LINK)
+ @ mkdir -p $@
+
+$(DYNAMIC_NAME): $(OBJS) | $(LIB_BUILD_DIR)
+ @ echo LD -o $@
+ $(Q)$(CXX) -shared -o $@ $(OBJS) $(LINKFLAGS) $(LDFLAGS) $(DYNAMIC_FLAGS)
+
+$(STATIC_NAME): $(OBJS) | $(LIB_BUILD_DIR)
+ @ echo AR -o $@
+ $(Q)ar rcs $@ $(OBJS)
+
+$(BUILD_DIR)/%.o: %.cpp | $(ALL_BUILD_DIRS)
+ @ echo CXX $<
+ $(Q)$(CXX) $< $(CXXFLAGS) -c -o $@ 2> $@.$(WARNS_EXT) \
+ || (cat $@.$(WARNS_EXT); exit 1)
+ @ cat $@.$(WARNS_EXT)
+
+$(PROTO_BUILD_DIR)/%.pb.o: $(PROTO_BUILD_DIR)/%.pb.cc $(PROTO_GEN_HEADER) \
+ | $(PROTO_BUILD_DIR)
+ @ echo CXX $<
+ $(Q)$(CXX) $< $(CXXFLAGS) -c -o $@ 2> $@.$(WARNS_EXT) \
+ || (cat $@.$(WARNS_EXT); exit 1)
+ @ cat $@.$(WARNS_EXT)
+
+$(BUILD_DIR)/cuda/%.o: %.cu | $(ALL_BUILD_DIRS)
+ @ echo NVCC $<
+ $(Q)$(CUDA_DIR)/bin/nvcc $(NVCCFLAGS) $(CUDA_ARCH) -M $< -o ${@:.o=.d} \
+ -odir $(@D)
+ $(Q)$(CUDA_DIR)/bin/nvcc $(NVCCFLAGS) $(CUDA_ARCH) -c $< -o $@ 2> $@.$(WARNS_EXT) \
+ || (cat $@.$(WARNS_EXT); exit 1)
+ @ cat $@.$(WARNS_EXT)
+
+$(TEST_ALL_BIN): $(TEST_MAIN_SRC) $(TEST_OBJS) $(GTEST_OBJ) \
+ | $(DYNAMIC_NAME) $(TEST_BIN_DIR)
+ @ echo CXX/LD -o $@ $<
+ $(Q)$(CXX) $(TEST_MAIN_SRC) $(TEST_OBJS) $(GTEST_OBJ) \
+ -o $@ $(LINKFLAGS) $(LDFLAGS) -l$(PROJECT) -Wl,-rpath,$(ORIGIN)/../lib
+
+$(TEST_CU_BINS): $(TEST_BIN_DIR)/%.testbin: $(TEST_CU_BUILD_DIR)/%.o \
+ $(GTEST_OBJ) | $(DYNAMIC_NAME) $(TEST_BIN_DIR)
+ @ echo LD $<
+ $(Q)$(CXX) $(TEST_MAIN_SRC) $< $(GTEST_OBJ) \
+ -o $@ $(LINKFLAGS) $(LDFLAGS) -l$(PROJECT) -Wl,-rpath,$(ORIGIN)/../lib
+
+$(TEST_CXX_BINS): $(TEST_BIN_DIR)/%.testbin: $(TEST_CXX_BUILD_DIR)/%.o \
+ $(GTEST_OBJ) | $(DYNAMIC_NAME) $(TEST_BIN_DIR)
+ @ echo LD $<
+ $(Q)$(CXX) $(TEST_MAIN_SRC) $< $(GTEST_OBJ) \
+ -o $@ $(LINKFLAGS) $(LDFLAGS) -l$(PROJECT) -Wl,-rpath,$(ORIGIN)/../lib
+
+# Target for extension-less symlinks to tool binaries with extension '*.bin'.
+$(TOOL_BUILD_DIR)/%: $(TOOL_BUILD_DIR)/%.bin | $(TOOL_BUILD_DIR)
+ @ $(RM) $@
+ @ ln -s $(abspath $<) $@
+
+$(TOOL_BINS): %.bin : %.o | $(DYNAMIC_NAME)
+ @ echo CXX/LD -o $@
+ $(Q)$(CXX) $< -o $@ $(LINKFLAGS) -l$(PROJECT) $(LDFLAGS) \
+ -Wl,-rpath,$(ORIGIN)/../lib
+
+$(EXAMPLE_BINS): %.bin : %.o | $(DYNAMIC_NAME)
+ @ echo CXX/LD -o $@
+ $(Q)$(CXX) $< -o $@ $(LINKFLAGS) -l$(PROJECT) $(LDFLAGS) \
+ -Wl,-rpath,$(ORIGIN)/../../lib
+
+proto: $(PROTO_GEN_CC) $(PROTO_GEN_HEADER)
+
+$(PROTO_BUILD_DIR)/%.pb.cc $(PROTO_BUILD_DIR)/%.pb.h : \
+ $(PROTO_SRC_DIR)/%.proto | $(PROTO_BUILD_DIR)
+ @ echo PROTOC $<
+ $(Q)protoc --proto_path=$(PROTO_SRC_DIR) --cpp_out=$(PROTO_BUILD_DIR) $<
+
+$(PY_PROTO_BUILD_DIR)/%_pb2.py : $(PROTO_SRC_DIR)/%.proto \
+ $(PY_PROTO_INIT) | $(PY_PROTO_BUILD_DIR)
+ @ echo PROTOC \(python\) $<
+ $(Q)protoc --proto_path=$(PROTO_SRC_DIR) --python_out=$(PY_PROTO_BUILD_DIR) $<
+
+$(PY_PROTO_INIT): | $(PY_PROTO_BUILD_DIR)
+ touch $(PY_PROTO_INIT)
+
+clean:
+ @- $(RM) -rf $(ALL_BUILD_DIRS)
+ @- $(RM) -rf $(OTHER_BUILD_DIR)
+ @- $(RM) -rf $(BUILD_DIR_LINK)
+ @- $(RM) -rf $(DISTRIBUTE_DIR)
+ @- $(RM) $(PY$(PROJECT)_SO)
+ @- $(RM) $(MAT$(PROJECT)_SO)
+
+supercleanfiles:
+ $(eval SUPERCLEAN_FILES := $(strip \
+ $(foreach ext,$(SUPERCLEAN_EXTS), $(shell find . -name '*$(ext)' \
+ -not -path './data/*'))))
+
+supercleanlist: supercleanfiles
+ @ \
+ if [ -z "$(SUPERCLEAN_FILES)" ]; then \
+ echo "No generated files found."; \
+ else \
+ echo $(SUPERCLEAN_FILES) | tr ' ' '\n'; \
+ fi
+
+superclean: clean supercleanfiles
+ @ \
+ if [ -z "$(SUPERCLEAN_FILES)" ]; then \
+ echo "No generated files found."; \
+ else \
+ echo "Deleting the following generated files:"; \
+ echo $(SUPERCLEAN_FILES) | tr ' ' '\n'; \
+ $(RM) $(SUPERCLEAN_FILES); \
+ fi
+
+$(DIST_ALIASES): $(DISTRIBUTE_DIR)
+
+$(DISTRIBUTE_DIR): all py | $(DISTRIBUTE_SUBDIRS)
+ # add include
+ cp -r include $(DISTRIBUTE_DIR)/
+ mkdir -p $(DISTRIBUTE_DIR)/include/caffe/proto
+ cp $(PROTO_GEN_HEADER_SRCS) $(DISTRIBUTE_DIR)/include/caffe/proto
+ # add tool and example binaries
+ cp $(TOOL_BINS) $(DISTRIBUTE_DIR)/bin
+ cp $(EXAMPLE_BINS) $(DISTRIBUTE_DIR)/bin
+ # add libraries
+ cp $(STATIC_NAME) $(DISTRIBUTE_DIR)/lib
+ cp $(DYNAMIC_NAME) $(DISTRIBUTE_DIR)/lib
+ # add python - it's not the standard way, indeed...
+ cp -r python $(DISTRIBUTE_DIR)/python
+
+-include $(DEPS)
+
+apollo: all pycaffe
+ cython apollo/_apollo.pyx --cplus
+ g++ -shared -pthread -fPIC -fwrapv -O2 -Wall -Wno-unused-function -fno-strict-aliasing `pkg-config python --libs --cflags` -o apollo/_apollo.so apollo/_apollo.cpp $(CXXFLAGS) $(PYTHON_LDFLAGS) -lcaffe
+ rm apollo/_apollo.h apollo/_apollo.cpp
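The apollo target appended at the end of this Makefile is the piece the PKGBUILD below relies on: it builds the stock Caffe libraries, runs Cython on apollo/_apollo.pyx, and links apollo/_apollo.so against libcaffe. As a rough sketch only (the clone location and working directory are assumptions, not anything this diff prescribes), the same steps the PKGBUILD's build() performs can be driven by hand:

    # assumes a fresh clone of the upstream repo plus the two files shipped in this commit
    git clone https://github.com/Russell91/apollo.git
    cp Makefile Makefile.config apollo/
    cd apollo
    make apollo    # builds libcaffe, then runs cython/g++ to produce apollo/_apollo.so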
diff --git a/Makefile.config b/Makefile.config
new file mode 100644
index 000000000000..da8812beec89
--- /dev/null
+++ b/Makefile.config
@@ -0,0 +1,93 @@
+## Refer to http://caffe.berkeleyvision.org/installation.html
+# Contributions simplifying and improving our build system are welcome!
+
+# cuDNN acceleration switch (uncomment to build with cuDNN).
+# USE_CUDNN := 1
+
+# CPU-only switch (uncomment to build without GPU support).
+# CPU_ONLY := 1
+
+# To customize your choice of compiler, uncomment and set the following.
+# N.B. the default for Linux is g++ and the default for OSX is clang++
+# CUSTOM_CXX := g++
+
+# CUDA directory contains bin/ and lib/ directories that we need.
+CUDA_DIR := /opt/cuda/
+# On Ubuntu 14.04, if cuda tools are installed via
+# "sudo apt-get install nvidia-cuda-toolkit" then use this instead:
+# CUDA_DIR := /usr
+
+# CUDA architecture setting: going with all of them.
+# For CUDA < 6.0, comment the *_50 lines for compatibility.
+CUDA_ARCH := -gencode arch=compute_20,code=sm_20 \
+ -gencode arch=compute_20,code=sm_21 \
+ -gencode arch=compute_30,code=sm_30 \
+ -gencode arch=compute_35,code=sm_35 \
+ -gencode arch=compute_50,code=sm_50 \
+ -gencode arch=compute_50,code=compute_50
+
+# BLAS choice:
+# atlas for ATLAS (default)
+# mkl for MKL
+# open for OpenBlas
+BLAS := open
+# Custom (MKL/ATLAS/OpenBLAS) include and lib directories.
+# Leave commented to accept the defaults for your choice of BLAS
+# (which should work)!
+# BLAS_INCLUDE := /path/to/your/blas
+# BLAS_LIB := /path/to/your/blas
+
+# Homebrew puts openblas in a directory that is not on the standard search path
+# BLAS_INCLUDE := $(shell brew --prefix openblas)/include
+# BLAS_LIB := $(shell brew --prefix openblas)/lib
+
+# This is required only if you will compile the matlab interface.
+# MATLAB directory should contain the mex binary in /bin.
+# MATLAB_DIR := /usr/local
+# MATLAB_DIR := /Applications/MATLAB_R2012b.app
+
+# NOTE: this is required only if you will compile the python interface.
+# We need to be able to find Python.h and numpy/arrayobject.h.
+PYTHON_INCLUDE := /usr/include/python2.7 \
+ /usr/lib/python2.7/dist-packages/numpy/core/include
+# Anaconda Python distribution is quite popular. Include path:
+# Verify anaconda location, sometimes it's in root.
+# ANACONDA_HOME := $(HOME)/anaconda
+# PYTHON_INCLUDE := $(ANACONDA_HOME)/include \
+ # $(ANACONDA_HOME)/include/python2.7 \
+ # $(ANACONDA_HOME)/lib/python2.7/site-packages/numpy/core/include \
+
+# We need to be able to find libpythonX.X.so or .dylib.
+PYTHON_LIB := /usr/lib
+# PYTHON_LIB := $(ANACONDA_HOME)/lib
+
+# Homebrew installs numpy in a non standard path (keg only)
+# PYTHON_INCLUDE += $(dir $(shell python -c 'import numpy.core; print(numpy.core.__file__)'))/include
+# PYTHON_LIB += $(shell brew --prefix numpy)/lib
+
+# Uncomment to support layers written in Python (will link against Python libs)
+# WITH_PYTHON_LAYER := 1
+
+# Whatever else you find you need goes here.
+INCLUDE_DIRS := $(PYTHON_INCLUDE) /usr/local/include
+LIBRARY_DIRS := $(PYTHON_LIB) /usr/local/lib /usr/lib
+
+# If Homebrew is installed at a non standard location (for example your home directory) and you use it for general dependencies
+# INCLUDE_DIRS += $(shell brew --prefix)/include
+# LIBRARY_DIRS += $(shell brew --prefix)/lib
+
+# Uncomment to use `pkg-config` to specify OpenCV library paths.
+# (Usually not necessary -- OpenCV libraries are normally installed in one of the above $LIBRARY_DIRS.)
+# USE_PKG_CONFIG := 1
+
+BUILD_DIR := build
+DISTRIBUTE_DIR := distribute
+
+# Uncomment for debugging. Does not work on OSX due to https://github.com/BVLC/caffe/issues/171
+# DEBUG := 1
+
+# The ID of the GPU that 'make runtest' will use to run unit tests.
+TEST_GPUID := 0
+
+# enable pretty build (comment to see full commands)
+Q ?= @
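This Makefile.config hard-codes Arch Linux locations: CUDA under /opt/cuda (the cuda package), OpenBLAS as the BLAS backend, and the system python2.7 headers. A quick sanity check before building, purely illustrative; the package names are taken from the depends array in the PKGBUILD below:

    # confirm the locations this config expects before running make
    test -x /opt/cuda/bin/nvcc && /opt/cuda/bin/nvcc --version
    test -d /usr/include/python2.7 || echo "python2.7 headers missing"
    pacman -Qi openblas-lapack cuda >/dev/null && echo "BLAS and CUDA packages installed"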
diff --git a/PKGBUILD b/PKGBUILD
new file mode 100644
index 000000000000..396e28eeaca7
--- /dev/null
+++ b/PKGBUILD
@@ -0,0 +1,95 @@
+# Maintainer: palkeo <contact@palkeo.com>
+
+pkgname=apollo-git
+pkgver=r3125.c9e7761
+pkgrel=1
+pkgdesc='Framework for deep learning in Python built on top of Caffe'
+arch=(x86_64)
+url='https://github.com/Russell91/apollo'
+license=('custom')
+
+depends=(
+ 'cuda'
+ 'opencv'
+ 'openblas-lapack'
+ 'google-glog'
+ 'gflags'
+ 'liblmdb'
+ 'cython2'
+ 'ipython2'
+ 'python2-pillow'
+ 'python2-numpy'
+ 'python2-yaml'
+ 'python2-numpy'
+ 'python2-scipy'
+ 'python2-scikit-image'
+ 'python2-scikit-learn'
+ 'python2-matplotlib'
+ 'python2-h5py'
+ 'python2-leveldb-svn'
+ 'python2-networkx'
+ 'python2-nose'
+ 'python2-pandas'
+ 'python2-dateutil'
+ 'python2-protobuf'
+ 'python2-gflags'
+ 'python2-pandas'
+ 'boost'
+ 'boost-libs'
+ 'bc'
+)
+conflicts=('caffe')
+
+source=('git+https://github.com/Russell91/apollo.git'
+ 'Makefile.config'
+ 'Makefile')
+
+makedepends=('git' 'python2-setuptools')
+sha256sums=('SKIP'
+ 'SKIP'
+ 'SKIP')
+
+build() {
+ cp Makefile Makefile.config apollo/
+
+ cd apollo
+
+ # Patch any #!/usr/bin/python to #!/usr/bin/python2
+ for file in $(find . -name '*.py' -print); do
+ sed -r -i 's_^#!.*/usr/bin/python(\s|$)_#!/usr/bin/python2_' $file
+ sed -r -i 's_^#!.*/usr/bin/env(\s)*python(\s|$)_#!/usr/bin/env python2_' $file
+ done
+ # Do the same for python examples
+ for file in $(find . -name '*.py.example' -print); do
+ sed -r -i 's_^#!.*/usr/bin/python(\s|$)_#!/usr/bin/python2_' $file
+ sed -r -i 's_^#!.*/usr/bin/env(\s)*python(\s|$)_#!/usr/bin/env python2_' $file
+ done
+
+ # If the user has colormake installed then use that instead of make.
+ if hash colormake 2>/dev/null; then
+ colormake apollo
+ else
+ make apollo
+ fi
+
+}
+
+package() {
+ cd apollo
+
+ # We don't need anything related to git in the package
+ rm -rf .git*
+
+ # Set up the Python module by hand since upstream ships no setup.py
+ mkdir -p $pkgdir/usr/lib/python2.7/site-packages/apollo/
+ cp python/caffe/proto/caffe_pb2.py $pkgdir/usr/lib/python2.7/site-packages/apollo/
+ cp -R apollo/* $pkgdir/usr/lib/python2.7/site-packages/apollo/
+
+ # Install shared libraries
+ mkdir -p $pkgdir/usr/lib/
+ install -Dm644 .build_release/lib/* "${pkgdir}/usr/lib/"
+
+}
+
+# vim:set ts=2 sw=2 et:
+
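Once pushed, the usual way to consume this AUR package is the standard makepkg flow. This is a generic sketch of that workflow, not something the commit itself documents: the clone URL follows the usual https://aur.archlinux.org/<pkgname>.git pattern, and the final import check assumes the module lands in site-packages/apollo as the package() step above installs it:

    # build and install the package, then sanity-check the Python 2 module
    git clone https://aur.archlinux.org/apollo-git.git
    cd apollo-git
    makepkg -si    # -s installs missing repo dependencies via pacman; AUR-only deps need installing first
    python2 -c 'import apollo; print(apollo.__file__)'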