author     Daniel Bermond    2017-03-19 03:38:52 -0300
committer  Daniel Bermond    2017-03-19 03:38:52 -0300
commit     148ce49ba0a5e33f4c333984f04e85583b71329c (patch)
tree       c401adc132ad483f43b2bb75260f241bbf3bdc07
parent     9d14bfe48ff35dc7f13ae8d3f2a69f1dcacfbec7 (diff)
download   aur-148ce49ba0a5e33f4c333984f04e85583b71329c.tar.gz
First commit after package adoption. Major rewrite.
A major rewrite was made. Most important changes:
- removed 'Makefile.config' file (configuration is now made in PKGBUILD)
- removed the custom 'classify.py' in order to follow upstream
- removed the 250MB download during build()
- removed the source tree installation in '/opt'
- added cuDNN support
- added NCCL support
- added 'distribute' make target for easier installation
- added documentation
- use python3 instead of python2

note1: if you want python2 just follow PKGBUILD instructions
note2: current AUR dependencies:
- openblas-lapack
- cudnn
- nccl
- python-leveldb
- python-scikit-image
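note1 refers to the python2/python3 instructions embedded as comments in the PKGBUILD's prepare() function (visible in the diff below). For orientation only, a rough sketch of what that switch involves; the sed call and package name here are illustrative, and the authoritative steps are the PKGBUILD comments themselves:

    # hypothetical sketch of the python2 switch described in note1
    # 1. in prepare(), comment the "python3 settings" block and uncomment
    #    the "python2 settings" block (see the PKGBUILD comments)
    # 2. swap the python3 dependencies for their python2- counterparts, e.g.:
    sed -i "s/'python-numpy'/'python2-numpy'/" PKGBUILD   # repeat for the other python-* entries
    # 3. rebuild the package:
    makepkg -sif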
-rw-r--r--   .SRCINFO                      84
-rw-r--r--   Makefile.config               78
-rw-r--r--   PKGBUILD                     328
-rwxr-xr-x   classify-print-results.py    170
4 files changed, 197 insertions, 463 deletions
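For context, one way to satisfy the AUR-only dependencies from note2 before building this package is sketched below. This assumes manual AUR builds with makepkg (an AUR helper would work just as well) and that the repositories follow the usual https://aur.archlinux.org/<name>.git pattern:

    # build and install the AUR-only dependencies listed in note2
    for _dep in openblas-lapack cudnn nccl python-leveldb python-scikit-image; do
        git clone "https://aur.archlinux.org/${_dep}.git"
        ( cd "${_dep}" && makepkg -si )
    done

    # then build caffe-git itself
    git clone https://aur.archlinux.org/caffe-git.git
    cd caffe-git
    makepkg -si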
diff --git a/.SRCINFO b/.SRCINFO
index ff83957cff19..72e972fe0330 100644
--- a/.SRCINFO
+++ b/.SRCINFO
@@ -1,56 +1,56 @@
+# Generated by mksrcinfo v8
+# Sun Mar 19 06:38:01 UTC 2017
pkgbase = caffe-git
- pkgdesc = A fast framework for deep learning built in C++ for speed with a Python 2 interface
- pkgver = rc3.r249.g7f8f9e1
+ pkgdesc = A deep learning framework made with expression, speed, and modularity in mind (git version, gpu enabled)
+ pkgver = rc5.r5.g317d162ac
pkgrel = 1
- url = https://github.com/BVLC/caffe
+ url = http://caffe.berkeleyvision.org/
arch = x86_64
- license = custom
+ license = BSD
makedepends = git
- makedepends = python2-setuptools
- makedepends = gcc-fortran
- makedepends = wget
- depends = cuda
- depends = opencv
- depends = openblas-lapack
+ makedepends = doxygen
+ makedepends = texlive-core
+ depends = boost-libs
+ depends = protobuf
depends = google-glog
depends = gflags
+ depends = hdf5
+ depends = opencv
+ depends = leveldb
depends = lmdb
- depends = cython2
- depends = ipython2
- depends = python2-pillow
- depends = python2-numpy
- depends = python2-yaml
- depends = python2-numpy
- depends = python2-scipy
- depends = python2-scikit-image
- depends = python2-scikit-learn
- depends = python2-matplotlib
- depends = python2-h5py
- depends = python2-leveldb
- depends = python2-networkx
- depends = python2-nose
- depends = python2-pandas
- depends = python2-dateutil
- depends = python2-protobuf
- depends = python2-gflags
- depends = python2-pandas
+ depends = python
depends = boost
- depends = boost-libs
- depends = bc
+ depends = cython
+ depends = python-numpy
+ depends = python-scipy
+ depends = python-matplotlib
+ depends = ipython
+ depends = python-h5py
+ depends = python-networkx
+ depends = python-nose
+ depends = python-pandas
+ depends = python-dateutil
+ depends = python-protobuf
+ depends = python-gflags
+ depends = python-yaml
+ depends = python-pillow
+ depends = python-six
+ depends = openblas-lapack
+ depends = cudnn
+ depends = nccl
+ depends = python-leveldb
+ depends = python-scikit-image
+ depends = python-pydot
provides = caffe
- provides = pycaffe
- provides = python2-pycaffe
+ provides = caffe-cpu
+ provides = caffe-cpu-git
conflicts = caffe
- conflicts = pycaffe
- conflicts = python2-pycaffe
- conflicts = pycaffe-git
- conflicts = python2-pycaffe-git
- source = git+https://github.com/BVLC/caffe.git
- source = classify-print-results.py
- source = Makefile.config
+ conflicts = caffe-cpu
+ conflicts = caffe-cpu-git
+ conflicts = caffe-dr-git
+ conflicts = caffe-mnc-dr-git
+ source = caffe-git::git+https://github.com/BVLC/caffe.git
sha256sums = SKIP
- sha256sums = c12ddbd524c1b5871cb42a8775cf17a3ef86ae8a859837a6c6c4e2c19deca3d5
- sha256sums = 35fa1150f5a5b3909e96422f1efe10d43bdd8cef6c0c5d5528c53f0bc579dd74
pkgname = caffe-git
diff --git a/Makefile.config b/Makefile.config
deleted file mode 100644
index 230d50b5f666..000000000000
--- a/Makefile.config
+++ /dev/null
@@ -1,78 +0,0 @@
-## Refer to http://caffe.berkeleyvision.org/installation.html
-# Contributions simplifying and improving our build system are welcome!
-
-# cuDNN acceleration switch (uncomment to build with cuDNN).
-# USE_CUDNN := 1
-
-# CPU-only switch (uncomment to build without GPU support).
-# CPU_ONLY := 1
-
-# To customize your choice of compiler, uncomment and set the following.
-# N.B. the default for Linux is g++ and the default for OSX is clang++
-# CUSTOM_CXX := g++
-
-# CUDA directory contains bin/ and lib/ directories that we need.
-CUDA_DIR := /opt/cuda
-# On Ubuntu 14.04, if cuda tools are installed via
-# "sudo apt-get install nvidia-cuda-toolkit" then use this instead:
-# CUDA_DIR := /usr
-
-# CUDA architecture setting: going with all of them.
-# For CUDA < 6.0, comment the *_50 lines for compatibility.
-CUDA_ARCH := -gencode arch=compute_20,code=sm_20 \
- -gencode arch=compute_20,code=sm_21 \
- -gencode arch=compute_30,code=sm_30 \
- -gencode arch=compute_35,code=sm_35 \
- -gencode arch=compute_50,code=sm_50 \
- -gencode arch=compute_50,code=compute_50
-
-# BLAS choice:
-# atlas for ATLAS (default)
-# mkl for MKL
-# open for OpenBlas
-BLAS := open
-# Custom (MKL/ATLAS/OpenBLAS) include and lib directories.
-# Leave commented to accept the defaults for your choice of BLAS
-# (which should work)!
-# BLAS_INCLUDE := /path/to/your/blas
-# BLAS_LIB := /path/to/your/blas
-
-# This is required only if you will compile the matlab interface.
-# MATLAB directory should contain the mex binary in /bin.
-# MATLAB_DIR := /usr/local
-# MATLAB_DIR := /Applications/MATLAB_R2012b.app
-
-# NOTE: this is required only if you will compile the python interface.
-# We need to be able to find Python.h and numpy/arrayobject.h.
-PYTHON_INCLUDE := /usr/include/python2.7 \
- /usr/lib/python2.7/site-packages/numpy/core/include
-# Anaconda Python distribution is quite popular. Include path:
-# PYTHON_INCLUDE := $(HOME)/anaconda/include \
- # $(HOME)/anaconda/include/python2.7 \
- # $(HOME)/anaconda/lib/python2.7/site-packages/numpy/core/include
-
-# We need to be able to find libpythonX.X.so or .dylib.
-PYTHON_LIB := /usr/lib
-# PYTHON_LIB := $(HOME)/anaconda/lib
-
-# Uncomment to support layers written in Python (will link against Python libs)
-WITH_PYTHON_LAYER := 1
-
-# Whatever else you find you need goes here.
-INCLUDE_DIRS := $(PYTHON_INCLUDE) /usr/include
-LIBRARY_DIRS := $(PYTHON_LIB) /usr/lib
-
-BUILD_DIR := build
-DISTRIBUTE_DIR := distribute
-
-# Uncomment for debugging. Does not work on OSX due to https://github.com/BVLC/caffe/issues/171
-# DEBUG := 1
-
-# The ID of the GPU that 'make runtest' will use to run unit tests.
-TEST_GPUID := 0
-
-# enable pretty build (comment to see full commands)
-Q ?= @
-
-# Indicate that OpenCV 3 is being used
-OPENCV_VERSION := 3
diff --git a/PKGBUILD b/PKGBUILD
index 8aacdb49e1a1..261fb5d51df5 100644
--- a/PKGBUILD
+++ b/PKGBUILD
@@ -1,187 +1,169 @@
-# Maintainer: Drew Noel <drewmnoel@gmail.com>
+# Maintainer : Daniel Bermond < yahoo-com: danielbermond >
+# Contributor: Drew Noel <drewmnoel@gmail.com>
+# Contributor: Jonathan Yantis
pkgname=caffe-git
-pkgver=rc3.r249.g7f8f9e1
+pkgver=rc5.r5.g317d162ac
pkgrel=1
-pkgdesc='A fast framework for deep learning built in C++ for speed with a Python 2 interface'
-arch=(x86_64)
-url='https://github.com/BVLC/caffe'
-license=('custom')
-
-# if using an AWS EC2 make sure to use the community repo for cuda and not the ec2 repo.
-depends=('cuda'
- 'opencv'
- 'openblas-lapack'
- 'google-glog'
- 'gflags'
- 'lmdb'
- 'cython2'
- 'ipython2'
- 'python2-pillow'
- 'python2-numpy'
- 'python2-yaml'
- 'python2-numpy'
- 'python2-scipy'
- 'python2-scikit-image'
- 'python2-scikit-learn'
- 'python2-matplotlib'
- 'python2-h5py'
- 'python2-leveldb'
- 'python2-networkx'
- 'python2-nose'
- 'python2-pandas'
- 'python2-dateutil'
- 'python2-protobuf'
- 'python2-gflags'
- 'python2-pandas'
- 'boost'
- 'boost-libs'
- 'bc'
- )
-
-source=('git+https://github.com/BVLC/caffe.git'
- 'classify-print-results.py'
- 'Makefile.config')
-makedepends=('git' 'python2-setuptools' 'gcc-fortran' 'wget')
-provides=('caffe' 'pycaffe' 'python2-pycaffe' )
-conflicts=('caffe' 'pycaffe' 'python2-pycaffe' 'pycaffe-git' 'python2-pycaffe-git')
-sha256sums=('SKIP'
- 'c12ddbd524c1b5871cb42a8775cf17a3ef86ae8a859837a6c6c4e2c19deca3d5'
- '35fa1150f5a5b3909e96422f1efe10d43bdd8cef6c0c5d5528c53f0bc579dd74')
+pkgdesc="A deep learning framework made with expression, speed, and modularity in mind (git version, gpu enabled)"
+arch=('x86_64')
+url="http://caffe.berkeleyvision.org/"
+license=('BSD')
+depends=( # binary repositories:
+ 'boost-libs' 'protobuf' 'google-glog' 'gflags' 'hdf5' 'opencv' 'leveldb'
+ 'lmdb' 'python' 'boost' 'cython' 'python-numpy' 'python-scipy'
+ 'python-matplotlib' 'ipython' 'python-h5py' 'python-networkx' 'python-nose'
+ 'python-pandas' 'python-dateutil' 'python-protobuf' 'python-gflags'
+ 'python-yaml' 'python-pillow' 'python-six'
+ # AUR:
+ 'openblas-lapack' 'cudnn' 'nccl' 'python-leveldb' 'python-scikit-image'
+ 'python-pydot')
+makedepends=('git' 'doxygen' 'texlive-core')
+provides=('caffe' 'caffe-cpu' 'caffe-cpu-git')
+conflicts=('caffe' 'caffe-cpu' 'caffe-cpu-git' 'caffe-dr-git' 'caffe-mnc-dr-git')
+source=("${pkgname}"::"git+https://github.com/BVLC/caffe.git")
+sha256sums=('SKIP')
+
+prepare() {
+ cd "${srcdir}/${pkgname}"
+
+ # prepare to configure options in Makefile.config
+ cp -f Makefile.config.example Makefile.config
+
+ # enable cuDNN acceleration switch
+ sed -i '/USE_CUDNN/s/^#[[:space:]]//g' Makefile.config
+
+ # enable NCCL acceleration switch
+ sed -i '/USE_NCCL/s/^#[[:space:]]//g' Makefile.config
+
+ # strictly enable I/O dependencies
+ sed -i '/USE_OPENCV/s/^#[[:space:]]//;/USE_OPENCV/s/0/1/' Makefile.config
+ sed -i '/USE_LEVELDB/s/^#[[:space:]]//;/USE_LEVELDB/s/0/1/' Makefile.config
+ sed -i '/USE_LMDB/s/^#[[:space:]]//;/USE_LMDB/s/0/1/' Makefile.config
+ sed -i '/OPENCV_VERSION/s/^#[[:space:]]//g' Makefile.config
+
+ # use gcc5 (gcc6 does not work)
+ sed -i '/CUSTOM_CXX/s/^#[[:space:]]//;/CUSTOM_CXX/s/$/-5/' Makefile.config
+
+ # set CUDA directory
+ sed -i '/CUDA_DIR/s/\/usr\/local\/cuda/\/opt\/cuda/' Makefile.config
+
+ # set OpenBLAS as the BLAS provider and adjust its directories
+ sed -i '/BLAS[[:space:]]\:=[[:space:]]atlas/s/atlas/open/' Makefile.config
+ sed -i 's/.*BLAS_INCLUDE[[:space:]]\:=[[:space:]]\/path.*/BLAS_INCLUDE := \/usr\/include/' Makefile.config
+ sed -i 's/.*BLAS_LIB[[:space:]]\:=[[:space:]]\/path.*/BLAS_LIB := \/usr\/lib/' Makefile.config
+
+ # python3 settings
+ _py2inc_line="$(sed -n '/PYTHON_INCLUDE[[:space:]]\:=[[:space:]]\/usr\/include\/python2\.7/=' Makefile.config)"
+ _py3inc_line="$(sed -n '/PYTHON_INCLUDE[[:space:]]\:=[[:space:]]\/usr\/include\/python3\.5m/=' Makefile.config)"
+ _py3libs_line="$(sed -n '/PYTHON_LIBRARIES/=' Makefile.config)"
+ sed -i "$((_py2inc_line))s/^/# /" Makefile.config # comment python2 lines
+ sed -i "$((_py2inc_line+1))s/^/#/" Makefile.config
+ sed -i "$((_py3inc_line))s/^#[[:space:]]//" Makefile.config # uncomment python3 PYTHON_INCLUDE lines
+ sed -i "$((_py3inc_line+1))s/^#//" Makefile.config
+ sed -i "$((_py3libs_line))s/^#[[:space:]]//" Makefile.config # uncomment PYTHON_LIBRARIES line
+ sed -i "$((_py3libs_line))s/5/6/" Makefile.config # change version in PYTHON_LIBRARIES
+ sed -i "$((_py3inc_line))s/5/6/" Makefile.config # change version in python3 PYTHON_INCLUDE
+ sed -i "$((_py3inc_line+1))s/5/6/;$((_py3inc_line+1))s/dist/site/" Makefile.config
+
+ # use python layers
+ sed -i '/WITH_PYTHON_LAYER/s/^#[[:space:]]//g' Makefile.config
+
+ # python2 settings
+ # if you want to use python2 _instead_ of python3:
+ # - uncomment this block
+ # - comment the python3 block
+ # - change python3 depends and optdepends to python2
+ # - NOTE: do not enable both python2 and python3 blocks. choose only one.
+ # - NOTE: python2 is the Caffe default but this package uses python3 by default
+ #_py2inc_line="$(sed -n '/PYTHON_INCLUDE[[:space:]]\:=[[:space:]]\/usr\/include\/python2\.7/=' Makefile.config)"
+ #sed -i "$((_py2inc_line+1))s/dist/site/" Makefile.config
+}
pkgver() {
- cd $srcdir/caffe
- set -o pipefail
- git describe --long --tags | sed 's/\([^-]*-g\)/r\1/;s/-/./g'
+ cd "${srcdir}/${pkgname}"
+
+ # git, tags available
+ git describe --long --tags | sed 's/\([^-]*-g\)/r\1/;s/-/./g'
}
build() {
- # You can modify this file and do some stuff like turn off using the GPU etc
- cp Makefile.config $srcdir/caffe
-
- # Modified classify.py for testing that will output results
- cp classify-print-results.py $srcdir/caffe/python/
-
- cd $srcdir/caffe
-
- # Patch any #!/usr/bin/python to #!/usr/bin/python2
- for file in $(find . -name '*.py' -print); do
- sed -r -i 's_^#!.*/usr/bin/python(\s|$)_#!/usr/bin/python2_' $file
- sed -r -i 's_^#!.*/usr/bin/env(\s)*python(\s|$)_#!/usr/bin/env python2_' $file
- done
- # Do the same for python examples
- for file in $(find . -name '*.py.example' -print); do
- sed -r -i 's_^#!.*/usr/bin/python(\s|$)_#!/usr/bin/python2_' $file
- sed -r -i 's_^#!.*/usr/bin/env(\s)*python(\s|$)_#!/usr/bin/env python2_' $file
- done
-
- # If the user has colormake installed then use that instead of make.
- if hash colormake 2>/dev/null; then
- colormake all
- colormake pycaffe
- else
+ cd "${srcdir}/${pkgname}"
+ msg2 "Building target 'all'..."
make all
+ msg2 "Building target 'pycaffe'..."
make pycaffe
- fi
-
- msg "Downloading the ImageNet Caffe model and labels"
- python2 scripts/download_model_binary.py models/bvlc_reference_caffenet # 232 MB
- sh data/ilsvrc12/get_ilsvrc_aux.sh # 17 MB
-
- msg "Downloading the mnist data"
- sh data/mnist/get_mnist.sh #10 MB
+ rm -rf doxygen
+ msg2 "Building target 'docs'..."
+ make docs
+ msg2 "Building target 'distribute'..."
+ make distribute
}
-# check() {
-# cd caffe
-
-# # Unrem these next two lines to run the 838 tests
-# # make test
-# # make runtest
-
-# # A simple test to make sure its working (Attempt to classify a picture of a cat)
-# # Expected result: [('tabby', '0.27933'), ('tiger cat', '0.21915'), ('Egyptian cat', '0.16064'), ('lynx', '0.12844'), ('kit fox', '0.05155')]
-# python2 python/classify-print-results.py --print_results examples/images/cat.jpg foo
-# msg "Tested that everything works.. you should see some cat type classifiations above this message"
-# }
-
package() {
- cd $srcdir/caffe
-
- # Setup Python by hand since no setup.py
- mkdir -p $pkgdir/usr/lib/python2.7/site-packages/caffe/
- cp -R python/caffe/* $pkgdir/usr/lib/python2.7/site-packages/caffe/
-
- # Add missing __init__.py file to ensure that the modules are detected.
- find "$pkgdir/usr/lib/python2.7/site-packages/caffe" -type d -exec touch '{}'/__init__.py \;
-
- # Still leaving a copy of the python code in the main caffe directory since it might be useful for some
- # Though because of that namcap will give this error:
- # caffe-git E: ELF file ('opt/caffe/python/caffe/_caffe.so') outside of a valid path.
-
- # Install shared libraries
- mkdir -p $pkgdir/usr/lib/
- install -Dm644 build/lib/* "${pkgdir}/usr/lib/"
-
- ### Install all the execulables ###
- mkdir -p $pkgdir/usr/bin/
-
- # Primary executable
- install -D -m755 build/tools/caffe.bin "$pkgdir/usr/bin/caffe"
-
- # Conversion executables
- install -D -m755 build/examples/cifar10/convert_cifar_data.bin "$pkgdir/usr/bin/convert_cifar_data"
- install -D -m755 build/examples/mnist/convert_mnist_data.bin "$pkgdir/usr/bin/convert_mnist_data"
- install -D -m755 build/examples/siamese/convert_mnist_siamese_data.bin "$pkgdir/usr/bin/convert_mnist_siamese_data"
-
- # Depreciated executables. All in caffe executable now but included here for backwards compatiblity
- install -D -m755 build/tools/finetune_net.bin "$pkgdir/usr/bin/finetune_net"
- install -D -m755 build/tools/train_net.bin "$pkgdir/usr/bin/train_net"
- install -D -m755 build/tools/device_query.bin "$pkgdir/usr/bin/device_query"
- install -D -m755 build/tools/net_speed_benchmark.bin "$pkgdir/usr/bin/net_speed_benchmark"
- install -D -m755 build/tools/compute_image_mean.bin "$pkgdir/usr/bin/compute_image_mean"
- install -D -m755 build/tools/convert_imageset.bin "$pkgdir/usr/bin/convert_imageset"
- install -D -m755 build/tools/test_net.bin "$pkgdir/usr/bin/test_net"
- install -D -m755 build/tools/upgrade_net_proto_text.bin "$pkgdir/usr/bin/upgrade_net_proto_text"
- # install -D -m755 build/tools/dump_network.bin "$pkgdir/usr/bin/dump_network"
- install -D -m755 build/tools/extract_features.bin "$pkgdir/usr/bin/extract_features"
-
- # Make main target dir
- mkdir -p $pkgdir/opt/caffe
-
- # Copy all source files over
- cp .Doxyfile $pkgdir/opt/caffe/
- cp .travis.yml $pkgdir/opt/caffe/
- cp CMakeLists.txt $pkgdir/opt/caffe/
- cp CONTRIBUTING.md $pkgdir/opt/caffe/
- cp CONTRIBUTORS.md $pkgdir/opt/caffe/
- cp INSTALL.md $pkgdir/opt/caffe/
- cp LICENSE $pkgdir/opt/caffe/
- cp Makefile $pkgdir/opt/caffe/
- cp README.md $pkgdir/opt/caffe/
- cp caffe.cloc $pkgdir/opt/caffe/
- cp -r cmake $pkgdir/opt/caffe/
- cp -r data $pkgdir/opt/caffe/
- cp -r distribute $pkgdir/opt/caffe/
- cp -r docker $pkgdir/opt/caffe/
- cp -r docs $pkgdir/opt/caffe/
- cp -r examples $pkgdir/opt/caffe/
- cp -r include $pkgdir/opt/caffe/
- cp -r matlab $pkgdir/opt/caffe/
- cp -r models $pkgdir/opt/caffe/
- cp -r python $pkgdir/opt/caffe/
- cp -r scripts $pkgdir/opt/caffe/
- cp -r src $pkgdir/opt/caffe/
- cp -r tools $pkgdir/opt/caffe/
-
- # Remove residual git files
- find $pkgdir/opt/caffe/ -name .gitignore -delete
-
- # Install BSD2 License (not in common licenses so lets make it custom)
- install -D -m644 LICENSE "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
-
- # Install Documentation
- install -D -m644 README.md "${pkgdir}/usr/share/doc/${pkgname}/README.md"
+ # directories creation
+ mkdir -p "${pkgdir}/usr/bin"
+ mkdir -p "${pkgdir}/usr/include/caffe/"{layers,proto,test,util}
+ mkdir -p "${pkgdir}/usr/lib/python3.6/site-packages/caffe/"{imagenet,proto,test}
+ mkdir -p "${pkgdir}/usr/share/"{caffe,doc/"${pkgname}"/search,licenses/"${pkgname}"}
+
+ cd "${pkgname}/distribute"
+
+ # binaries
+ cd bin
+ install -D -m755 * "${pkgdir}/usr/bin"
+
+ # libraries
+ cd ../lib
+ install -D -m755 *.so "${pkgdir}/usr/lib"
+
+ # includes
+ cd ../include/caffe
+ install -D -m644 *.hpp "${pkgdir}/usr/include/caffe"
+ for _dir in layers proto test util
+ do
+ cd "${srcdir}/${pkgname}/distribute/include/caffe/${_dir}"
+ install -D -m644 * "${pkgdir}/usr/include/caffe/${_dir}"
+ done
+
+ # python
+ cd ../../../python
+ install -D -m755 *.py "${pkgdir}/usr/bin"
+ rm -rf python # remove duplicated 'python' folder
+
+ cd caffe
+ for _file in *
+ do
+ [ -d "$_file" ] && continue # skip directories
+ _mode="$(stat --format '%a' "$_file")"
+ install -D -m"$_mode" "$_file" "${pkgdir}/usr/lib/python3.6/site-packages/caffe"
+ done
+
+ for _dir in imagenet proto test
+ do
+ cd "${srcdir}/${pkgname}/distribute/python/caffe/$_dir"
+ for _file in *
+ do
+ _mode="$(stat --format '%a' "$_file")"
+ install -D -m"$_mode" "$_file" "${pkgdir}/usr/lib/python3.6/site-packages/caffe/${_dir}"
+ done
+ done
+
+ # proto
+ cd ../../../proto
+ install -D -m644 * "${pkgdir}/usr/share/caffe"
+
+ # docs
+ cd ../../doxygen/html
+ for _file in *
+ do
+ [ -d "$_file" ] && continue # skip directories
+ install -D -m644 "$_file" "${pkgdir}/usr/share/doc/${pkgname}"
+ done
+ cd search
+ install -D -m644 * "${pkgdir}/usr/share/doc/${pkgname}/search"
+
+ # license
+ cd "${srcdir}/${pkgname}"
+ install -D -m644 LICENSE "${pkgdir}/usr/share/licenses/${pkgname}"
}
-
-# vim:set ts=2 sw=2 et:
diff --git a/classify-print-results.py b/classify-print-results.py
deleted file mode 100755
index e356c94f653c..000000000000
--- a/classify-print-results.py
+++ /dev/null
@@ -1,170 +0,0 @@
-#!/usr/bin/env python2
-"""
-classify.py is an out-of-the-box image classifer callable from the command line.
-
-By default it configures and runs the Caffe reference ImageNet model.
-"""
-import numpy as np
-import os
-import sys
-import argparse
-import glob
-import time
-import pandas as pd
-import caffe
-
-
-def main(argv):
- pycaffe_dir = os.path.dirname(__file__)
-
- parser = argparse.ArgumentParser()
- # Required arguments: input and output files.
- parser.add_argument(
- "input_file",
- help="Input image, directory, or npy."
- )
- parser.add_argument(
- "output_file",
- help="Output npy filename."
- )
- # Optional arguments.
- parser.add_argument(
- "--model_def",
- default=os.path.join(pycaffe_dir,
- "../models/bvlc_reference_caffenet/deploy.prototxt"),
- help="Model definition file."
- )
- parser.add_argument(
- "--pretrained_model",
- default=os.path.join(
- pycaffe_dir,
- "../models/bvlc_reference_caffenet/bvlc_reference_caffenet.caffemodel"),
- help="Trained model weights file.")
-
- # parser.add_argument(
- # "--gpu",
- # action='store_true',
- # help="Switch for gpu computation."
- # )
-
- parser.add_argument(
- "--center_only",
- action='store_true',
- help="Switch for prediction from center crop alone instead of " +
- "averaging predictions across crops (default)."
- )
- parser.add_argument(
- "--images_dim",
- default='256,256',
- help="Canonical 'height,width' dimensions of input images."
- )
- parser.add_argument(
- "--mean_file",
- default=os.path.join(pycaffe_dir,
- 'caffe/imagenet/ilsvrc_2012_mean.npy'),
- help="Data set image mean of H x W x K dimensions (numpy array). " +
- "Set to '' for no mean subtraction."
- )
- parser.add_argument(
- "--input_scale",
- type=float,
- help="Multiply input features by this scale to finish preprocessing."
- )
- parser.add_argument(
- "--raw_scale",
- type=float,
- default=255.0,
- help="Multiply raw input by this scale before preprocessing."
- )
- parser.add_argument(
- "--channel_swap",
- default='2,1,0',
- help="Order to permute input channels. The default converts " +
- "RGB -> BGR since BGR is the Caffe default by way of OpenCV."
- )
- parser.add_argument(
- "--ext",
- default='jpg',
- help="Image file extension to take as input when a directory " +
- "is given as the input file."
- )
- parser.add_argument(
- "--labels_file",
- default=os.path.join(pycaffe_dir,
- "../data/ilsvrc12/synset_words.txt"),
- help="Readable label definition file."
- )
- parser.add_argument(
- "--print_results",
- action='store_true',
- help="Write output text to stdout rather than serializing to a file."
- )
-
- args = parser.parse_args()
-
- image_dims = [int(s) for s in args.images_dim.split(',')]
-
- mean, channel_swap = None, None
- if args.mean_file:
- mean = np.load(args.mean_file)
- if args.channel_swap:
- channel_swap = [int(s) for s in args.channel_swap.split(',')]
-
- # Make classifier.
-
- classifier = caffe.Classifier(args.model_def, args.pretrained_model,
- image_dims=image_dims, mean=mean,
- input_scale=args.input_scale, raw_scale=args.raw_scale,
- channel_swap=channel_swap)
- # # Make classifier.
- # classifier = caffe.Classifier(args.model_def, args.pretrained_model,
- # image_dims=image_dims, gpu=args.gpu, mean=mean,
- # input_scale=args.input_scale, raw_scale=args.raw_scale,
- # channel_swap=channel_swap)
-
- # if args.gpu:
- # print 'GPU mode'
-
- # Load numpy array (.npy), directory glob (*.jpg), or image file.
- args.input_file = os.path.expanduser(args.input_file)
- if args.input_file.endswith('npy'):
- inputs = np.load(args.input_file)
- elif os.path.isdir(args.input_file):
- inputs = [caffe.io.load_image(im_f)
- for im_f in glob.glob(args.input_file + '/*.' + args.ext)]
- else:
- inputs = [caffe.io.load_image(args.input_file)]
-
- print "Classifying %d inputs." % len(inputs)
-
- # Classify.
- start = time.time()
- scores = classifier.predict(inputs, not args.center_only).flatten()
- print "Done in %.2f s." % (time.time() - start)
-
- if args.print_results:
- with open(args.labels_file) as f:
- labels_df = pd.DataFrame([
- {
- 'synset_id': l.strip().split(' ')[0],
- 'name': ' '.join(l.strip().split(' ')[1:]).split(',')[0]
- }
- for l in f.readlines()
- ])
- labels = labels_df.sort('synset_id')['name'].values
-
- indices = (-scores).argsort()[:5]
- predictions = labels[indices]
-
- meta = [
- (p, '%.5f' % scores[i])
- for i, p in zip(indices, predictions)
- ]
-
- print meta
-
- # Save
- np.save(args.output_file, scores)
-
-if __name__ == '__main__':
- main(sys.argv)