Choose a CUDA 10.0 base image
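This walkthrough was done inside a platform-provided container (note the /public/script helper scripts below), but any plain CUDA 10.0 development image works as a starting point. A minimal sketch, assuming the nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04 tag and Docker 19.03+ with the NVIDIA container toolkit installed:
# pull a CUDA 10.0 development image (the exact tag is an assumption; any 10.0 devel tag will do)
docker pull nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04
# start an interactive container with GPU access
docker run --gpus all -it nvidia/cuda:10.0-cudnn7-devel-ubuntu18.04 /bin/bash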
Add the nvidia-cuda repository and switch the apt source
curl -fsSL https://mirrors.aliyun.com/nvidia-cuda/ubuntu1804/x86_64/7fa2af80.pub | apt-key add - && \
echo "deb https://mirrors.aliyun.com/nvidia-cuda/ubuntu1804/x86_64/ /" > /etc/apt/sources.list.d/cuda.list && \
bash /public/script/switch_apt_source.sh
Install cuRAND
apt install cuda-curand-dev-10-0
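A quick way to confirm the cuRAND development headers landed where the CUDA toolchain expects them:
# list the package contents and look for the header
dpkg -L cuda-curand-dev-10-0 | grep curand.h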
Switch the conda source
bash /public/script/switch_conda_source.sh
Create a Python 3.7 virtual environment
conda create -n py37 python=3.7
conda deactivate
conda activate py37
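A quick check that the new environment is active and resolves to its own interpreter:
# both should point into /root/miniconda3/envs/py37
which python
python --version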
Install dependency packages
apt-get -y install libboost-dev libprotobuf-dev libgflags-dev libgoogle-glog-dev libhdf5-dev libopencv-dev protobuf-c-compiler protobuf-compiler libopenblas-dev libleveldb-dev liblmdb-dev libboost-system-dev libboost-filesystem-dev libsnappy-dev libboost-thread-dev libatlas-base-dev libboost-python-dev
Add the nvidia-machine-learning repository
curl -fsSL https://mirrors.cloud.tencent.com/nvidia-machine-learning/ubuntu1804/x86_64/7fa2af80.pub | apt-key add - && \
echo "deb https://mirrors.cloud.tencent.com/nvidia-machine-learning/ubuntu1804/x86_64/ /" > /etc/apt/sources.list.d/cuda.list
Install the remaining dependencies
apt update
apt install libnccl2=2.6.4-1+cuda10.0 libnccl-dev=2.6.4-1+cuda10.0
apt-get install -y --no-install-recommends libboost-all-dev
pip install boost
conda install opencv
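Before moving on, it is worth confirming that the NCCL packages and the Python-side OpenCV bindings just installed are actually visible (the cv2 module name comes from the conda opencv package):
# confirm the pinned NCCL runtime and headers
dpkg -l | grep nccl
# confirm OpenCV is importable inside the py37 environment
python -c "import cv2; print(cv2.__version__)"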
Clone the caffe repository
git clone -b 1.0 --depth 1 https://github.com/BVLC/caffe.git
# if GitHub is slow, the Gitee mirror works as well:
# git clone -b 1.0 --depth 1 https://gitee.com/matpools/caffe.git
cd caffe
for req in $(cat python/requirements.txt); do pip install $req; done
cp Makefile.config.example Makefile.config
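The for-loop above installs the pinned Python dependencies one at a time, which makes it obvious which package fails if one does; the one-line equivalent works just as well:
pip install -r python/requirements.txt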
Find the Python include and library paths
python -c "from distutils.sysconfig import get_python_inc; print(get_python_inc())"
python -c "import distutils.sysconfig as sysconfig; print(sysconfig.get_config_var('LIBDIR'))"
Sample output:
/root/miniconda3/envs/py37/include/python3.7m
/root/miniconda3/envs/py37/lib
Find the numpy include path
find /root/miniconda3/envs/py37/lib/ -name numpy
Sample output:
/root/miniconda3/envs/py37/lib/python3.7/site-packages/numpy/core/include/numpy
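If you prefer asking numpy for its include directory directly, this prints the same location:
python -c "import numpy; print(numpy.get_include())"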
If you are also working from a clean CUDA 10 image, you can copy the file below into Makefile.config as-is and save it.
## Refer to http://caffe.berkeleyvision.org/installation.html
# Contributions simplifying and improving our build system are welcome!
# cuDNN acceleration switch (uncomment to build with cuDNN).
USE_CUDNN := 1
# CPU-only switch (uncomment to build without GPU support).
# CPU_ONLY := 1
# uncomment to disable IO dependencies and corresponding data layers
# USE_OPENCV := 0
# USE_LEVELDB := 0
# USE_LMDB := 0
# uncomment to allow MDB_NOLOCK when reading LMDB files (only if necessary)
# You should not set this flag if you will be reading LMDBs with any
# possibility of simultaneous read and write
# ALLOW_LMDB_NOLOCK := 1
# Uncomment if you're using OpenCV 3
OPENCV_VERSION := 3
# To customize your choice of compiler, uncomment and set the following.
# N.B. the default for Linux is g++ and the default for OSX is clang++
# CUSTOM_CXX := g++
# CUDA directory contains bin/ and lib/ directories that we need.
CUDA_DIR := /usr/local/cuda
# On Ubuntu 14.04, if cuda tools are installed via
# "sudo apt-get install nvidia-cuda-toolkit" then use this instead:
# CUDA_DIR := /usr
# CUDA architecture setting: going with all of them.
# For CUDA < 6.0, comment the *_50 through *_61 lines for compatibility.
# For CUDA < 8.0, comment the *_60 and *_61 lines for compatibility.
CUDA_ARCH := -gencode arch=compute_30,code=sm_30 \
-gencode arch=compute_35,code=sm_35 \
-gencode arch=compute_50,code=sm_50 \
-gencode arch=compute_52,code=sm_52 \
-gencode arch=compute_60,code=sm_60 \
-gencode arch=compute_61,code=sm_61 \
-gencode arch=compute_61,code=compute_61
# BLAS choice:
# atlas for ATLAS (default)
# mkl for MKL
# open for OpenBlas
BLAS := atlas
# Custom (MKL/ATLAS/OpenBLAS) include and lib directories.
# Leave commented to accept the defaults for your choice of BLAS
# (which should work)!
# BLAS_INCLUDE := /path/to/your/blas
# BLAS_LIB := /path/to/your/blas
# Homebrew puts openblas in a directory that is not on the standard search path
# BLAS_INCLUDE := $(shell brew --prefix openblas)/include
# BLAS_LIB := $(shell brew --prefix openblas)/lib
# This is required only if you will compile the matlab interface.
# MATLAB directory should contain the mex binary in /bin.
# MATLAB_DIR := /usr/local
# MATLAB_DIR := /Applications/MATLAB_R2012b.app
# NOTE: this is required only if you will compile the python interface.
# We need to be able to find Python.h and numpy/arrayobject.h. (If you built your own environment, adjust PYTHON_INCLUDE to match.)
PYTHON_INCLUDE := /root/miniconda3/envs/py37/include/python3.7m \
/root/miniconda3/envs/py37/lib/python3.7/site-packages/numpy/core/include
# /usr/include/python2.7 \
# /usr/lib/python2.7/dist-packages/numpy/core/include
# Anaconda Python distribution is quite popular. Include path:
# Verify anaconda location, sometimes it's in root.
# ANACONDA_HOME := $(HOME)/anaconda
# PYTHON_INCLUDE := $(ANACONDA_HOME)/include \
# $(ANACONDA_HOME)/include/python2.7 \
# $(ANACONDA_HOME)/lib/python2.7/site-packages/numpy/core/include
# Uncomment to use Python 3 (default is Python 2). (If you built your own environment, adjust PYTHON_LIBRARIES to match.)
PYTHON_LIBRARIES := boost_python3 python3.7m
# PYTHON_INCLUDE := /usr/include/python3.5m \
# /usr/lib/python3.5/dist-packages/numpy/core/include
# We need to be able to find libpythonX.X.so or .dylib. (If you built your own environment, adjust PYTHON_LIB to match.)
PYTHON_LIB := /root/miniconda3/envs/py37/lib
# PYTHON_LIB := $(ANACONDA_HOME)/lib
# Homebrew installs numpy in a non standard path (keg only)
# PYTHON_INCLUDE += $(dir $(shell python -c 'import numpy.core; print(numpy.core.__file__)'))/include
# PYTHON_LIB += $(shell brew --prefix numpy)/lib
# Uncomment to support layers written in Python (will link against Python libs)
# WITH_PYTHON_LAYER := 1
# Whatever else you find you need goes here.
INCLUDE_DIRS := $(PYTHON_INCLUDE) /usr/local/include /usr/include/hdf5/serial
LIBRARY_DIRS := $(PYTHON_LIB) /usr/local/lib /usr/lib /usr/lib/x86_64-linux-gnu/hdf5/serial /usr/lib/x86_64-linux-gnu
# If Homebrew is installed at a non standard location (for example your home directory) and you use it for general dependencies
# INCLUDE_DIRS += $(shell brew --prefix)/include
# LIBRARY_DIRS += $(shell brew --prefix)/lib
# NCCL acceleration switch (uncomment to build with NCCL)
# https://github.com/NVIDIA/nccl (last tested version: v1.2.3-1+cuda8.0)
USE_NCCL := 1
# Uncomment to use `pkg-config` to specify OpenCV library paths.
# (Usually not necessary -- OpenCV libraries are normally installed in one of the above $LIBRARY_DIRS.)
# USE_PKG_CONFIG := 1
# N.B. both build and distribute dirs are cleared on `make clean`
BUILD_DIR := build
DISTRIBUTE_DIR := distribute
# Uncomment for debugging. Does not work on OSX due to https://github.com/BVLC/caffe/issues/171
# DEBUG := 1
# The ID of the GPU that 'make runtest' will use to run unit tests.
TEST_GPUID := 0
# enable pretty build (comment to see full commands)
Q ?= @
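Before running make, it is worth a quick check that the boost_python3 library named in PYTHON_LIBRARIES can actually be found, because a mismatch here only shows up as a link error at the very end of the build. A sketch, assuming the standard Ubuntu 18.04 multiarch library path:
# report whether an unversioned boost_python3 library exists
ls /usr/lib/x86_64-linux-gnu/libboost_python3* 2>/dev/null || echo "no libboost_python3*.so found; adjust the PYTHON_LIBRARIES line"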
Start compiling
make clean
make all -j6
make pycaffe -j6
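Optionally, the bundled unit tests can be built and run as well to confirm the build (this takes a while; TEST_GPUID in Makefile.config selects the GPU they use):
make test -j6
make runtest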
Set environment variables (the paths below assume the repository was cloned to /caffe)
export PYTHONPATH=/caffe/python/:$PYTHONPATH
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/root/miniconda3/envs/py37/lib
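These exports only last for the current shell. To make them persistent, they can be appended to the shell startup file (assuming bash and the /caffe clone path used above):
echo 'export PYTHONPATH=/caffe/python/:$PYTHONPATH' >> ~/.bashrc
echo 'export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/root/miniconda3/envs/py37/lib' >> ~/.bashrc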
Test in an ipython session
ipython
import caffe
caffe.set_mode_gpu()
caffe.__version__
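Beyond checking the version, a forward pass through the bundled LeNet definition is a more complete smoke test. A minimal sketch, assuming the repository root is /caffe and that examples/mnist/lenet.prototxt (the deploy-style definition shipped with the 1.0 tag) is present:
cd /caffe
python << 'EOF'
import numpy as np
import caffe

caffe.set_device(0)
caffe.set_mode_gpu()
# load the deploy-style LeNet definition shipped with the examples
net = caffe.Net('examples/mnist/lenet.prototxt', caffe.TEST)
# fill the input blob with random data and run a forward pass on the GPU
net.blobs['data'].data[...] = np.random.rand(*net.blobs['data'].data.shape)
out = net.forward()
print('forward pass OK, output blobs:', list(out.keys()))
EOF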
Test with the official examples. The stock data/mnist/get_mnist.sh downloads from yann.lecun.com, which often fails, so replace it with the version below, which pulls MNIST from the cvdf-datasets mirror:
#!/usr/bin/env sh
# This scripts downloads the mnist data and unzips it.
DIR="$( cd "$(dirname "$0")" ; pwd -P )"
cd "$DIR"
echo "Downloading..."
for fname in train-images-idx3-ubyte train-labels-idx1-ubyte t10k-images-idx3-ubyte t10k-labels-idx1-ubyte
do
if [ ! -e $fname ]; then
wget --no-check-certificate https://storage.googleapis.com/cvdf-datasets/mnist/${fname}.gz
gunzip ${fname}.gz
fi
done
./data/mnist/get_mnist.sh
./examples/mnist/create_mnist.sh
./examples/mnist/train_lenet.sh
In another terminal, watch GPU utilization while training runs:
nvidia-smi -l 5
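When training finishes, the solver writes a snapshot; with the default solver settings in examples/mnist/lenet_solver.prototxt that is examples/mnist/lenet_iter_10000.caffemodel (the file name is an assumption based on the default max_iter and snapshot_prefix). Loading it back, from the /caffe directory, is a final end-to-end check:
python -c "import caffe; caffe.set_mode_gpu(); net = caffe.Net('examples/mnist/lenet.prototxt', 'examples/mnist/lenet_iter_10000.caffemodel', caffe.TEST); print('snapshot loaded, %d layers' % len(net.layers))"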
References
https://hub.docker.com/r/floydhub/caffe/tags?page=1&ordering=last_updated
https://github.com/tensorflow/datasets/blob/master/tensorflow_datasets/url_checksums/mnist.txt
https://www.cnblogs.com/laosan007/p/11737704.html
https://blog.csdn.net/u010417185/article/details/53559107