diff --git a/.Doxyfile b/.Doxyfile index e33235a2..b33ffef3 100644 --- a/.Doxyfile +++ b/.Doxyfile @@ -811,7 +811,7 @@ RECURSIVE = YES # Note that relative paths are relative to the directory from which doxygen is # run. -EXCLUDE = ./scanner/main.cpp +EXCLUDE = ./scanner/main.cpp ./python/scannerpy/build # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded @@ -838,7 +838,7 @@ EXCLUDE_PATTERNS = # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories use the pattern */test/* -EXCLUDE_SYMBOLS = +EXCLUDE_SYMBOLS = scanner::internal # The EXAMPLE_PATH tag can be used to specify one or more files or directories # that contain example code fragments that are included (see the \include diff --git a/.clang-format b/.clang-format index 6bbae787..52bb2edc 100644 --- a/.clang-format +++ b/.clang-format @@ -41,7 +41,7 @@ BreakStringLiterals: true ColumnLimit: 80 CommentPragmas: '^ IWYU pragma:' ConstructorInitializerAllOnOneLineOrOnePerLine: true -ConstructorInitializerIndentWidth: 4 +ConstructorInitializerIndentWidth: 2 ContinuationIndentWidth: 4 Cpp11BracedListStyle: true DerivePointerAlignment: false @@ -89,6 +89,6 @@ SpacesInCStyleCastParentheses: false SpacesInParentheses: false SpacesInSquareBrackets: false Standard: Auto -TabWidth: 8 +TabWidth: 2 UseTab: Never ... diff --git a/.gdbinit b/.gdbinit new file mode 100644 index 00000000..d70ae0b8 --- /dev/null +++ b/.gdbinit @@ -0,0 +1 @@ +handle SIG40 nostop noprint diff --git a/.gitignore b/.gitignore index 2f9a3231..b6defbde 100644 --- a/.gitignore +++ b/.gitignore @@ -2,15 +2,21 @@ # similar but **not quite** the same. If you add a file here, make sure that # the Docker build still works. 
+dependencies.txt +nvidia-docker-compose.yml .cache build **/*.pyc **/*.trace thirdparty +!thirdparty/resources !thirdparty/CMakeLists.txt python/*.egg-info - +docker/**/deps.sh +docker/**/thirdparty **/*.mp4 **/*.mkv +docker/*.log -python/scannerpy/include \ No newline at end of file +python/scannerpy/include +dist \ No newline at end of file diff --git a/.scanner.example.toml b/.scanner.example.toml index cad236b3..b0f153f8 100644 --- a/.scanner.example.toml +++ b/.scanner.example.toml @@ -11,4 +11,6 @@ scanner_path = "/opt/scanner" # bucket = "gcs-bucket" [network] - master = "localhost:5001" + master = "localhost" + master_port = "5001" + worker_port = "5002" diff --git a/.travis.yml b/.travis.yml index 19368d70..e34a55c7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,22 +1,66 @@ +language: python +python: +- '3.5' sudo: required - -language: cpp - services: - docker - +addons: + apt: + packages: + - docker-ce env: global: - - DOCKER_REPO=scannerresearch/scanner - - DOCKER_EMAIL=wcrichto@cs.stanford.edu - - DOCKER_USER=wcrichto - - secure: "Jf7f8UvpJ8yRA/2nnxaRb9jGrzWZvbRVRxzQtmcx+ltBpflmLFbXB6o4sySGETn5YrJe1g4VNWvkFBZIdLY0n+v4AQ/D1eMWtJT+kZLHihVGcxB3G7SUSOyEUaN1DzdHbaRW4VCSXQJWgK48amxlwnNfXtbNkSpiqIifDURBeHK1UT9+w1FKuTDZmEXv+ubDKjXvv1DuPdTtnolItDfrdtKLh5iXgTRvgXFanmNvydIcHqfz+72qJZJRCdTwUrA8FPq7jKNndftX9y2/dKuA/Xd/cP818iR57Z2PJXeR1c6ZmQ0RBTIZ8pdwG6bfix+48a5+aWlPJ3friA0G2b8psdZ/rQMxBbEClTI+BfnuJnYaIW8JREqmOBACBL7/4eo/Jifv1+vl+VhWHSicS6mWg2khCNAMPWj3cIA/bZrRlTmNlC06GkUgnHar/HhovGrowbZCGOxN0CNWWcuCGeLYfvPgPiI4JRU8eq0VqVhhqXZyPuS/RT1FoqZ4mMfNZG+RSIKHU56g5M4elakp/bZdu9lRoS4hr9VFpvPaLorOTOXPa3J+JxmB9OIP+NLxMIGjUhYnSYfTJ9/+Opxg2lWUOdqXzPZsyPyI+4RuIyyPh1w6jN/xD3EqQC55u5JlFlJ1WuNoyhx16QcKu1Eytxd2JxU6LutMw9WgQET53Q3EOVk=" # DOCKER_PASS - - secure: 
"mJDNPmfRd3cyFNIhRg6TQo8ow5hOS+FC+DX6MLpM6Giv2nL1KkJD0fgnLqgiZeEKDOFHvGpGswGjYTAzNWYeocsneby88w58sNbueUE6PT74UqsbzrmvMhKh2xtEERkJFz5gKvdwc9a4nAvH5ejJW7OZ5OvSscR2vRBl4tUwTuA3czWbod6NwBC/sUrf0jTZhNgZWWWT1j2SLt379Q52Xmd5+ixAaSibctf8hCEMzE/lNsWprR1gO7H89eyLVRQc8VQh8msVtgo7QD/aapr+w96GDWWUAm6z5iNe8gLGZB9v4BtTm82XOV0iefNDfT0fwIhHrM6vqIdiiuMeYMteErT3rF55h0o8hJrP57lwT5u7hU/yxUhNTvtCdJTo8THJihXHbOT7LjxaF20SlJzjARSGZq9mYHeBgv9sGej2jF80iSn+xTAbalVFbc4hKumELnn/D7mGWRTjsGGTbTqNXvTQvxm/QjXmNusav5vKeRFwjbPGfb0iDSfhlZ7BHkR50OyskZ/R/69WjIFS9gBE2pvp4FlVpndKWkkkd4TVSgmnKCXxnQdeNS8We8NodvTBlTcvHFub8j2izKTZRU7FBXRgo9d+Gu+43nSRvjJUgwg/6D6Bou6boVHJo8ib9xzakVjojbJxehatj+dadpuVkG3tjULejPNg/qnjlJc10Pg=" # PYPI_PASS - + - BUILD_TYPE=cpu + - DOCKER_REPO=scannerresearch/scanner + - DOCKER_TEST_REPO=scannerresearch/test-builds + - DOCKER_EMAIL=wcrichto@cs.stanford.edu + - DOCKER_USER=wcrichto + - secure: DAwB02uem8/aIRNfHPknzqZCCmwDlJoQq6r3qE1CyUqkM9VMu6mQa/RBYMBwp3Upny6oI6yBlYJqqVr1rjZzI4by0j0IVhuhQPp2G5DJ/mVZ5PQSJ0bxJtCzm29hHo8hi+EgIk7r7YTcHOr3SRmplf0vM0WFuJplW5fJ6J/RbJSPPpnlTOIgp0Ry9B4eowLhi4b//TbwmtzEX6j4yKxWnDTJ6cV9xRVN1FoNd15PvpiBkOZ3dVGWtdUZm9K3Q3IumeyuemAoTSuKU3r3aqMSamdLf7pmYjpzTrYDT9CMbbEVjzr8M3Y5wFyjQsIdFKzYM/0jRBlUbKaLMhz7a/9vlkplBI954ufOqHiGAx3Cdk4jfkJ3hyucCQqYuQwHxWMmru3lV6jIZf+rYn6UBomHeNN1AIiikZ1EjvNDwY8iwGZPJbcZ42gn2mTaUVxsmwMD757AalTOzZo94+pdFJDjWY6y6kPQlZbdj8AT14bBHf8x8zF8EsZrh/WQMcRgzEU3BcjDE3RIqNgK38Sv1OzqNVTYy57PXbEMKMqfpAQx8FTcx/7NXBRrStrQ24pRZJA+wvkUefpyWZnayyMPvttzB4106GCOWAjSdJEyYQTnZMrXMRX4Qf+NHx6KHlQcExxpww/hSvewsh0JRrvMol5HKXJaZqnF6ZTqxeQdLUDdn3g= + - secure: 
mJDNPmfRd3cyFNIhRg6TQo8ow5hOS+FC+DX6MLpM6Giv2nL1KkJD0fgnLqgiZeEKDOFHvGpGswGjYTAzNWYeocsneby88w58sNbueUE6PT74UqsbzrmvMhKh2xtEERkJFz5gKvdwc9a4nAvH5ejJW7OZ5OvSscR2vRBl4tUwTuA3czWbod6NwBC/sUrf0jTZhNgZWWWT1j2SLt379Q52Xmd5+ixAaSibctf8hCEMzE/lNsWprR1gO7H89eyLVRQc8VQh8msVtgo7QD/aapr+w96GDWWUAm6z5iNe8gLGZB9v4BtTm82XOV0iefNDfT0fwIhHrM6vqIdiiuMeYMteErT3rF55h0o8hJrP57lwT5u7hU/yxUhNTvtCdJTo8THJihXHbOT7LjxaF20SlJzjARSGZq9mYHeBgv9sGej2jF80iSn+xTAbalVFbc4hKumELnn/D7mGWRTjsGGTbTqNXvTQvxm/QjXmNusav5vKeRFwjbPGfb0iDSfhlZ7BHkR50OyskZ/R/69WjIFS9gBE2pvp4FlVpndKWkkkd4TVSgmnKCXxnQdeNS8We8NodvTBlTcvHFub8j2izKTZRU7FBXRgo9d+Gu+43nSRvjJUgwg/6D6Bou6boVHJo8ib9xzakVjojbJxehatj+dadpuVkG3tjULejPNg/qnjlJc10Pg= + - secure: J8Em68qmB/q+u9PEyNujfUN0zbUEKVgjylBNHctL5Bo/9HgQs7yoYBxuEhoY0f95lYJYdsrJ9sOoZ7pMBgCFi9UY8+Gv4f9RfpTVYsp+lav0bc1MNI3Ct8wgEO3uCbIhUSZQFNcW1YsNv2lruyyKxQpQhBJLuS4wR5UkZ+AhjnWKL6S3916zxhwSwKkFxtYDD8rEkfBlymgd9kEqygNzencp9v2JosV90bczUF1wDaDpCLczxjKlwODZWa2milKC3kWaJ5lvee42at0ik3jLrNoCaEzO3u2tMtY3sbjXOVAXy3qROz3mCjU9mFO4Zu22XW4qfhbfFsXR3ht8BSuQ2JqIsgpdWz7gvkYHB6I08RmPgyEYO2BXu4/lVGx+60JUMAJ8teasOVldId6MJ8CzjU6Bl57yRNDZQFpexK+HprXdFtj7O4qfrcp63jPj08r2x1ZxdTMk40PLJ9x8vpIJ3Drb8xegf8I4osQuAVPgBKnZhWGqfAYL6KVr+27U+4PqnhA6VfuJoh4JjfF+5Uxz3CubEKwNYtWgOu2J7fjHCCcF6wjSr+XoyNmDoABL6yvbs65F+XoP9OP+VCgKNsyz/FksXHvmTVIggePgXd5oZzh+EottaKD19EE9FFMeenqIkQ8rDmxogt4SRW9HJIOa6ocQD0JQXKOBM4gynXBfgHw= install: -- sudo apt-get install -y doxygen graphviz python-pip -- pip install doxypypy twine - -script: ./scripts/travis-build.sh - -after_success: ./scripts/travis-publish.sh +- sudo apt-get update +- sudo apt-get install -y doxygen graphviz +- pip3 install requests[security] --upgrade +jobs: + include: + - stage: "Test build" + script: "travis_retry ./scripts/travis-build.sh" + # - stage: "Test build" + # language: generic + # services: + # addons: + # install: + # pip3 install requests[security] --upgrade + # script: "travis_retry ./scripts/travis-osx-build.sh" + # os: osx + - stage: test + script: 
"travis_retry ./scripts/travis-test.sh" + env: TEST_TYPE=cpp + - stage: test + script: "travis_retry ./scripts/travis-test.sh" + env: TEST_TYPE=tutorials + - stage: test + script: "travis_retry ./scripts/travis-test.sh" + env: TEST_TYPE=integration + - stage: build + script: "travis_retry ./scripts/travis-build.sh" + after_success: "travis_retry ./scripts/travis-publish.sh" + env: + - BUILD_TYPE=cpu + - stage: build + script: "travis_retry ./scripts/travis-build.sh" + env: + - BUILD_TYPE=gpu-8.0-cudnn6 + - stage: build + script: "travis_retry ./scripts/travis-build.sh" + env: + - BUILD_TYPE=gpu-8.0-cudnn7 + - stage: build + script: "travis_retry ./scripts/travis-build.sh" + env: + - BUILD_TYPE=gpu-9.0-cudnn7 + - stage: build + script: "travis_retry ./scripts/travis-build.sh" + env: + - BUILD_TYPE=gpu-9.1-cudnn7 diff --git a/.travis/travisci_rsa_brew.enc b/.travis/travisci_rsa_brew.enc new file mode 100644 index 00000000..e2c44643 Binary files /dev/null and b/.travis/travisci_rsa_brew.enc differ diff --git a/.travis/travisci_rsa_brew.pub b/.travis/travisci_rsa_brew.pub new file mode 100644 index 00000000..d37c679d --- /dev/null +++ b/.travis/travisci_rsa_brew.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCw2maYHWQ6cdBi9UJ2Dzv4okW/bw0pmZAKAFZvXOjZ8rRcrhgu8pK289t42Hwso9dsCROJxxVegDTZeOh8jyuQqKziBXu1MqN/3+KuRWJfw3QGwOQNOuwPkBOEn+TZpkgiyz2GMDfvy1WseWxgKzadY2C/R4lHafnmc4Dr/6k2uNST3s73CAyHlGpORItlqZ+oc/IX74qz27PsjMjZsjarCEHDb3UWlsG95D1ymMWrwXUpx6/azyuAJEiWJp/UtdfL8oJVec9vXqK9pzI6IykLdFm0y72FYXtMwBDARugDGpqTpO1xmba0kSAoMe5Ip24nbNHB6gIHL8dnAD9qrbzF apoms@AlexandersMBP2.lan diff --git a/CMakeLists.txt b/CMakeLists.txt index 487721c2..62aec31c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -19,8 +19,6 @@ project(Scanner) ###### Config options ##### option(BUILD_CUDA "" ON) option(BUILD_TESTS "" ON) -option(BUILD_SERVER "" OFF) -option(BUILD_EXAMPLES "" ON) option(ENABLE_PROFILING "" OFF) if (BUILD_TESTS) @@ -47,9 +45,14 @@ set(GLOBAL_OUTPUT_PATH 
${PROJECT_BINARY_DIR}/bin) set(THIRDPARTY_SOURCE_DIR "${CMAKE_SOURCE_DIR}/thirdparty") set(THIRDPARTY_OUTPUT_PATH "${THIRDPARTY_SOURCE_DIR}/build/bin") -if (NOT CMAKE_BUILD_TYPE) +if(APPLE) + message(STATUS "Non-debug builds fail on MacOS. Setting to debug.") + set(CMAKE_BUILD_TYPE "Debug") +elseif(UNIX) + if (NOT CMAKE_BUILD_TYPE) message(STATUS "No build type selected, defaulting to Release") set(CMAKE_BUILD_TYPE "Release") + endif() endif() if(CMAKE_BUILD_TYPE MATCHES Debug) @@ -66,101 +69,14 @@ if (ENABLE_PROFILING) add_definitions(-DSCANNER_PROFILING) endif() -###### Optional Dependencies ####### -if (BUILD_CUDA) - find_package(CUDA REQUIRED) - add_definitions(-DHAVE_CUDA) - include_directories(${CUDA_INCLUDE_DIRS}) - if(COMPILER_SUPPORTS_CXX1Y) - set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -std=c++11") - endif() -endif() - -if (BUILD_CUDA) - add_library(scanner_halide scanner/engine/halide_context.cpp) -endif() - -###### Required Dependencies ####### -find_package(SaneProtobuf REQUIRED) -find_package(FFmpeg REQUIRED) -find_package(LibLZMA REQUIRED) -find_package(OpenSSL REQUIRED) -find_package(BZip2 REQUIRED) -find_package(Boost COMPONENTS thread program_options regex python REQUIRED) -find_package(GFlags REQUIRED) -find_package(Glog REQUIRED) -find_package(GoogleTest REQUIRED) -find_package(CURL REQUIRED) -find_package(Iconv REQUIRED) -find_package(Storehouse REQUIRED CONFIG - PATHS "thirdparty/build/bin/storehouse") -find_package(TinyToml REQUIRED) -find_package(PythonLibs 2.7 EXACT REQUIRED) - -set(GTEST_INCLUDE_DIRS - "${THIRDPARTY_OUTPUT_PATH}/googletest/include") -set(GTEST_LIBRARIES - "${THIRDPARTY_OUTPUT_PATH}/googletest/lib/libgtest.a") -set(GTEST_LIB_MAIN - "${THIRDPARTY_OUTPUT_PATH}/googletest/lib/libgtest_main.a") - -set(SCANNER_LIBRARIES - "${PROTOBUF_LIBRARY}" - "${STOREHOUSE_LIBRARIES}" - "${FFMPEG_LIBRARIES}" - "-L/opt/ffmpeg-3.2.2/lib" - "-lswscale" - "${LIBLZMA_LIBRARIES}" - "${OPENSSL_LIBRARIES}" - "${BZIP2_LIBRARIES}" - "${PROXYGEN_LIBRARIES}" 
- "${FOLLY_LIBRARIES}" - "${Boost_LIBRARIES}" - "${GFLAGS_LIBRARIES}" - "${GLOG_LIBRARIES}" - "${CURL_LIBRARIES}" - "${ICONV_LIBRARIES}" - "${SCANNER_LIBRARIES}" - "${PYTHON_LIBRARIES}" - "-ljpeg" - "-lz" - "-ldl" - "-lgrpc++_unsecure -lgrpc -lgpr") - -include_directories( - "." - "${CMAKE_CURRENT_BINARY_DIR}" # for protobuf generated files - "${PROTOBUF_INCLUDE_DIRS}" - "${FFMPEG_INCLUDE_DIR}" - "${TINYTOML_INCLUDE_DIR}" - "${STOREHOUSE_INCLUDE_DIRS}" - "${OPENSSL_INCLUDE_DIR}" - "${Boost_INCLUDE_DIRS}" - "${GLOG_INCLUDE_DIRS}" - "${LIBLZMA_INCLUDE_DIRS}" - "${PYTHON_INCLUDE_DIRS}") - -if (BUILD_TESTS) - include_directories("${GTEST_INCLUDE_DIRS}") -endif() - -if (BUILD_CUDA) - list(APPEND SCANNER_LIBRARIES - util_cuda - "${CUDA_LIBRARIES}" - "/usr/lib/x86_64-linux-gnu/libnvcuvid.so" - "-lcuda") -endif() - -if (APPLE) - include_directories( - "/System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Versions/Current/Headers/") -elseif() -endif() +include(cmake/Dependencies.cmake) ###### Project code ####### set(PROTO_FILES scanner/metadata.proto + scanner/source_args.proto + scanner/sink_args.proto + scanner/sampler_args.proto scanner/types.proto) set(GRPC_PROTO_FILES @@ -200,6 +116,7 @@ add_library(scanner SHARED $ $ $ + scanner/util/halide_context.cpp ${PROTO_SRCS} ${GRPC_PROTO_SRCS} ${STRUCK_SOURCES} @@ -236,10 +153,6 @@ endforeach() add_subdirectory(stdlib) -if (BUILD_EXAMPLES) - add_subdirectory(examples) -endif() - if (BUILD_TESTS) add_subdirectory(tests) endif() diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index a5c4082f..00000000 --- a/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -FROM scannerresearch/scanner-base:ubuntu16.04 -MAINTAINER Will Crichton "wcrichto@cs.stanford.edu" -ARG cores=1 -ARG gpu=ON - -ADD . /opt/scanner -WORKDIR /opt/scanner -RUN cd thirdparty && mkdir build && cd build && \ - cmake -D CMAKE_BUILD_TYPE=Release .. 
&& \ - make -j ${cores} -RUN mkdir build && cd build && \ - cmake -D BUILD_IMGPROC_OPS=ON \ - -D BUILD_CAFFE_OPS=ON \ - -D BUILD_OPENFACE_OPS=ON \ - -D BUILD_TESTS=ON \ - -D BUILD_CUDA=${gpu} \ - .. && \ - make -j ${cores} && \ - cd /opt/scanner && ./scripts/dev-setup.sh -ENV PYTHONPATH /opt/scanner/python:$PYTHONPATH diff --git a/INSTALL.md b/INSTALL.md new file mode 100644 index 00000000..e6be923f --- /dev/null +++ b/INSTALL.md @@ -0,0 +1,118 @@ +# Building Scanner + +*NOTE*: The following build instructions have only been tested on Ubuntu 16.04. + +There are five major steps to build and install Scanner: +1 Install apt-get dependencies +2 Install python pip dependencies +3 Run deps.sh to download and install external dependencies +4 Build Scanner +5 Install scannerpy python package + +Scanner depends on the following *major* dependencies: + +* Python == 2.7 +* boost >= 1.63.0 +* ffmpeg >= 3.3.1 +* opencv >= 3.2.0 +* protobuf == 3.4.0 +* grpc == 1.7.2 +* caffe >= rc5 OR intel-caffe >= 1.0.6 + +Scanner optionally requires: +* CUDA >= 8.0 + +Scanner provides a dependency script `deps.sh` to automatically install any or +all of the *major* dependencies if they are not already installed. Each of these +dependencies has a set of required system-level packages. If you need to install +all or most of of these dependencies, run the 'All dependencies' apt-get command +below. If you only need to install a few, we also provide apt-get commands for +each package. + +## 1. 
apt-get Dependencies + +All dependencies +```bash +apt-get install \ + build-essential \ + cmake git libgtk2.0-dev pkg-config libavcodec-dev libavformat-dev \ + libswscale-dev unzip llvm clang libc++-dev libgflags-dev libgtest-dev \ + libssl-dev libcurl3-dev liblzma-dev libeigen3-dev \ + libgoogle-glog-dev libatlas-base-dev libsuitesparse-dev libgflags-dev \ + libx264-dev libopenjpeg-dev libxvidcore-dev \ + libpng-dev libjpeg-dev libbz2-dev git python-pip wget \ + libleveldb-dev libsnappy-dev libhdf5-serial-dev liblmdb-dev python-dev \ + python-tk autoconf autogen libtool libtbb-dev libopenblas-dev \ + liblapacke-dev swig yasm python2.7 cpio \ + automake libass-dev libfreetype6-dev libsdl2-dev libtheora-dev libtool \ + libva-dev libvdpau-dev libvorbis-dev libxcb1-dev libxcb-shm0-dev \ + libxcb-xfixes0-dev mercurial pkg-config texinfo wget zlib1g-dev \ + curl unzip +``` + +For gRPC +```bash +apt-get install \ + build-essential autoconf automake libtool curl make g++ \ + unzip clang libc++-dev libgflags-dev libgtest-dev unzip +``` + +For OpenCV +```bash +apt-get install \ + build-essential cmake git libgtk2.0-dev pkg-config \ + python-dev libtbb2 libtbb-dev libjpeg-dev libpng-dev libtiff-dev \ + libjasper-dev libdc1394-22-dev +``` + +For FFMPEG +```bash +apt-get -y install \ + build-essential autoconf automake build-essential cmake git libass-dev \ + libfreetype6-dev \ libsdl2-dev libtheora-dev libtool libva-dev libvdpau-dev \ + libvorbis-dev libxcb1-dev libxcb-shm0-dev libxcb-xfixes0-dev mercurial \ + pkg-config texinfo wget zlib1g-dev +``` + +## 2. Python Pip Dependencies + +Scanner depends on several python packages installable via pip. From the +top-level directory, run: + +```bash +pip install -r requirements.txt +``` + +## 3. 
Run deps.sh + +To install or specify where your *major* dependencies are, from the top-level +directory run: + +```bash +bash ./deps.sh +``` + +This script will query you for each major +dependency and install those that are not already installed. By default, +it will install the dependencies to a local directory inside the scanner repo +(it will not install system-wide). + +## 4. Build Scanner + +Run the following commands from the top-level directory: +```bash +mkdir build +cd build +cmake .. +make -j +``` + +## 5. Install scannerpy python package + +Run the following commands from the top-level directory: +```bash +python python/setup.py bdist_wheel +pip install dist/scannerpy-0.1.13-py2-none-any.whl +``` + +Congratulations! You've installed the scannerpy package. diff --git a/README.md b/README.md index 0484e2f3..bb31f68e 100644 --- a/README.md +++ b/README.md @@ -1,61 +1,96 @@ -# Scanner: Efficient Video Analysis at Scale [![Build Status](https://travis-ci.org/scanner-research/scanner.svg?branch=master)](https://travis-ci.org/scanner-research/scanner) # +# Scanner: Efficient Video Analysis at Scale [![GitHub tag](https://img.shields.io/github/tag/scanner-research/scanner.svg)](https://GitHub.com/scanner-research/scanner/tags/) [![Build Status](https://travis-ci.org/scanner-research/scanner.svg?branch=master)](https://travis-ci.org/scanner-research/scanner) # -_For [build instructions](https://github.com/scanner-research/scanner/wiki/Building-Scanner), [tutorials](https://github.com/scanner-research/scanner/wiki/Getting-started), [documentation](https://github.com/scanner-research/scanner/wiki/Documentation), and [contributing guidelines](https://github.com/scanner-research/scanner/wiki/Contributing), visit the [Scanner wiki](https://github.com/scanner-research/scanner/wiki)._ +Scanner is a system for developing applications that efficiently process large video datasets. 
Scanner has been used for both video analysis and video synthesis tasks, such as: +* **Labeling and data mining large video collections:** Scanner is in use at Stanford University as the compute engine for visual data mining applications that detect faces, commercials, human poses, etc. in datasets as big as 70,000 hours of TV news (12 billion frames, 20 TB) or 600 feature length movies (106 million frames). We've used Scanner to run these tasks on hundreds of GPUs or thousands of CPUs on Google Compute Engine. +* **VR Video synthesis:** Scanner is use at Facebook to scale the [Surround 360 VR video stitching software](https://github.com/scanner-research/Surround360) to hundreds of CPUs. This application processes fourteen 2048x2048 input videos to produce 8k omidirectional stereo video output for VR display. -Scanner lets you write stateful functions that get efficiently mapped across batches of video frames. These functions can execute on a multi-core CPU or GPU and can be distributed across multiple machines. You can think about Scanner like Spark for pixels. For example, you could use Scanner to: +To learn more about Scanner, see the documentation below, check out the [various example applications](https://github.com/scanner-research/scanner/tree/master/examples), or read the SIGGRAPH 2018 Technical Paper: "[Scanner: Efficient Video Analysis at Scale](http://graphics.stanford.edu/papers/scanner/scanner_sig18.pdf)". 
-* [Locate and recognize faces](https://github.com/scanner-research/scanner/blob/master/examples/face_detection/face_detect.py) -* [Detect shots in a film](https://github.com/scanner-research/scanner/blob/master/examples/shot_detection/shot_detect.py) -* [Search videos by image](https://github.com/scanner-research/scanner/blob/master/examples/reverse_image_search/search.py) +For easy access to off-the-shelf pipelines like face detection and optical flow built using Scanner, check out our [scannertools](https://github.com/scanner-research/scannertools) library. -To support these applications, Scanner uses a Python interface similar to Tensorflow and Spark SQL. Videos are represented as tables in a database, and users write computation graphs to transform these tables. For example, to compute the color histogram for each frame in a set of videos on the GPU: +## Key Features -```python -from scannerpy import Database, DeviceType -from scannerpy.stdlib import parsers -db = Database() -videos = db.ingest_video_collection('my_videos', ['vid0.mp4', 'vid1.mkv']) -hist = db.ops.Histogram(device=DeviceType.GPU) -output = db.run(videos, hist, 'my_videos_hist') -vid0_hists = output.tables(0).columns(0).load(parsers.histograms) -``` +Scanner's key features include: -Scanner provides a convenient way to organize your videos as well as data derived from the videos (bounding boxes, histograms, feature maps, etc.) using a relational database. Behind the scenes, Scanner handles decoding the compressed videos into raw frames, allowing you to process an individual video in parallel. It then runs a computation graph on the decoded frames using kernels written in C++ for maximum performance and distributes the computation over a cluster. 
Scanner supports a number of operators and third-party libraries to reduce the work of writing new computations: +* **Video processing computations as dataflow graphs:** Like many modern ML frameworks, Scanner structures video analysis tasks as dataflow graphs whose nodes produce and consume sequences of per-frame data. Scanner's embodiment of the dataflow model includes operators useful for video processing tasks such as sparse frame sampling (e.g., "frames known to contain a face"), sliding window frame access (e.g., stencils for temporal smoothing), and stateful processing across frames (e.g., tracking). -* [Caffe](https://github.com/bvlc/caffe) support for neural network evaluation -* [OpenCV](https://github.com/opencv/opencv) support with included kernels for color histograms and optical flow -* Object tracking in videos with [Struck](https://github.com/samhare/struck) -* Image processing with [Halide](http://halide-lang.org/) +* **Videos as logical tables:** To simplify the management of and access to large-numbers of videos, Scanner represents video collections and the pixel-level products of video frame analysis (e.g., flow fields, depth maps, activations) as tables in a data store. Scanner's data store features first-class support for video frame column types to facilitate key performance optimizations, such as storing video in compressed form and providing fast access to sparse lists of video frames. -Lastly, Scanner also offers some utilities for ease of development: +* **First-class support for GPU acceleration:** Since many video processing algorithms benefit from GPU acceleration, Scanner provides first-class support for writing dataflow graph operations that utilize GPU execution. Scanner also leverages specialized GPU hardware for video decoding when available. 
-* Profiling via [chrome://tracing](https://www.chromium.org/developers/how-tos/trace-event-profiling-tool) -* Support for different storage backends including [Google Cloud Storage](https://cloud.google.com/storage/) -* Custom operators for adding your own functionality outside the source tree +* **Fault tolerant, distributed execution:** Scanner applications can be run on the cores of a single machine, on a multi-GPU server, or scaled to hundreds of machines (potentially with heterogeneous numbers of GPUs), without significant source-level change. Scanner also provides fault tolerance, so your applications can not only utilize many machines, but use cheaper preemptible machines on cloud computing platforms. -Scanner is an active research project, part of a collaboration between Carnegie Mellon and Stanford. Please contact [Alex Poms](https://github.com/apoms) and [Will Crichton](https://github.com/willcrichton) with questions. +What Scanner __is not__: -## Quick start ## +Scanner is not a system for implementing new high-performance image and video processing kernels from scratch. However, Scanner can be used to create scalable video processing applications by composing kernels that already exist as part of popular libraries such as OpenCV, Caffe, TensorFlow, etc. or have been implemented in popular performance-oriented languages like [CUDA](https://developer.nvidia.com/cuda-zone) or [Halide](http://halide-lang.org/). Yes, you can write your dataflow graph operations in Python or C++ too! -To quickly dive into Scanner, you can use one of our prebuilt [Docker images](https://hub.docker.com/r/scannerresearch/scanner). To run a GPU image, you must install and use [nvidia-docker](https://github.com/NVIDIA/nvidia-docker). +## Documentation -```bash -nvidia-docker run -d --name scanner -ti scannerresearch/scanner:gpu /bin/bash -nvidia-docker attach scanner -``` +Scanner's documentation is hosted at [scanner.run](http://scanner.run). 
Here +are a few links to get you started: -_Note: if you don't have a GPU, then run `docker` instead of `nvidia-docker` and use `scanner:cpu` instead of `scanner:gpu` in the Docker image name._ +* [Installation](http://scanner.run/installation.html) +* [Getting Started](http://scanner.run/getting-started.html) +* [Programming Handbook](http://scanner.run/programming-handbook.html) +* [API Reference](http://scanner.run/api.html) +* [SIGGRAPH 2018 Technical Paper](http://graphics.stanford.edu/papers/scanner/scanner_sig18.pdf) +* [Scanner Examples](https://github.com/scanner-research/scanner/tree/master/examples) -Then inside your Docker container, run: +## Example code -```bash -python examples/face_detection/face_detect.py -``` +Scanner applications are written using the Python API. Here's an example +application that resizes every third frame from a video and then saves the result as an mp4 video (the +[Quickstart](http://scanner.run/quickstart.html) walks through this +example in more detail): + +```python +from scannerpy import Database, Job + +# Ingest a video into the database (create a table with a row per video frame) +db = Database() +db.ingest_videos([('example_table', 'example.mp4')]) -This runs a Scanner demo which detects faces in every frame of a short video from YouTube, creating a file `example_faces.mp4`. Type `Ctrl-P + Ctrl-Q` to detach from the container and then run: +# Define a Computation Graph +frame = db.sources.FrameColumn() # Read input frames from database +sampled_frame = db.streams.Stride(input=frame, stride=3) # Select every third frame +resized = db.ops.Resize(frame=sampled_frame, width=640, height=480) # Resize input frames +output_frame = db.sinks.Column(columns={'frame': resized}) # Save resized frames as new video -```bash -nvidia-docker cp scanner:/opt/scanner/example_faces.mp4 . 
+# Set parameters of computation graph ops +job = Job(op_args={ + frame: db.table('example_table').column('frame'), # Column to read input frames from + output_frame: 'resized_example' # Table name for computation output +}) + +# Execute the computation graph and return a handle to the newly produced tables +output_tables = db.run(output=output_frame, jobs=[job], force=True) + +# Save the resized video as an mp4 file +output_tables[0].column('frame').save_mp4('resized_video') ``` -Then you can view the generated video on your own machine. That's it! To learn more about Scanner, please visit the [Scanner wiki](https://github.com/scanner-research/scanner/wiki). +If you'd like to see other example applications written with Scanner, check +out the [Examples](https://github.com/scanner-research/scanner/tree/master/examples) +directory in this repository. + +## Contributing + +If you'd like to contribute to the development of Scanner, you should first +build Scanner [from source](http://scanner.run/from_source.html). + +Please submit a pull-request rebased against the most recent version of the +master branch and we will review your changes to be merged. Thanks for +contributing! + +### Running tests +You can run the full suite of tests by executing `make test` in the directory +you used to build Scanner. This will run both the C++ tests and the end-to-end +tests that verify the python API. + +## About +Scanner is an active research project, part of a collaboration between Stanford and Carnegie Mellon University. Please contact [Alex Poms](https://github.com/apoms) and [Will Crichton](https://github.com/willcrichton) with questions. + +Scanner was developed with the support of the NSF (IIS-1539069), the Intel Corporation (through the Intel Science and Technology Center for Visual Cloud Computing and the NSF/Intel VEC program), and by Google. 
+ +### Paper citation +Scanner will appear in the proceedings of SIGGRAPH 2018 as "[Scanner: Efficient Video Analysis at Scale](http://graphics.stanford.edu/papers/scanner/scanner_sig18.pdf)" by Poms, Crichton, Hanrahan, and Fatahalian. If you use Scanner in your research, we'd appreciate it if you cite the paper. diff --git a/build.sh b/build.sh new file mode 100755 index 00000000..726afdf9 --- /dev/null +++ b/build.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +PKG=scannerpy + +if [[ "$OSTYPE" == "linux-gnu" ]]; then + cores=$(nproc) + # ... +elif [[ "$OSTYPE" == "darwin"* ]]; then + cores=$(gnproc) + # Mac OSX +else + # Unknown. + echo "Unknown OSTYPE: $OSTYPE. Exiting." + exit 1 +fi + +pushd build +if make -j$cores; then + popd + if rm -rf dist && \ + python3 setup.py bdist_wheel; + then + cwd=$(pwd) + # cd to /tmp to avoid name clashes with Python module name and any + # directories of the same name in our cwd + pushd /tmp + (yes | pip3 uninstall $PKG) + (yes | pip3 install --user $cwd/dist/*) + popd + fi +else + popd +fi diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake new file mode 100644 index 00000000..9558a02f --- /dev/null +++ b/cmake/Dependencies.cmake @@ -0,0 +1,133 @@ +###### Parse dependency file ####### +file(STRINGS ${CMAKE_SOURCE_DIR}/dependencies.txt ConfigContents) +foreach(NameAndValue ${ConfigContents}) + # Strip leading spaces + string(REGEX REPLACE "^[ ]+" "" NameAndValue ${NameAndValue}) + # Find variable name + string(REGEX MATCH "^[^=]+" Name ${NameAndValue}) + # Find the value + string(REPLACE "${Name}=" "" Value ${NameAndValue}) + # Set the variable + set(${Name} "${Value}") +endforeach() + +list(APPEND CMAKE_PREFIX_PATH ${PROTOBUF_DIR}) +list(APPEND CMAKE_PREFIX_PATH ${PYBIND11_DIR}) + +# Disable cuda if nvidia-smi was not detected +if (${HAVE_GPU} STREQUAL "false") + set(BUILD_CUDA OFF) +endif() + +###### Optional Dependencies ####### +if (BUILD_CUDA) + find_package(CUDA REQUIRED) + add_definitions(-DHAVE_CUDA) + 
include_directories(${CUDA_INCLUDE_DIRS}) + if(COMPILER_SUPPORTS_CXX1Y) + set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -std=c++11") + endif() + find_package(NVCUVID REQUIRED) +endif() + +if (BUILD_CUDA) + add_library(scanner_halide scanner/util/halide_context.cpp) +endif() + +set(OPENCV_DESIRED_COMPONENTS core highgui imgproc) +if (BUILD_CUDA) + list(APPEND OPENCV_DESIRED_COMPONENTS cudaimgproc cudaarithm) +endif() + +###### Required Dependencies ####### +find_package(SaneProtobuf REQUIRED) +find_package(GRPC REQUIRED) +find_package(FFmpeg REQUIRED) +find_package(LibLZMA REQUIRED) +if (APPLE) + set(OPENSSL_ROOT_DIR "/usr/local/opt/openssl") +endif() +find_package(OpenSSL REQUIRED) +find_package(BZip2 REQUIRED) +find_package(GFlags REQUIRED) +find_package(Glog REQUIRED) +find_package(GoogleTest REQUIRED) +find_package(CURL REQUIRED) +find_package(Iconv REQUIRED) +find_package(Storehouse REQUIRED CONFIG + PATHS "${CMAKE_SOURCE_DIR}/thirdparty/install" + "${STOREHOUSE_DIR}") +find_package(Hwang REQUIRED) +find_package(TinyToml REQUIRED) +find_package(OpenCV COMPONENTS ${OPENCV_DESIRED_COMPONENTS}) + +set(PYBIND11_PYTHON_VERSION 3) +find_package(pybind11 REQUIRED) + +if(NOT APPLE AND UNIX) + find_package(OpenMP REQUIRED) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}") +endif() + +set(GTEST_INCLUDE_DIRS ${GOOGLETEST_INCLUDE_DIR}) +set(GTEST_LIBRARIES ${GOOGLETEST_LIBRARIES}) +set(GTEST_LIB_MAIN ${GOOGLETEST_MAIN}) + +set(SCANNER_LIBRARIES + "${HWANG_LIBRARY}" + "${PROTOBUF_LIBRARY}" + "${GRPC_LIBRARIES}" + "${FFMPEG_LIBRARIES}" + "${LIBLZMA_LIBRARIES}" + "${BZIP2_LIBRARIES}" + "${GFLAGS_LIBRARIES}" + "${GLOG_LIBRARIES}" + "${CURL_LIBRARIES}" + "${ICONV_LIBRARIES}" + "${SCANNER_LIBRARIES}" + "${STOREHOUSE_LIBRARIES}" + "${OPENSSL_LIBRARIES}" + "${PYTHON_LIBRARIES}" + "-ljpeg" + "-lz" + "-ldl" + ) + +include_directories( + "." 
+ "${CMAKE_CURRENT_BINARY_DIR}" # for protobuf generated files + "${HWANG_INCLUDE_DIRS}" + "${PROTOBUF_INCLUDE_DIRS}" + "${GRPC_INCLUDE_DIRS}" + "${FFMPEG_INCLUDE_DIR}" + "${TINYTOML_INCLUDE_DIR}" + "${STOREHOUSE_INCLUDE_DIRS}" + "${OPENSSL_INCLUDE_DIR}" + "${GLOG_INCLUDE_DIRS}" + "${LIBLZMA_INCLUDE_DIRS}" + "${PYTHON_INCLUDE_DIRS}" + "${pybind11_INCLUDE_DIR}") + +if (OpenCV_FOUND) + list(APPEND SCANNER_LIBRARIES ${OpenCV_LIBRARIES}) + include_directories(${OpenCV_INCLUDE_DIRS}) + add_definitions(-DHAVE_OPENCV) +endif() + +if (BUILD_TESTS) + include_directories("${GTEST_INCLUDE_DIRS}") +endif() + +if (BUILD_CUDA) + list(APPEND SCANNER_LIBRARIES + util_cuda + ${CUDA_LIBRARIES} + ${NVCUVID_LIBRARIES} + "-lcuda") +endif() + +if (APPLE) + include_directories( + "/System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Versions/Current/Headers/") +elseif() +endif() diff --git a/cmake/Modules/FindFFmpeg.cmake b/cmake/Modules/FindFFmpeg.cmake index c511df95..b2269196 100644 --- a/cmake/Modules/FindFFmpeg.cmake +++ b/cmake/Modules/FindFFmpeg.cmake @@ -54,6 +54,11 @@ NAMES swresample PATHS ${_FFMPEG_AVUTIL_LIBRARY_DIRS} /usr/lib /usr/local/lib /opt/local/lib /sw/lib ) +find_library(FFMPEG_LIBSWSCALE +NAMES swscale +PATHS ${_FFMPEG_AVCODEC_LIBRARY_DIRS} /usr/lib /usr/local/lib /opt/local/lib /sw/lib +) + if (FFMPEG_LIBAVCODEC AND FFMPEG_LIBAVFORMAT) set(FFMPEG_FOUND TRUE) endif() @@ -66,6 +71,7 @@ ${FFMPEG_LIBAVCODEC} ${FFMPEG_LIBAVFORMAT} ${FFMPEG_LIBAVUTIL} ${FFMPEG_LIBSWRESAMPLE} +${FFMPEG_LIBSWSCALE} ) endif (FFMPEG_FOUND) diff --git a/cmake/Modules/FindGRPC.cmake b/cmake/Modules/FindGRPC.cmake new file mode 100644 index 00000000..2c0b3946 --- /dev/null +++ b/cmake/Modules/FindGRPC.cmake @@ -0,0 +1,48 @@ +# - Try to find grpc library +# +# The following variables are optionally searched for defaults +# GRPC_DIR: Base directory where all components are found +# +# The following are set after configuration is done: +# GRPC_FOUND +# 
GRPC_INCLUDE_DIRS +# GRPC_LIBRARIES +# GRPC_LIBRARY_DIRS + +include(FindPackageHandleStandardArgs) + +set(GRPC_ROOT_DIR "" CACHE PATH "Folder contains GRPC") + +if (NOT "$ENV{GRPC_DIR}" STREQUAL "") + set(GRPC_DIR $ENV{GRPC_DIR}) +endif() + +# We are testing only a couple of files in the include directories +if(WIN32) + find_path(GRPC_INCLUDE_DIR grpc/grpc.h + PATHS ${GRPC_ROOT_DIR}/src/windows) +else() + find_path(GRPC_INCLUDE_DIR grpc/grpc.h + PATHS + ${GRPC_DIR}/include) +endif() + +find_library(GRPCPP_UNSECURE_LIBRARY grpc++_unsecure + PATHS + ${GRPC_DIR}/lib) + +find_library(GRPC_LIBRARY grpc + PATHS + ${GRPC_DIR}/lib) + +find_library(GPR_LIBRARY gpr + PATHS + ${GRPC_DIR}/lib) + +find_package_handle_standard_args(GRPC DEFAULT_MSG + GRPC_INCLUDE_DIR GRPC_LIBRARY) + +if(GRPC_FOUND) + set(GRPC_INCLUDE_DIRS ${GRPC_INCLUDE_DIR}) + set(GRPC_LIBRARIES ${GRPCPP_UNSECURE_LIBRARY} ${GRPC_LIBRARY} ${GPR_LIBRARY}) +endif() diff --git a/cmake/Modules/FindGipuma.cmake b/cmake/Modules/FindGipuma.cmake new file mode 100644 index 00000000..8615d225 --- /dev/null +++ b/cmake/Modules/FindGipuma.cmake @@ -0,0 +1,38 @@ +# - Try to find Gipuma +# +# The following variables are optionally searched for defaults +# GIPUMA_ROOT_DIR: Base directory where all Gipuma components are found +# +# The following are set after configuration is done: +# GIPUMA_FOUND +# GIPUMA_INCLUDE_DIRS + +include(FindPackageHandleStandardArgs) + +set(GIPUMA_ROOT_DIR "" CACHE PATH "Folder contains Gipuma") + +if (NOT "$ENV{Gipuma_DIR}" STREQUAL "") + set(GIPUMA_ROOT_DIR $ENV{Gipuma_DIR} CACHE PATH "Folder contains Gipuma" FORCE) +endif() + +# We are testing only a couple of files in the include directories +if(WIN32) + find_path(GIPUMA_INCLUDE_DIR gipuma.h + PATHS ${GIPUMA_ROOT_DIR}/src/windows + PATH_SUFFIXES gipuma) +else() + find_path(GIPUMA_INCLUDE_DIR gipuma.h + PATHS ${GIPUMA_ROOT_DIR}/include + PATH_SUFFIXES gipuma) + +endif() + +find_library(GIPUMA_LIBRARY gipuma PATHS ${GIPUMA_ROOT_DIR}/lib) + 
+find_package_handle_standard_args(GIPUMA DEFAULT_MSG GIPUMA_INCLUDE_DIR + GIPUMA_LIBRARY) + +if(GIPUMA_FOUND) + set(GIPUMA_INCLUDE_DIRS ${GIPUMA_INCLUDE_DIR}) + set(GIPUMA_LIBRARIES ${GIPUMA_LIBRARY}) +endif() diff --git a/cmake/Modules/FindGoogleTest.cmake b/cmake/Modules/FindGoogleTest.cmake index 68c1c44b..276bf783 100644 --- a/cmake/Modules/FindGoogleTest.cmake +++ b/cmake/Modules/FindGoogleTest.cmake @@ -25,13 +25,18 @@ else() find_path(GOOGLETEST_INCLUDE_DIR gtest/gtest.h PATHS ${GOOGLETEST_ROOT_DIR}/include - ${CMAKE_SOURCE_DIR}/thirdparty/build/bin/googletest/include) + ${CMAKE_SOURCE_DIR}/thirdparty/install/include) endif() find_library(GOOGLETEST_LIBRARY gtest PATHS ${GOOGLETEST_ROOT_DIR}/lib - ${CMAKE_SOURCE_DIR}/thirdparty/build/bin/googletest/lib) + ${CMAKE_SOURCE_DIR}/thirdparty/install/lib) + +find_library(GOOGLETEST_MAIN gtest_main + PATHS + ${GOOGLETEST_ROOT_DIR}/lib + ${CMAKE_SOURCE_DIR}/thirdparty/install/lib) find_package_handle_standard_args(GOOGLETEST DEFAULT_MSG GOOGLETEST_INCLUDE_DIR GOOGLETEST_LIBRARY) diff --git a/cmake/Modules/FindHalide.cmake b/cmake/Modules/FindHalide.cmake index e555696a..8d8509d0 100644 --- a/cmake/Modules/FindHalide.cmake +++ b/cmake/Modules/FindHalide.cmake @@ -1,12 +1,14 @@ # FindHalide.cmake # ... 
shamelessly based on FindJeMalloc.cmake - set(HALIDE_ROOT_DIR "" CACHE PATH "Folder contains Halide") if (NOT "$ENV{Halide_DIR}" STREQUAL "") set(HALIDE_ROOT_DIR $ENV{Halide_DIR} CACHE PATH "Folder contains Halide" FORCE) +elseif (Halide_DIR) + set(HALIDE_ROOT_DIR ${Halide_DIR} CACHE PATH "Folder contains Halide" + FORCE) endif() find_library(HALIDE_LIBRARIES @@ -25,8 +27,8 @@ find_package_handle_standard_args(Halide DEFAULT_MSG HALIDE_INCLUDE_DIR ) -set(HALIDE_LIBRARY HALIDE_LIBRARIES) -set(HALIDE_INCLUDE_DIRS HALIDE_INCLUDE_DIR) +set(HALIDE_LIBRARY ${HALIDE_LIBRARIES}) +set(HALIDE_INCLUDE_DIRS ${HALIDE_INCLUDE_DIR}) mark_as_advanced( HALIDE_ROOT_DIR diff --git a/cmake/Modules/FindHwang.cmake b/cmake/Modules/FindHwang.cmake new file mode 100644 index 00000000..abf30cd0 --- /dev/null +++ b/cmake/Modules/FindHwang.cmake @@ -0,0 +1,38 @@ +# FindHwang.cmake + +set(HWANG_ROOT_DIR "" CACHE PATH "Folder contains Hwang") + +if (NOT "$ENV{Hwang_DIR}" STREQUAL "") + set(HWANG_ROOT_DIR $ENV{Hwang_DIR} CACHE PATH "Folder contains Hwang" + FORCE) +elseif (Hwang_DIR) + set(HWANG_ROOT_DIR ${Hwang_DIR} CACHE PATH "Folder contains Hwang" + FORCE) +endif() + +find_library(HWANG_LIBRARIES + NAMES hwang + HINTS ${HWANG_ROOT_DIR}/lib + ) + +find_path(HWANG_INCLUDE_DIR + NAMES hwang/common.h + HINTS ${HWANG_ROOT_DIR}/include + ) + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(Hwang DEFAULT_MSG + HWANG_LIBRARIES + HWANG_INCLUDE_DIR + ) + +set(HWANG_LIBRARY ${HWANG_LIBRARIES}) +set(HWANG_INCLUDE_DIRS ${HWANG_INCLUDE_DIR}) + +mark_as_advanced( + HWANG_ROOT_DIR + HWANG_LIBRARY + HWANG_LIBRARIES + HWANG_INCLUDE_DIR + HWANG_INCLUDE_DIRS + ) diff --git a/cmake/Modules/FindNVCUVID.cmake b/cmake/Modules/FindNVCUVID.cmake new file mode 100644 index 00000000..868e5288 --- /dev/null +++ b/cmake/Modules/FindNVCUVID.cmake @@ -0,0 +1,28 @@ +# - Try to find NVCUVID +# +# The following variables are optionally searched for defaults +# NVCUVID_DIR: Base directory where 
NVCUVID can be found +# +# The following are set after configuration is done: +# NVCUVID_FOUND +# NVCUVID_LIBRARY + +include(FindPackageHandleStandardArgs) + +set(NVCUVID_ROOT_DIR "" CACHE PATH "Folder contains NVCUVID") + +if (NOT "$ENV{NVCUVID_DIR}" STREQUAL "") + set(NVCUVID_ROOT_DIR + $ENV{NVCUVID_DIR} CACHE PATH "Folder contains NVCUVID" FORCE) +endif() + +find_library(NVCUVID_LIBRARY nvcuvid + PATHS + ${NVCUVID_ROOT_DIR}/lib + /usr/local/cuda/lib64) + +find_package_handle_standard_args(NVCUVID DEFAULT_MSG NVCUVID_LIBRARY) + +if(NVCUVID_FOUND) + set(NVCUVID_LIBRARIES ${NVCUVID_LIBRARY}) +endif() diff --git a/cmake/Modules/FindOpenPose.cmake b/cmake/Modules/FindOpenPose.cmake new file mode 100644 index 00000000..dae6683d --- /dev/null +++ b/cmake/Modules/FindOpenPose.cmake @@ -0,0 +1,37 @@ +# - Try to find OpenPose +# +# The following variables are optionally searched for defaults +# OPENPOSE_ROOT_DIR: Base directory where all Caffe components are found +# +# The following are set after configuration is done: +# OPENPOSE_FOUND +# OPENPOSE_INCLUDE_DIRS +# OPENPOSE_LIBRARIES +# OPENPOSE_LIBRARY_DIRS + +include(FindPackageHandleStandardArgs) + +set(OPENPOSE_ROOT_DIR "" CACHE PATH "Folder contains OpenPose") + +if (NOT "$ENV{OpenPose_DIR}" STREQUAL "") + set(OPENPOSE_ROOT_DIR $ENV{OpenPose_DIR}) +endif() + +# We are testing only a couple of files in the include directories +if(WIN32) + find_path(OPENPOSE_INCLUDE_DIR openpose/headers.hpp + PATHS ${OPENPOSE_ROOT_DIR}/src/windows) +else() + find_path(OPENPOSE_INCLUDE_DIR openpose/headers.hpp + PATHS ${OPENPOSE_ROOT_DIR}/include) +endif() + +find_library(OPENPOSE_LIBRARY openpose PATHS ${OPENPOSE_ROOT_DIR}/lib) + +find_package_handle_standard_args(OPENPOSE DEFAULT_MSG + OPENPOSE_INCLUDE_DIR OPENPOSE_LIBRARY) + +if(OPENPOSE_FOUND) + set(OPENPOSE_INCLUDE_DIRS ${OPENPOSE_INCLUDE_DIR}) + set(OPENPOSE_LIBRARIES ${OPENPOSE_LIBRARY}) +endif() diff --git a/cmake/Modules/FindSaneProtobuf.cmake 
b/cmake/Modules/FindSaneProtobuf.cmake index ddc967b7..5df49fd3 100644 --- a/cmake/Modules/FindSaneProtobuf.cmake +++ b/cmake/Modules/FindSaneProtobuf.cmake @@ -152,7 +152,7 @@ function(PROTOBUF_GENERATE_CPP SRCS HDRS USE_GRPC) OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/${DIR_FIL}/${FIL_WE}.grpc.pb.cc" "${CMAKE_CURRENT_BINARY_DIR}/${DIR_FIL}/${FIL_WE}.grpc.pb.h" COMMAND ${PROTOBUF_PROTOC_EXECUTABLE} - ARGS --plugin=protoc-gen-grpc=/usr/local/bin/grpc_cpp_plugin --grpc_out ${CMAKE_CURRENT_BINARY_DIR} ${_protobuf_include_path} ${ABS_FIL} + ARGS --plugin=protoc-gen-grpc=${GRPC_CPP_PLUGIN} --grpc_out ${CMAKE_CURRENT_BINARY_DIR} ${_protobuf_include_path} ${ABS_FIL} DEPENDS ${ABS_FIL} ${PROTOBUF_PROTOC_EXECUTABLE} COMMENT "Running C++ protocol buffer compiler on ${FIL}" VERBATIM) @@ -218,7 +218,7 @@ function(PROTOBUF_GENERATE_PYTHON SRCS USE_GRPC) "${CMAKE_CURRENT_BINARY_DIR}/${DIR_FIL}/${FIL_WE}_pb2.py") add_custom_command( OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/${DIR_FIL}/${FIL_WE}_pb2.py" - COMMAND ${PROTOBUF_PROTOC_EXECUTABLE} --python_out ${CMAKE_CURRENT_BINARY_DIR} --plugin=protoc-gen-grpc_python=/usr/local/bin/grpc_python_plugin --grpc_python_out ${CMAKE_CURRENT_BINARY_DIR} ${_protobuf_include_path} ${ABS_FIL} + COMMAND ${PROTOBUF_PROTOC_EXECUTABLE} --python_out ${CMAKE_CURRENT_BINARY_DIR} --plugin=protoc-gen-grpc_python=${GRPC_PYTHON_PLUGIN} --grpc_python_out ${CMAKE_CURRENT_BINARY_DIR} ${_protobuf_include_path} ${ABS_FIL} DEPENDS ${ABS_FIL} ${PROTOBUF_PROTOC_EXECUTABLE} COMMENT "Running Python protocol buffer compiler on ${FIL}" VERBATIM ) @@ -334,6 +334,23 @@ find_program(PROTOBUF_PROTOC_EXECUTABLE ) mark_as_advanced(PROTOBUF_PROTOC_EXECUTABLE) +find_program(GRPC_PYTHON_PLUGIN + NAMES grpc_python_plugin + DOC "" + PATHS + ${PROTOBUF_SRC_ROOT_FOLDER}/vsprojects/${_PROTOBUF_ARCH_DIR}Release + ${PROTOBUF_SRC_ROOT_FOLDER}/vsprojects/${_PROTOBUF_ARCH_DIR}Debug +) +mark_as_advanced(GRPC_PYTHON_PLUGIN) + +find_program(GRPC_CPP_PLUGIN + NAMES grpc_cpp_plugin + DOC "" + PATHS + 
${PROTOBUF_SRC_ROOT_FOLDER}/vsprojects/${_PROTOBUF_ARCH_DIR}Release + ${PROTOBUF_SRC_ROOT_FOLDER}/vsprojects/${_PROTOBUF_ARCH_DIR}Debug +) +mark_as_advanced(GRPC_CPP_PLUGIN) include(FindPackageHandleStandardArgs) FIND_PACKAGE_HANDLE_STANDARD_ARGS(Protobuf DEFAULT_MSG diff --git a/cmake/Modules/FindTinyToml.cmake b/cmake/Modules/FindTinyToml.cmake index 06a039e3..755c47dd 100644 --- a/cmake/Modules/FindTinyToml.cmake +++ b/cmake/Modules/FindTinyToml.cmake @@ -12,7 +12,11 @@ include(FindPackageHandleStandardArgs) set(TINYTOML_ROOT_DIR "" CACHE PATH "Folder contains TinyToml") if (NOT "$ENV{TinyToml_DIR}" STREQUAL "") - set(TINYTOML_ROOT_DIR $ENV{TinyToml_DIR}) + set(TINYTOML_ROOT_DIR $ENV{TinyToml_DIR} CACHE PATH "Folder contains TinyToml" + FORCE) +elseif(TinyToml_DIR) + set(TINYTOML_ROOT_DIR ${TinyToml_DIR} CACHE PATH "Folder contains TinyToml" + FORCE) endif() # We are testing only a couple of files in the include directories diff --git a/cmake/Util/HalideGenerator.cmake b/cmake/Util/HalideGenerator.cmake deleted file mode 100644 index aded4d59..00000000 --- a/cmake/Util/HalideGenerator.cmake +++ /dev/null @@ -1,271 +0,0 @@ -include(CMakeParseArguments) - -function(halide_project name folder) - add_executable("${name}" ${ARGN}) - if (MSVC) - else() - target_compile_options("${name}" PUBLIC "-std=c++11") # Halide clients need C++11 - if(NOT HALIDE_ENABLE_RTTI) - target_compile_options("${name}" PUBLIC "-fno-rtti") - endif() - endif() - target_link_libraries("${name}" PRIVATE "${HALIDE_ROOT_DIR}/lib/libHalide.a" dl pthread z rt tinfo) - target_include_directories("${name}" PRIVATE "${HALIDE_ROOT_DIR}/include") - target_include_directories("${name}" PRIVATE "${HALIDE_ROOT_DIR}/tools") - set_target_properties("${name}" PROPERTIES FOLDER "${folder}") - if (MSVC) - # 4006: "already defined, second definition ignored" - # 4088: "/FORCE used, image may not work" - # (Note that MSVC apparently considers 4088 too important to allow us to ignore it; - # I'm nevertheless 
leaving this here to document that we don't care about it.) - set_target_properties(${name} PROPERTIES LINK_FLAGS "/ignore:4006 /ignore:4088") - target_compile_definitions("${name}" PRIVATE _CRT_SECURE_NO_WARNINGS) - target_link_libraries("${name}" PRIVATE Kernel32) - endif() -endfunction(halide_project) - -function(halide_generator_genfiles_dir NAME OUTVAR) - set(GENFILES_DIR "${CMAKE_BINARY_DIR}/generator_genfiles/${NAME}") - file(MAKE_DIRECTORY "${GENFILES_DIR}") - set(${OUTVAR} "${GENFILES_DIR}" PARENT_SCOPE) -endfunction() - -function(halide_generator_get_exec_path TARGET OUTVAR) - if(MSVC) - # In MSVC, the generator executable will be placed in a configuration specific - # directory specified by ${CMAKE_CFG_INTDIR}. - set(${OUTVAR} "${CMAKE_BINARY_DIR}/${CMAKE_CFG_INTDIR}/${TARGET}${CMAKE_EXECUTABLE_SUFFIX}" PARENT_SCOPE) - elseif(XCODE) - # In Xcode, the generator executable will be placed in a configuration specific - # directory, so the Xcode variable $(CONFIGURATION) is passed in the custom build script. - set(${OUTVAR} "${CMAKE_BINARY_DIR}/$(CONFIGURATION)/${TARGET}${CMAKE_EXECUTABLE_SUFFIX}" PARENT_SCOPE) - else() - get_target_property(GENERATOR_FOLDER ${args_GENERATOR_TARGET} FOLDER) - set(${OUTVAR} "${GENERATOR_FOLDER}/${TARGET}${CMAKE_EXECUTABLE_SUFFIX}" PARENT_SCOPE) - endif() -endfunction() - -function(halide_generator_add_exec_generator_target EXEC_TARGET) - set(options ) - set(oneValueArgs GENERATOR_TARGET GENFILES_DIR) - set(multiValueArgs OUTPUTS GENERATOR_ARGS) - cmake_parse_arguments(args "" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) - - halide_generator_get_exec_path(${args_GENERATOR_TARGET} EXEC_PATH) - - add_custom_command( - OUTPUT ${args_OUTPUTS} - DEPENDS ${args_GENERATOR_TARGET} - COMMAND ${EXEC_PATH} ${args_GENERATOR_ARGS} - WORKING_DIRECTORY ${args_GENFILES_DIR} - COMMENT "Executing Generator ${args_GENERATOR_TARGET} with args ${args_GENERATOR_ARGS}..." 
- ) - - add_custom_target(${EXEC_TARGET} DEPENDS ${args_OUTPUTS}) - set_target_properties(${EXEC_TARGET} PROPERTIES FOLDER "generator") -endfunction() - -# This function adds custom build steps to invoke a Halide generator exectuable -# and produce a static library containing the generated code. -# -# The generator executable must be produced separately, e.g. using a call to the -# function halide_add_generator() or halide_project(...) or add_executable(...) -# and passed to this function in the GENERATOR_TARGET parameter. -# -# Usage: -# halide_add_aot_library( -# GENERATOR_TARGET -# GENERATOR_NAME -# GENERATED_FUNCTION -# GENERATOR_OUTPUTS ... -# GENERATOR_ARGS ...) -# -# is the name of the library being defined. -# GENERATOR_TARGET is the name of the generator executable target, which is assumed to be -# defined elsewhere. -# GENERATOR_TARGET is the name of the generator executable target, which is assumed to be -# defined elsewhere. -# GENERATOR_NAME is the registered name of the Halide::Generator derived object -# GENERATED_FUNCTION is the name of the C function to be generated by Halide, including C++ -# namespace (if any); if omitted, default to GENERATOR_NAME -# GENERATOR_OUTPUTS are the values to pass to -e; if omitted, defaults to "h static_library" -# GENERATOR_ARGS are optional extra arguments passed to the generator executable during -# build. 
-function(halide_add_aot_library AOT_LIBRARY_TARGET) - # Parse arguments - set(options ) - set(oneValueArgs GENERATOR_TARGET GENERATOR_NAME GENERATED_FUNCTION) - set(multiValueArgs GENERATOR_ARGS GENERATOR_OUTPUTS) - cmake_parse_arguments(args "" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) - - if (args_GENERATED_FUNCTION STREQUAL "") - set(args_GENERATED_FUNCTION ${args_GENERATOR_NAME}) - endif() - - # Create a directory to contain generator specific intermediate files - halide_generator_genfiles_dir(${AOT_LIBRARY_TARGET} GENFILES_DIR) - - # Determine the name of the output files - set(FILTER_LIB "${AOT_LIBRARY_TARGET}${CMAKE_STATIC_LIBRARY_SUFFIX}") - set(FILTER_HDR "${AOT_LIBRARY_TARGET}.h") - set(FILTER_CPP "${AOT_LIBRARY_TARGET}.cpp") - - set(GENERATOR_EXEC_ARGS "-o" "${GENFILES_DIR}") - if (NOT ${args_GENERATED_FUNCTION} STREQUAL "") - list(APPEND GENERATOR_EXEC_ARGS "-f" "${args_GENERATED_FUNCTION}" ) - endif() - if (NOT ${args_GENERATOR_NAME} STREQUAL "") - list(APPEND GENERATOR_EXEC_ARGS "-g" "${args_GENERATOR_NAME}") - endif() - if (NOT "${args_GENERATOR_OUTPUTS}" STREQUAL "") - string(REPLACE ";" "," _tmp "${args_GENERATOR_OUTPUTS}") - list(APPEND GENERATOR_EXEC_ARGS "-e" ${_tmp}) - endif() - # GENERATOR_ARGS always come last - list(APPEND GENERATOR_EXEC_ARGS ${args_GENERATOR_ARGS}) - - if ("${args_GENERATOR_OUTPUTS}" STREQUAL "") - set(args_GENERATOR_OUTPUTS static_library h) - endif() - - set(OUTPUTS ) - - # This is the CMake idiom for "if foo in list" - list(FIND args_GENERATOR_OUTPUTS "static_library" _lib_index) - list(FIND args_GENERATOR_OUTPUTS "h" _h_index) - list(FIND args_GENERATOR_OUTPUTS "cpp" _cpp_index) - - if (${_lib_index} GREATER -1) - list(APPEND OUTPUTS "${GENFILES_DIR}/${FILTER_LIB}") - endif() - if (${_h_index} GREATER -1) - list(APPEND OUTPUTS "${GENFILES_DIR}/${FILTER_HDR}") - set_source_files_properties("${GENFILES_DIR}/${FILTER_HDR}" PROPERTIES GENERATED TRUE) - endif() - if (${_cpp_index} GREATER -1) - list(APPEND OUTPUTS 
"${GENFILES_DIR}/${FILTER_CPP}") - set_source_files_properties("${GENFILES_DIR}/${FILTER_HDR}" PROPERTIES GENERATED TRUE) - endif() - - halide_generator_add_exec_generator_target( - "${AOT_LIBRARY_TARGET}.exec_generator" - GENERATOR_TARGET ${args_GENERATOR_TARGET} - GENERATOR_ARGS "${GENERATOR_EXEC_ARGS}" - GENFILES_DIR ${GENFILES_DIR} - OUTPUTS ${OUTPUTS} - ) -endfunction(halide_add_aot_library) - -# Usage: -# halide_add_aot_library_dependency(TARGET AOT_LIBRARY_TARGET) -function(halide_add_aot_library_dependency TARGET AOT_LIBRARY_TARGET) - halide_generator_genfiles_dir(${AOT_LIBRARY_TARGET} GENFILES_DIR) - - add_dependencies("${TARGET}" "${AOT_LIBRARY_TARGET}.exec_generator") - - set(FILTER_LIB "${AOT_LIBRARY_TARGET}${CMAKE_STATIC_LIBRARY_SUFFIX}") - target_link_libraries("${TARGET}" PRIVATE "${GENFILES_DIR}/${FILTER_LIB}") - target_include_directories("${TARGET}" PRIVATE "${GENFILES_DIR}") - - if (WIN32) - if (MSVC) - # /FORCE:multiple allows clobbering the halide runtime symbols in the lib - # linker warnings disabled: - # 4006: "already defined, second definition ignored" - # 4088: "/FORCE used, image may not work" - # (Note that MSVC apparently considers 4088 too important to allow us to ignore it; - # I'm nevertheless leaving this here to document that we don't care about it.) 
- set_target_properties("${TARGET}" PROPERTIES LINK_FLAGS "/STACK:8388608,1048576 /FORCE:multiple /ignore:4006 /ignore:4088") - else() - set_target_properties("${TARGET}" PROPERTIES LINK_FLAGS "-Wl,--allow-multiple-definition") - endif() - else() - target_link_libraries("${TARGET}" PRIVATE dl pthread z) - endif() -endfunction(halide_add_aot_library_dependency) - -function(halide_add_generator NAME) - set(options WITH_STUB) - set(oneValueArgs STUB_GENERATOR_NAME) - set(multiValueArgs SRCS STUB_DEPS) - cmake_parse_arguments(args "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) - - # We need to generate an "object" library for every generator, so that any - # generator that depends on our stub can link in our generator as well. - # Unfortunately, an ordinary static library won't do: CMake has no way to - # force "alwayslink=1", and a static library with just a self-registering - # Generator is almost certain to get optimized away at link time. Using - # an "Object Library" lets us dodge this (it basically just groups .o files - # together and presents them at the end), at the cost of some decidedly - # ugly bits right here. - set(OBJLIB "${NAME}.objlib") - add_library("${OBJLIB}" OBJECT ${args_SRCS}) - # add_dependencies("${OBJLIB}" Halide) - target_include_directories("${OBJLIB}" PRIVATE "${HALIDE_ROOT_DIR}/include") - target_compile_options("${OBJLIB}" PRIVATE "-std=c++11" "-fno-rtti") - foreach(STUB ${args_STUB_DEPS}) - halide_add_generator_stub_dependency(TARGET ${OBJLIB} STUB_GENERATOR_TARGET ${STUB}) - endforeach() - - set(ALLSTUBS $) - foreach(STUB ${args_STUB_DEPS}) - list(APPEND ALLSTUBS $) - endforeach() - - halide_project("${NAME}" - "generator" - "${HALIDE_ROOT_DIR}/tools/GenGen.cpp" - ${ALLSTUBS}) - - # Declare a stub library if requested. 
- if (${args_WITH_STUB}) - halide_add_generator_stub_library(STUB_GENERATOR_TARGET "${NAME}" - STUB_GENERATOR_NAME ${args_STUB_GENERATOR_NAME}) - endif() - - set_target_properties("${NAME}" PROPERTIES FOLDER "${CMAKE_CURRENT_BINARY_DIR}") - - # Add any stub deps passed to us. -endfunction(halide_add_generator) - -function(halide_add_generator_stub_library) - set(options ) - set(oneValueArgs STUB_GENERATOR_TARGET STUB_GENERATOR_NAME) - set(multiValueArgs ) - cmake_parse_arguments(args "" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) - - halide_generator_genfiles_dir(${args_STUB_GENERATOR_TARGET} GENFILES_DIR) - - # STUBNAME_BASE = strip_suffix(STUB_GENERATOR_TARGET, ".generator") - string(REGEX REPLACE "\\.generator*$" "" STUBNAME_BASE ${args_STUB_GENERATOR_TARGET}) - - set(STUB_HDR "${GENFILES_DIR}/${STUBNAME_BASE}.stub.h") - - set(GENERATOR_EXEC_ARGS "-o" "${GENFILES_DIR}" "-e" "cpp_stub") - if (NOT ${args_STUB_GENERATOR_NAME} STREQUAL "") - list(APPEND GENERATOR_EXEC_ARGS "-g" "${args_STUB_GENERATOR_NAME}") - list(APPEND GENERATOR_EXEC_ARGS "-n" "${STUBNAME_BASE}") - endif() - - set(STUBGEN "${args_STUB_GENERATOR_TARGET}.exec_stub_generator") - halide_generator_add_exec_generator_target(${STUBGEN} - GENERATOR_TARGET ${args_STUB_GENERATOR_TARGET} - GENERATOR_ARGS "${GENERATOR_EXEC_ARGS}" - GENFILES_DIR ${GENFILES_DIR} - OUTPUTS "${STUB_HDR}" - ) - set_source_files_properties("${STUB_HDR}" PROPERTIES GENERATED TRUE) -endfunction(halide_add_generator_stub_library) - -function(halide_add_generator_stub_dependency) - # Parse arguments - set(options ) - set(oneValueArgs TARGET STUB_GENERATOR_TARGET) - set(multiValueArgs ) - cmake_parse_arguments(args "" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) - - halide_generator_genfiles_dir(${args_STUB_GENERATOR_TARGET} GENFILES_DIR) - set(STUBGEN "${args_STUB_GENERATOR_TARGET}.exec_stub_generator") - add_dependencies("${args_TARGET}" ${STUBGEN}) - target_include_directories("${args_TARGET}" PRIVATE "${GENFILES_DIR}") 
-endfunction(halide_add_generator_stub_dependency) diff --git a/cmake/Util/Op.cmake b/cmake/Util/Op.cmake index bf6bf46a..481ca6c5 100644 --- a/cmake/Util/Op.cmake +++ b/cmake/Util/Op.cmake @@ -2,15 +2,13 @@ # op. It sets a few default flags and exposes a function build_op for simplifying # the build process. See examples/tutorial/04_custom_op.py for an example usage. -if(NOT SCANNER_PATH) - message(FATAL_ERROR "Set SCANNER_PATH to the Scanner repo directory before including Op.cmake.") -endif() -list(APPEND CMAKE_MODULE_PATH "${SCANNER_PATH}/cmake/Modules/") +list(INSERT CMAKE_MODULE_PATH 0 "${CMAKE_CURRENT_LIST_DIR}/Modules") include(CheckCXXCompilerFlag) CHECK_CXX_COMPILER_FLAG("-std=c++1y" COMPILER_SUPPORTS_CXX1Y) if(NOT COMPILER_SUPPORTS_CXX1Y) - message(FATAL_ERROR "The compiler ${CMAKE_CXX_COMPILER} has no C++1y support.") + message(FATAL_ERROR + "The compiler ${CMAKE_CXX_COMPILER} has no C++1y support.") endif() if (NOT CMAKE_BUILD_TYPE) @@ -19,34 +17,57 @@ if (NOT CMAKE_BUILD_TYPE) endif() function(build_op) - set(options ) + set(options) set(oneValueArgs LIB_NAME PROTO_SRC NO_FLAGS) set(multiValueArgs CPP_SRCS) cmake_parse_arguments(args "" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) include_directories("${CMAKE_CURRENT_BINARY_DIR}") + # Build protobuf files if they exist if(NOT("${args_PROTO_SRC}" STREQUAL "")) find_package(SaneProtobuf REQUIRED) set(PROTOBUF_IMPORT_DIRS "${SCANNER_PATH}") protobuf_generate_cpp(PROTO_SRCS PROTO_HDRS OFF ${args_PROTO_SRC}) protobuf_generate_python(PROTO_PY OFF ${args_PROTO_SRC}) - add_custom_target(${args_LIB_NAME}_proto_files DEPENDS ${PROTO_HDRS} ${PROTO_PY}) + add_custom_target(${args_LIB_NAME}_proto_files + DEPENDS ${PROTO_HDRS} ${PROTO_PY}) add_library(${args_LIB_NAME} SHARED ${args_CPP_SRCS} ${PROTO_SRCS}) add_dependencies(${args_LIB_NAME} ${args_LIB_NAME}_proto_files) - target_link_libraries(${args_LIB_NAME} PUBLIC - "${PROTOBUF_LIBRARY}" - "${SCANNER_PATH}/build/libscanner.so") else() 
add_library(${args_LIB_NAME} SHARED ${args_CPP_SRCS}) endif() + # NO_FLAGS is primarily for special treatment of libstdlib right now if("${args_NO_FLAGS}" STREQUAL "") + # Explictly link libscanner.so + execute_process( + OUTPUT_VARIABLE SCANNER_LIB_PATH + COMMAND + python3 -c "import scannerpy.stdlib.build_flags as b; b.print_lib()") + + if(APPLE) + target_link_libraries(${args_LIB_NAME} PUBLIC + "${SCANNER_LIB_PATH}/libscanner.dylib") + else() + target_link_libraries(${args_LIB_NAME} PUBLIC + "${SCANNER_LIB_PATH}/libscanner.so") + endif() + execute_process( OUTPUT_VARIABLE BUILD_FLAGS - COMMAND python -c "import scannerpy; scannerpy.Database().print_build_flags()") + COMMAND + python3 -c "import scannerpy.stdlib.build_flags as b; b.print_compile_flags()") set_target_properties( ${args_LIB_NAME} PROPERTIES COMPILE_FLAGS "${BUILD_FLAGS}") + + execute_process( + OUTPUT_VARIABLE LINK_FLAGS + COMMAND + python3 -c "import scannerpy.stdlib.build_flags as b; b.print_link_flags()") + set_target_properties( + ${args_LIB_NAME} PROPERTIES + LINK_FLAGS "${LINK_FLAGS}") endif() endfunction() diff --git a/deps.sh b/deps.sh new file mode 100644 index 00000000..bfe55c5d --- /dev/null +++ b/deps.sh @@ -0,0 +1,809 @@ +#!/bin/bash + +if [[ "$OSTYPE" == "linux-gnu" ]]; then + cores=$(nproc) + # ... +elif [[ "$OSTYPE" == "darwin"* ]]; then + cores=$(sysctl -n hw.ncpu) + # Mac OSX +else + # Unknown. + echo "Unknown OSTYPE: $OSTYPE. Exiting." 
+ exit 1 +fi + +LOCAL_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +BUILD_DIR=$LOCAL_DIR/thirdparty/build +DEFAULT_INSTALL_DIR=$LOCAL_DIR/thirdparty/install +FILES_DIR=$LOCAL_DIR/thirdparty/resources +PYTHON_VERSION=$(python3 -c 'import sys; print(".".join(map(str, sys.version_info[:2])))') + +POSITIONAL=() + +# Ask if installed +INSTALL_FFMPEG=true +INSTALL_OPENCV=true +INSTALL_PROTOBUF=true +INSTALL_GRPC=true +INSTALL_CAFFE=true +INSTALL_HALIDE=true +INSTALL_OPENPOSE=true + +USE_GPU=false +NO_USE_GPU=false + +# Assume not installed +INSTALL_GOOGLETEST=true +INSTALL_HWANG=true +INSTALL_TINYTOML=true +INSTALL_STOREHOUSE=true +INSTALL_PYBIND=true +INSTALL_LIBPQXX=true + +INSTALL_PREFIX=$DEFAULT_INSTALL_DIR + +INSTALL_ALL=false +INSTALL_NONE=false + +while [[ $# -gt 0 ]] +do +key="$1" + +case $key in + -c|--cores) + cores="$2" + shift # past arg + shift # past value + ;; + -g|--use-gpu) + USE_GPU=true + shift # past arg + ;; + -ng|--no-use-gpu) + NO_USE_GPU=true + shift # past arg + ;; + -p|--prefix) + INSTALL_PREFIX="$2" + shift # past arg + shift # past value + ;; + -a|--install-all) + INSTALL_ALL=true + shift # past arg + ;; + -n|--install-none) + INSTALL_NONE=true + shift # past arg + ;; + --with-ffmpeg) + WITH_FFMPEG="$2" + shift # past arg + shift # past value + ;; + --with-opencv) + WITH_OPENCV="$2" + shift # past arg + shift # past value + ;; + --with-protobuf) + WITH_PROTOBUF="$2" + shift # past arg + shift # past value + ;; + --with-grpc) + WITH_GRPC="$2" + shift # past arg + shift # past value + ;; + --with-caffe) + WITH_CAFFE="$2" + shift # past arg + shift # past value + ;; + --with-halide) + WITH_HALIDE="$2" + shift # past arg + shift # past value + ;; + --with-openpose) + WITH_OPENPOSE="$2" + shift # past arg + shift # past value + ;; + --with-hwang) + WITH_HWANG="$2" + shift # past arg + shift # past value + ;; + --with-storehouse) + WITH_STOREHOUSE="$2" + shift # past arg + shift # past value + ;; + --with-pybind) + WITH_PYBIND="$2" + 
shift # past arg + shift # past value + ;; + --with-libpqxx) + WITH_LIBPQXX="$2" + shift # past arg + shift # past value + ;; + + *) # unknown option + POSITIONAL+=("$1") # save it in an array for later + shift # past argument + ;; +esac +done + +echo "--------------------------------------------------------------" +echo "| Scanner Dependency Installation Script |" +echo "--------------------------------------------------------------" +echo "The script will ask if required dependencies are installed and" +echo "then install missing dependencies to " +echo "$INSTALL_PREFIX" +echo "(customized by specifying (--prefix )" + +set -- "${POSITIONAL[@]}" # restore positional parameters + +if command -v conda list >/dev/null 2>&1; then + # Anaconda is installed, so add lib to prefix path for OpenCV to find + # PythonLib + echo "Detected Anaconda, adding lib path to OpenCV and Caffe build" + py_path=$(dirname $(which python))/../lib + PY_EXTRA_CMDS="$py_path" +else + PY_EXTRA_CMDS="" +fi + +# Check if we have GPUs by looking for nvidia-smi +if command -v nvidia-smi >/dev/null 2>&1; then + HAVE_GPU=true +else + HAVE_GPU=false +fi + + +# Force building with GPU when specified +if [[ $USE_GPU == true ]]; then + HAVE_GPU=true +fi + +# Force NOT building with GPU when specified, overriding other commands +if [[ $NO_USE_GPU == true ]]; then + HAVE_GPU=false +fi + +echo "" +echo "Configuration:" +echo "--------------------------------------------------------------" +echo "Detected Python version: $PYTHON_VERSION" +echo "GPUs available: $HAVE_GPU" +echo "" + +# Directories for installed dependencies +FFMPEG_DIR=$INSTALL_PREFIX +OPENCV_DIR=$INSTALL_PREFIX +PROTOBUF_DIR=$INSTALL_PREFIX +GRPC_DIR=$INSTALL_PREFIX +CAFFE_DIR=$INSTALL_PREFIX +HALIDE_DIR=$INSTALL_PREFIX +PYBIND_DIR=$INSTALL_PREFIX +HWANG_DIR=$INSTALL_PREFIX +STOREHOUSE_DIR=$INSTALL_PREFIX +TINYTOML_DIR=$INSTALL_PREFIX +OPENPOSE_DIR=$INSTALL_PREFIX +LIBPQXX_DIR=$INSTALL_PREFIX + +if [[ ! 
-z ${WITH_FFMPEG+x} ]]; then + INSTALL_FFMPEG=false + FFMPEG_DIR=$WITH_FFMPEG +fi +if [[ ! -z ${WITH_OPENCV+x} ]]; then + INSTALL_OPENCV=false + OPENCV_DIR=$WITH_OPENCV +fi +if [[ ! -z ${WITH_PROTOBUF+x} ]]; then + INSTALL_PROTOBUF=false + PROTOBUF_DIR=$WITH_PROTOBUF +fi +if [[ ! -z ${WITH_GRPC+x} ]]; then + INSTALL_GRPC=false + GRPC_DIR=$WITH_GRPC +fi +if [[ ! -z ${WITH_CAFFE+x} ]]; then + INSTALL_CAFFE=false + CAFFE_DIR=$WITH_CAFFE +fi +if [[ ! -z ${WITH_HALIDE+x} ]]; then + INSTALL_HALIDE=false + HALIDE_DIR=$WITH_HALIDE +fi +if [[ ! -z ${WITH_PYBIND+x} ]]; then + INSTALL_PYBIND=false + PYBIND_DIR=$WITH_PYBIND +fi +if [[ ! -z ${WITH_HWANG+x} ]]; then + INSTALL_HWANG=false + HWANG_DIR=$WITH_HWANG +fi +if [[ ! -z ${WITH_STOREHOUSE+x} ]]; then + INSTALL_STOREHOUSE=false + STOREHOUSE_DIR=$WITH_STOREHOUSE +fi +if [[ ! -z ${WITH_OPENPOSE+x} ]]; then + INSTALL_OPENPOSE=false + OPENPOSE_DIR=$WITH_OPENPOSE +fi +if [[ ! -z ${WITH_LIBPQXX+x} ]]; then + INSTALL_LIBPQXX=false + LIBPQXX_DIR=$WITH_LIBPQXX +fi + +export C_INCLUDE_PATH=$INSTALL_PREFIX/include:$C_INCLUDE_PATH +export LD_LIBRARY_PATH=$INSTALL_PREFIX/lib:$LD_LIBRARY_PATH +export PATH=$INSTALL_PREFIX/bin:$PATH +export PKG_CONFIG_PATH=$INSTALL_PREFIX/lib/pkgconfig:$PKG_CONFIG_PATH + +mkdir -p $BUILD_DIR +mkdir -p $INSTALL_PREFIX + +if [[ $INSTALL_NONE == true ]]; then + INSTALL_FFMPEG=false + INSTALL_OPENCV=false + INSTALL_PROTOBUF=false + INSTALL_GRPC=false + INSTALL_CAFFE=false + INSTALL_HALIDE=false + INSTALL_OPENPOSE=false + INSTALL_GOOGLETEST=false + INSTALL_HWANG=false + INSTALL_TINYTOML=false + INSTALL_STOREHOUSE=false + INSTALL_PYBIND=false + INSTALL_LIBPQXX=false + +elif [[ $INSTALL_ALL == false ]]; then + # Ask about each library + if [[ -z ${WITH_FFMPEG+x} ]]; then + echo -n "Do you have ffmpeg>=3.3.1 installed? [y/N]: " + read yn + if [[ $yn == y ]] || [[ $yn == Y ]]; then + INSTALL_FFMPEG=false + echo -n "Where is your ffmpeg install? 
[/usr/local]: " + read install_location + if [[ $install_location == "" ]]; then + FFMPEG_DIR=/usr/local + else + FFMPEG_DIR=$install_location + fi + else + INSTALL_FFMPEG=true + fi + fi + + if [[ -z ${WITH_OPENCV+x} ]]; then + echo -n "Do you have opencv>=3.4.0 with contrib installed? [y/N]: " + read yn + if [[ $yn == y ]] || [[ $yn == Y ]]; then + INSTALL_OPENCV=false + echo -n "Where is your opencv install? [/usr/local]: " + read install_location + if [[ $install_location == "" ]]; then + OPENCV_DIR=/usr/local + else + OPENCV_DIR=$install_location + fi + else + INSTALL_OPENCV=true + fi + fi + + if [[ -z ${WITH_PROTOBUF+x} ]]; then + echo -n "Do you have protobuf>=3.5.1 installed? [y/N]: " + read yn + if [[ $yn == y ]] || [[ $yn == Y ]]; then + INSTALL_PROTOBUF=false + echo -n "Where is your protobuf install? [/usr/local]: " + read install_location + if [[ $install_location == "" ]]; then + PROTOBUF_DIR=/usr/local + else + PROTOBUF_DIR=$install_location + fi + else + INSTALL_PROTOBUF=true + fi + fi + + if [[ -z ${WITH_GRPC+x} ]]; then + echo -n "Do you have grpc==1.12.0 installed? [y/N]: " + read yn + if [[ $yn == y ]] || [[ $yn == Y ]]; then + INSTALL_GRPC=false + echo -n "Where is your grpc install? [/usr/local]: " + read install_location + if [[ $install_location == "" ]]; then + GRPC_DIR=/usr/local + else + GRPC_DIR=$install_location + fi + else + INSTALL_GRPC=true + fi + fi + + if [[ -z ${WITH_HALIDE+x} ]]; then + echo -n "Do you have halide (release_2018_02_15) installed? [y/N]: " + read yn + if [[ $yn == y ]] || [[ $yn == Y ]]; then + INSTALL_HALIDE=false + echo -n "Where is your halide install? [/usr/local]: " + read install_location + if [[ $install_location == "" ]]; then + HALIDE_DIR=/usr/local + else + HALIDE_DIR=$install_location + fi + else + INSTALL_HALIDE=true + fi + fi + + if [[ $HAVE_GPU == true ]]; then + if [[ -z ${WITH_OPENPOSE+x} ]]; then + echo -n "Do you have OpenPose (v1.3.0) installed? 
[y/N]: " + read yn + if [[ $yn == y ]] || [[ $yn == Y ]]; then + INSTALL_OPENPOSE=false + echo -n "Where is your OpenPose install? [/usr/local]: " + read install_location + if [[ $install_location == "" ]]; then + OPENPOSE_DIR=/usr/local + else + OPENPOSE_DIR=$install_location + fi + else + INSTALL_OPENPOSE=true + fi + fi + fi + + if [[ -z ${WITH_CAFFE+x} ]]; then + echo -n "Do you have caffe>=rc5 or intel-caffe>=1.0.6 installed? [y/N]: " + read yn + if [[ $yn == y ]] || [[ $yn == Y ]]; then + INSTALL_CAFFE=false + echo -n "Where is your caffe install? [/usr/local]: " + read install_location + if [[ $install_location == "" ]]; then + CAFFE_DIR=/usr/local + else + CAFFE_DIR=$install_location + fi + else + INSTALL_CAFFE=true + if [[ $HAVE_GPU == true ]]; then + echo -n "Do you plan to use GPUs for CNN evaluation? [Y/n]: " + read yn + if [[ $yn == n ]] || [[ $yn == N ]]; then + USE_GPU=false + else + USE_GPU=true + fi + else + USE_GPU=false + fi + fi + fi +fi + +if [[ $INSTALL_FFMPEG == true ]] && [[ ! -f $BUILD_DIR/ffmpeg.done ]] ; then + echo "Installing ffmpeg 3.3.1..." + + # Determine command string to use + if [[ "$OSTYPE" == "linux-gnu" ]]; then + # Linux + CMDS="--extra-version=0ubuntu0.16.04.1 + --toolchain=hardened + --cc=cc --cxx=g++" + # ... + elif [[ "$OSTYPE" == "darwin"* ]]; then + # Mac OSX + CMDS="" + fi + + # FFMPEG + cd $BUILD_DIR + rm -fr ffmpeg + git clone -b n3.3.1 https://git.ffmpeg.org/ffmpeg.git && cd ffmpeg && \ + ./configure --prefix=$INSTALL_PREFIX \ + --enable-shared --disable-stripping \ + --disable-decoder=libschroedinger \ + --enable-avresample \ + --enable-libx264 \ + --enable-nonfree \ + --enable-gpl \ + --enable-gnutls \ + $(echo $CMDS) && \ + make -j${cores} && make install && touch $BUILD_DIR/ffmpeg.done \ + || { echo 'Installing ffmpeg failed!' ; exit 1; } + echo "Done installing ffmpeg 3.3.1" +fi + +if [[ $INSTALL_OPENCV == true ]] && [[ ! 
-f $BUILD_DIR/opencv.done ]]; then + # OpenCV 3.4.0 + OpenCV contrib + echo "Installing OpenCV 3.4.0..." + + # Determine command string to use + if [[ "$OSTYPE" == "linux-gnu" ]]; then + # Linux + CMDS="" + # ... + elif [[ "$OSTYPE" == "darwin"* ]]; then + # Mac OSX + CMDS="-DWITH_CUDA=OFF" + fi + + cd $BUILD_DIR + rm -rf opencv opencv_contrib ceres-solver + git clone -b 3.4.1 https://github.com/opencv/opencv --depth 1 && \ + git clone -b 3.4.1 https://github.com/opencv/opencv_contrib \ + --depth 1 && \ + git clone -b 1.14.0 https://github.com/ceres-solver/ceres-solver \ + --depth 1 && \ + cd ceres-solver && mkdir -p build_cmake && cd build_cmake && \ + cmake .. -DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX && \ + make install -j$cores && \ + mkdir -p $BUILD_DIR/opencv/build && cd $BUILD_DIR/opencv/build && \ + cmake -D CMAKE_BUILD_TYPE=Release \ + -D CMAKE_INSTALL_PREFIX=$INSTALL_PREFIX \ + -D BUILD_TESTS=OFF -D BUILD_PERF_TESTS=OFF -D ENABLE_FAST_MATH=1 \ + -D CUDA_FAST_MATH=1 -D WITH_CUBLAS=1 -D WITH_NVCUVID=1 \ + -D BUILD_opencv_rgbd=OFF \ + -D BUILD_opencv_cnn_3dobj=OFF \ + -D OPENCV_EXTRA_MODULES_PATH=$BUILD_DIR/opencv_contrib/modules \ + $(echo $CMDS) -DCMAKE_PREFIX_PATH=$(echo $PY_EXTRA_CMDS) \ + .. && \ + make install -j$cores && touch $BUILD_DIR/opencv.done \ + || { echo 'Installing OpenCV failed!' ; exit 1; } + echo "Done installing OpenCV 3.4.0" +fi + +if [[ $INSTALL_PROTOBUF == true ]] && [[ ! -f $BUILD_DIR/protobuf.done ]] ; then + # protobuf 3.5.1 + echo "Installing protobuf 3.5.1..." + cd $BUILD_DIR + rm -fr protobuf + git clone -b v3.5.1 https://github.com/google/protobuf.git --depth 1 && \ + cd protobuf && bash ./autogen.sh && \ + ./configure --prefix=$INSTALL_PREFIX && make -j$cores && \ + make install && touch $BUILD_DIR/protobuf.done \ + || { echo 'Installing protobuf failed!' ; exit 1; } + echo "Done installing protobuf 3.5.1" +fi + +if [[ $INSTALL_GRPC == true ]] && [[ ! 
-f $BUILD_DIR/grpc.done ]] ; then + # gRPC 1.12.0 + echo "Installing gRPC 1.12.0..." + cd $BUILD_DIR + rm -fr grpc + git clone -b v1.12.0 https://github.com/grpc/grpc && \ + cd grpc && git submodule update --init --recursive && \ + CPPFLAGS=-I$INSTALL_PREFIX/include LDFLAGS=-L$INSTALL_PREFIX/lib make -j$cores && \ + CPPFLAGS=-I$INSTALL_PREFIX/include LDFLAGS=-L$INSTALL_PREFIX/lib make install prefix=$INSTALL_PREFIX && \ + touch $BUILD_DIR/grpc.done \ + || { echo 'Installing gRPC failed!' ; exit 1; } + if [[ "$OSTYPE" == "linux-gnu" ]]; then + # Linux + ldconfig -n $INSTALL_PREFIX/lib + elif [[ "$OSTYPE" == "darwin"* ]]; then + # OS X + install_name_tool -id "@rpath/libgrpc++_unsecure.dylib" \ + $INSTALL_PREFIX/lib/libgrpc++_unsecure.dylib + install_name_tool -id "@rpath/libgrpc.dylib" \ + $INSTALL_PREFIX/lib/libgrpc.dylib + install_name_tool -id "@rpath/libgpr.dylib" \ + $INSTALL_PREFIX/lib/libgpr.dylib + install_name_tool -change libgpr.dylib @rpath/libgpr.dylib \ + $INSTALL_PREFIX/lib/libgrpc++_unsecure.dylib + install_name_tool -change libgrpc_unsecure.dylib @rpath/libgrpc_unsecure.dylib \ + $INSTALL_PREFIX/lib/libgrpc++_unsecure.dylib + fi + echo "Done installing gRPC 1.12.0" +fi + +if [[ $INSTALL_HALIDE == true ]] && [[ ! -f $BUILD_DIR/halide.done ]] ; then + # Halide + echo "Installing Halide..." 
+ + cd $BUILD_DIR + rm -fr Halide + mkdir Halide + cd Halide + if [[ "$OSTYPE" == "linux-gnu" ]]; then + # If CLANG is not set, we should set it to clang or clang-5.0 + if [ -z ${CLANG+x} ]; then + if command -v clang >/dev/null 2>&1 && + [[ $(clang++ -v 2>&1 | + grep version | + sed 's/.*version \([0-9]*.[0-9]*.[0-9]*\) .*/\1/g' | + perl -pe '($_)=/([0-9]+([.][0-9]+)+)/') > '4.0.0' ]]; then + export CLANG=clang + elif command -v clang-5.0 >/dev/null 2>&1; then + export CLANG=clang-5.0 + fi + echo $CLANG + fi + # If LLVM_CONFIG is not set, we should set it to llvm-config or + # llvm-config-5.0 + if [ -z ${LLVM_CONFIG+x} ]; then + if command -v llvm-config >/dev/null 2>&1 && + [[ $(llvm-config --version) > '4.0.0' ]]; then + export LLVM_CONFIG=llvm-config + elif command -v llvm-config-5.0 >/dev/null 2>&1; then + export LLVM_CONFIG=llvm-config-5.0 + fi + fi + git clone -b release_2018_02_15 https://github.com/halide/Halide --depth 1 && \ + cd Halide && \ + make distrib -j$cores && \ + cp -r distrib/* $INSTALL_PREFIX && \ + touch $BUILD_DIR/halide.done \ + || { echo 'Installing Halide failed!' ; exit 1; } + elif [[ "$OSTYPE" == "darwin"* ]]; then + TAR_NAME=halide-mac-64-trunk-46d8e9e0cdae456489f1eddfd6d829956fc3c843.tgz + wget --retry-on-http-error=403 https://github.com/halide/Halide/releases/download/release_2018_02_15/$TAR_NAME && \ + wget --retry-on-http-error=403 https://raw.githubusercontent.com/halide/Halide/release_2018_02_15/src/Generator.h && \ + tar -zxf $TAR_NAME && \ + cp Generator.h halide/include && \ + mkdir -p $INSTALL_PREFIX/lib && \ + find ./halide -type f -exec chmod 644 {} + && \ + find ./halide -type d -exec chmod 755 {} + && \ + find ./halide/bin -type f -exec chmod 755 {} + && \ + cp -r halide/bin/* $INSTALL_PREFIX/lib && \ + rm -r halide/bin && \ + cp -r halide/* $INSTALL_PREFIX && \ + install_name_tool -id "@rpath/libHalide.dylib" $INSTALL_PREFIX/lib/libHalide.dylib + touch $BUILD_DIR/halide.done \ + || { echo 'Installing Halide failed!' 
; exit 1; } + fi + + echo "Done installing Halide" +fi + +if [[ $INSTALL_PYBIND == true ]] && [[ ! -f $BUILD_DIR/pybind.done ]] ; then + echo "Installing pybind..." + cd $BUILD_DIR + rm -fr pybind11 + git clone -b v2.2.2 https://github.com/pybind/pybind11 --depth 1 && \ + cd pybind11 && \ + mkdir build && cd build && \ + cmake .. -DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX -DPYBIND11_TEST=Off -DCMAKE_BUILD_TYPE=RelWithDebInfo && \ + make install -j${cores} && cd ../../ && \ + touch $BUILD_DIR/pybind.done \ + || { echo 'Installing pybind failed!' ; exit 1; } + echo "Done installing pybind" +fi + +if [[ $INSTALL_STOREHOUSE == true ]] && [[ ! -f $BUILD_DIR/storehouse.done ]] ; then + echo "Installing storehouse..." + cd $BUILD_DIR + rm -fr storehouse + git clone https://github.com/scanner-research/storehouse && \ + cd storehouse && \ + git checkout v0.6.3 && \ + cd thirdparty && mkdir build && cd build && \ + cmake .. -DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX -DCMAKE_BUILD_TYPE=RelWithDebInfo && \ + make -j${cores} && cd ../../ && \ + mkdir build && cd build && \ + cmake .. -DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX -DCMAKE_BUILD_TYPE=RelWithDebInfo && \ + make install -j${cores} && cd .. && \ + CPATH=$INSTALL_PREFIX/include LD_LIBRARY_PATH=$INSTALL_PREFIX/lib ./build.sh && \ + touch $BUILD_DIR/storehouse.done \ + || { echo 'Installing storehouse failed!' ; exit 1; } + echo "Done installing storehouse" +fi + +if [[ $INSTALL_GOOGLETEST == true ]] && [[ ! -f $BUILD_DIR/googletest.done ]]; then + echo "Installing googletest..." + cd $BUILD_DIR + rm -fr googletest + git clone https://github.com/google/googletest && \ + cd googletest && git checkout release-1.8.1 && \ + mkdir build && cd build && \ + cmake .. -DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX && \ + make -j${cores} && make install && \ + touch $BUILD_DIR/googletest.done \ + || { echo 'Installing googletest failed!' ; exit 1; } + echo "Done installing googletest" +fi + +if [[ $INSTALL_HWANG == true ]] && [[ ! 
-f $BUILD_DIR/hwang.done ]] ; then + echo "Installing hwang..." + cd $BUILD_DIR + rm -fr hwang + git clone https://github.com/scanner-research/hwang && \ + cd hwang && \ + git checkout v0.3.6 && \ + bash ./deps.sh -a \ + --with-ffmpeg $INSTALL_PREFIX \ + --with-protobuf $INSTALL_PREFIX \ + --cores ${cores} && \ + mkdir -p build && cd build && \ + cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo -DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX -DBUILD_CUDA=$USE_GPU && \ + make install -j${cores} && cd .. && ./build.sh && \ + touch $BUILD_DIR/hwang.done \ + || { echo 'Installing hwang failed!' ; exit 1; } + echo "Done installing hwang" +fi + +if [[ $INSTALL_TINYTOML == true ]] && [[ ! -f $BUILD_DIR/tinytoml.done ]]; then + echo "Installing tinytoml..." + cd $BUILD_DIR + rm -fr tinytoml + git clone https://github.com/mayah/tinytoml.git && \ + cd tinytoml && git checkout 3559856002eee57693349b8a2d8a0cf6250d269c && \ + cp -r include/* $INSTALL_PREFIX/include && \ + touch $BUILD_DIR/tinytoml.done \ + || { echo 'Installing tinytoml failed!' ; exit 1; } + echo "Done installing tinytoml" +fi + +if [[ $INSTALL_CAFFE == true ]] && [[ $USE_GPU == false ]] && \ + [[ "$OSTYPE" == "linux-gnu" ]] && [[ ! -f $BUILD_DIR/caffe.done ]]; then + # Intel Caffe 1.0.6 + cd $BUILD_DIR + rm -fr caffe + # Use more recent mkldnn commit to fix gcc bug + git clone -b 1.0.6 https://github.com/intel/caffe --depth 1 && \ + cd caffe && \ + cp $FILES_DIR/caffe/Makefile.config Makefile.config && \ + rm mkldnn.commit && \ + echo "2604f435da7bb9f1896ae37200d91734adfdba9c" > mkldnn.commit && \ + mkdir build && cd build && \ + cmake -DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX \ + -DCMAKE_PREFIX_PATH="$INSTALL_PREFIX;$PY_EXTRA_CMDS" \ + -DCPU_ONLY=ON \ + -DOpenCV_DIR=$OPENCV_DIR \ + -DBUILD_python=OFF \ + -Dpython_version=3 \ + -DBLAS=mkl \ + .. && \ + make -j${cores} && \ + make install && \ + cd .. 
&& \ + cp -r external/mkl/mklml_lnx_2018.0.20170908/* $INSTALL_PREFIX && \ + cp -r external/mkldnn/install/* $INSTALL_PREFIX && \ + touch $BUILD_DIR/caffe.done \ + || { echo 'Installing caffe failed!' ; exit 1; } +fi + +#if [[ $INSTALL_CAFFE == true ]] && +if [[ $INSTALL_CAFFE == true ]] && \ + ([[ $USE_GPU == true ]] || + [[ "$OSTYPE" == "darwin"* ]]) && \ + [[ ! -f $BUILD_DIR/caffe.done ]]; then + cd $BUILD_DIR + # Intel MKL + + if [[ "$OSTYPE" == "linux-gnu" ]]; then + rm -fr mkl + mkdir mkl && \ + cd mkl && \ + wget http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12414/l_mkl_2018.1.163.tgz && \ + tar -zxf l_mkl_2018.1.163.tgz && \ + cp $FILES_DIR/mkl/silent.cfg silent.cfg && \ + echo "PSET_INSTALL_DIR=$INSTALL_PREFIX/intel" >> silent.cfg && \ + cd l_mkl_2018.1.163 && \ + bash install.sh --cli-mode --silent ../silent.cfg + fi + + if [[ $USE_GPU == true ]]; then + CPU_ONLY=OFF + else + CPU_ONLY=ON + fi + + cd $BUILD_DIR + # Caffe rc5 + rm -fr caffe + git clone https://github.com/BVLC/caffe && \ + cd caffe && + git checkout 18b09e807a6e146750d84e89a961ba8e678830b4 && + cp $FILES_DIR/caffe/Makefile.config Makefile.config && \ + mkdir build && cd build && \ + cmake -DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX \ + -DCMAKE_PREFIX_PATH=$INSTALL_PREFIX \ + -DINTEL_ROOT=$INSTALL_PREFIX/intel \ + -DCPU_ONLY=$CPU_ONLY \ + -DBLAS=mkl \ + -DBUILD_python=OFF \ + -Dpython_version=3 \ + -DCUDA_ARCH_NAME="Manual" \ + -DCUDA_ARCH_BIN="30 35 50 60 61" \ + -DCUDA_ARCH_PTX="30 35 50 60 61" \ + -DOpenCV_DIR=$INSTALL_PREFIX \ + .. && \ + make -j${cores} && \ + make install && \ + touch $BUILD_DIR/caffe.done \ + || { echo 'Installing caffe failed!' ; exit 1; } +fi + +if [[ $INSTALL_OPENPOSE == true ]] && [[ ! -f $BUILD_DIR/openpose.done ]] && \ + ! 
[[ "$OSTYPE" == "darwin"* ]]; then + EXTRA_FLAGS="" + if [[ $HAVE_GPU == false ]]; then + EXTRA_FLAGS="-DGPU_MODE=CPU_ONLY" + fi + + cd $BUILD_DIR + rm -rf openpose + git clone -b v1.3.0 https://github.com/CMU-Perceptual-Computing-Lab/openpose --depth 1 && \ + cd openpose && mkdir build && cd build && \ + cmake -D CMAKE_INSTALL_PREFIX=$INSTALL_PREFIX \ + -D CMAKE_PREFIX_PATH=$INSTALL_PREFIX \ + -D OpenCV_DIR=$INSTALL_PREFIX \ + -D BUILD_CAFFE=OFF \ + -D Caffe_INCLUDE_DIRS=$CAFFE_DIR/include \ + -D Caffe_LIBS=$CAFFE_DIR/lib/libcaffe.so \ + -D BUILD_EXAMPLES=Off \ + -D BUILD_DOCS=Off \ + -D DOWNLOAD_COCO_MODEL=Off \ + -D DOWNLOAD_HAND_MODEL=Off \ + -D DOWNLOAD_FACE_MODEL=Off \ + -DCUDA_ARCH="Manual" \ + -DCUDA_ARCH_BIN="30 35 50 60 61" \ + -DCUDA_ARCH_PTX="30 35 50 60 61" \ + ${EXTRA_FLAGS} \ + .. && \ + make install -j${cores} && \ + touch $BUILD_DIR/openpose.done \ + || { echo 'Installing OpenPose failed!'; exit 1; } +fi + + +if [[ $INSTALL_LIBPQXX == true ]] && [[ ! -f $BUILD_DIR/libpqxx.done ]]; then + cd $BUILD_DIR + rm -rf libpqxx + git clone -b 6.2.2 https://github.com/jtv/libpqxx --depth 1 && \ + cd libpqxx && \ + CXXFLAGS="-fPIC" ./configure --prefix=$INSTALL_PREFIX --disable-documentation && \ + make install -j${cores} && \ + touch $BUILD_DIR/libpqxx.done \ + || { echo 'Installing libpqxx failed!'; exit 1; } +fi + + +DEP_FILE=$LOCAL_DIR/dependencies.txt +rm -f $DEP_FILE +echo "HAVE_GPU=$HAVE_GPU" >> $DEP_FILE +echo "CAFFE_GPU=$USE_GPU" >> $DEP_FILE +echo "PYBIND11_DIR=$PYBIND_DIR" >> $DEP_FILE +echo "FFMPEG_DIR=$FFMPEG_DIR" >> $DEP_FILE +echo "OpenCV_DIR=$OPENCV_DIR" >> $DEP_FILE +echo "PROTOBUF_DIR=$PROTOBUF_DIR" >> $DEP_FILE +echo "GRPC_DIR=$GRPC_DIR" >> $DEP_FILE +echo "Caffe_DIR=$CAFFE_DIR" >> $DEP_FILE +echo "Halide_DIR=$HALIDE_DIR" >> $DEP_FILE +echo "Hwang_DIR=$HWANG_DIR" >> $DEP_FILE +echo "STOREHOUSE_DIR=$STOREHOUSE_DIR" >> $DEP_FILE +echo "TinyToml_DIR=$TINYTOML_DIR" >> $DEP_FILE +echo "LIBPQXX_DIR=$LIBPQXX_DIR" >> $DEP_FILE + +echo "Done 
installing required dependencies!" +echo -n "Add $INSTALL_PREFIX/lib to your LD_LIBRARY_PATH, " +echo -n "add $INSTALL_PREFIX/bin to your PATH, and " +echo -n "add $INSTALL_PREFIX/lib/pkgconfig to your PKG_CONFIG_PATH so the installed " +echo -n "dependencies can be found! " +echo "e.g. export LD_LIBRARY_PATH=$INSTALL_PREFIX/lib:\$LD_LIBRARY_PATH" +if [[ $INSTALL_OPENCV == true ]]; then + echo "Add $INSTALL_PREFIX/lib/python$PYTHON_VERSION/dist-packages to your PYTHONPATH to use OpenCV from Python" +fi +if [[ $INSTALL_CAFFE == true ]]; then + echo "Add $INSTALL_PREFIX/python to your PYTHONPATH to use Caffe from Python" +fi diff --git a/docker/Dockerfile.scanner b/docker/Dockerfile.scanner new file mode 100644 index 00000000..cb26c6db --- /dev/null +++ b/docker/Dockerfile.scanner @@ -0,0 +1,30 @@ +ARG tag=gpu +FROM scannerresearch/scanner-base:ubuntu16.04-${tag} +MAINTAINER Will Crichton "wcrichto@cs.stanford.edu" +ARG cores=1 +ARG gpu=ON +ARG deps_opt='' + +ADD . /opt/scanner +WORKDIR /opt/scanner +ENV Caffe_DIR /usr/local +ENV LD_LIBRARY_PATH \ + "/usr/lib/x86_64-linux-gnu:/usr/local/cuda/lib64:$LD_LIBRARY_PATH:/usr/local/cuda/lib64/stubs" +ENV PKG_CONFIG_PATH "/usr/local/lib/pkgconfig:$PKG_CONFIG_PATH" +RUN cd /opt/scanner && \ + (bash deps.sh --install-none --prefix /usr/local ${deps_opt}) && \ + mkdir build && cd build && \ + cmake -D BUILD_IMGPROC_OPS=ON \ + -D BUILD_CAFFE_OPS=ON \ + -D BUILD_OPENFACE_OPS=OFF \ + -D BUILD_TESTS=ON \ + -D BUILD_CUDA=${gpu} \ + -D CMAKE_BUILD_TYPE=RelWithDebinfo \ + .. && \ + cd ..
&& \ + (yes | pip3 uninstall grpcio protobuf) && \ + ./build.sh && \ + ldconfig + +ENV LC_ALL C.UTF-8 +ENV LANG C.UTF-8 diff --git a/docker/build-all-base.sh b/docker/build-all-base.sh new file mode 100755 index 00000000..d8abfa36 --- /dev/null +++ b/docker/build-all-base.sh @@ -0,0 +1,74 @@ +#!/bin/bash +set -e + +NO_CACHE=false +CORES=$(nproc) + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +for dir in $DIR/*/ +do + base=`basename ${dir%*/}` + + cp $DIR/../deps.sh $dir/deps.sh + + rm -rf $dir/thirdparty + mkdir -p $dir/thirdparty + cp -r $DIR/../thirdparty/resources $dir/thirdparty/ + + function build { + local TYPE=$1 + local TAG=$2 + local BASE_TAG=$3 + + docker build \ + --build-arg cores=$CORES \ + --build-arg base_tag=$BASE_TAG \ + --no-cache=$NO_CACHE \ + -t scannerresearch/scanner-base:$TAG \ + -f $dir/Dockerfile.$TYPE \ + $dir 2>&1 > ${TAG}-output.log \ + && rm ${TAG}-output.log \ + || { echo "Building $TAG failed! Check $TAG-output.log."; } + } + + function build_chain { + local TYPE=$1 + local TAG=$2 + local BASE_TAG=$3 + + build base $TAG-base $BASE_TAG && \ + build $TYPE $TAG scannerresearch/scanner-base:$TAG-base + } + + function push { + docker push scannerresearch/scanner-base:$1 + } + + function build_push_gpu { + local CUDA_MAJOR_VERSION=$1 + local CUDA_VERSION=$2 + local CUDNN_VERSION=$3 + local BASE_TAG=nvidia/cuda:${CUDA_VERSION}-${CUDNN_VERSION}-devel-ubuntu16.04 + local TAG=$base-gpu-$CUDA_VERSION-$CUDNN_VERSION + + build_chain gpu${CUDA_MAJOR_VERSION} $TAG $BASE_TAG && \ + push $TAG + } + + + base_tag=scannerresearch/scanner-base:$base + + # Build cpu with ubuntu:16.04 + build_chain cpu $base-cpu ubuntu:16.04 & + + # GPU + build_push_gpu 8 8.0 cudnn6 & + build_push_gpu 8 8.0 cudnn7 & + build_push_gpu 9 9.0 cudnn7 & + build_push_gpu 9 9.1 cudnn7 & + + wait $(jobs -p) + + push $base-cpu +done diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml new file mode 100644 index 00000000..5530e21f --- /dev/null +++ 
b/docker/docker-compose.yml @@ -0,0 +1,17 @@ +version: "2.3" +services: + gpu: + image: scannerresearch/scanner:gpu-9.1-cudnn7-latest + command: jupyter notebook --allow-root --ip=0.0.0.0 --port=8888 + ports: + - "8888:8888" + volumes: + - .:/app + runtime: nvidia + cpu: + image: scannerresearch/scanner:cpu-latest + command: jupyter notebook --allow-root --ip=0.0.0.0 --port=8888 + ports: + - "8888:8888" + volumes: + - .:/app diff --git a/docker/ubuntu16.04/Dockerfile.base b/docker/ubuntu16.04/Dockerfile.base new file mode 100644 index 00000000..5f05ef64 --- /dev/null +++ b/docker/ubuntu16.04/Dockerfile.base @@ -0,0 +1,48 @@ +# Scanner base image for Ubuntu 16.04 + +ARG base_tag +FROM ${base_tag} +MAINTAINER Will Crichton "wcrichto@cs.stanford.edu" +ARG cores=1 +ARG cpu_only=OFF + +# Apt-installable dependencies +RUN apt-get update && apt-get upgrade -y && \ + apt-get install -y software-properties-common && \ + add-apt-repository -y ppa:git-core/ppa && \ + apt-get update && \ + apt-get install -y \ + build-essential \ + git libgtk2.0-dev pkg-config unzip llvm-5.0-dev clang-5.0 libc++-dev \ + libgflags-dev libgtest-dev libssl-dev libcurl3-dev liblzma-dev \ + libeigen3-dev libgoogle-glog-dev libatlas-base-dev libsuitesparse-dev \ + libgflags-dev libx264-dev libopenjpeg-dev libxvidcore-dev \ + libpng-dev libjpeg-dev libbz2-dev python-pip wget \ + libleveldb-dev libsnappy-dev libhdf5-serial-dev liblmdb-dev python-dev \ + python-tk autoconf autogen libtool libtbb-dev libopenblas-dev \ + liblapacke-dev swig yasm python3.5 python3-pip cpio automake libass-dev \ + libfreetype6-dev libsdl2-dev libtheora-dev libtool \ + libva-dev libvdpau-dev libvorbis-dev libxcb1-dev libxcb-shm0-dev \ + libxcb-xfixes0-dev mercurial texinfo zlib1g-dev curl libcap-dev \ + libgnutls-dev libpq-dev postgresql + +RUN apt-get install -y --no-install-recommends libboost-all-dev + +# Non-apt-installable dependencies +ENV deps /deps +WORKDIR ${deps} + +# CMake (we use 3.7 because >3.8 has issues 
building OpenCV due to http_proxy) +RUN wget "https://cmake.org/files/v3.7/cmake-3.7.0.tar.gz" && \ + tar -xf cmake-3.7.0.tar.gz && cd ${deps}/cmake-3.7.0 && \ + ./bootstrap --parallel=${cores} -- -DCMAKE_USE_OPENSSL=ON && \ + make -j${cores} && \ + make install && \ + rm -rf ${deps}/cmake-3.7.0.tar.gz ${deps}/cmake-3.7.0 + +# Python dependencies +WORKDIR /opt/scanner-base +ADD . . +RUN pip3 install -r requirements.txt + +ENV NVIDIA_DRIVER_CAPABILITIES compute,utility,video diff --git a/docker/ubuntu16.04/Dockerfile.cpu b/docker/ubuntu16.04/Dockerfile.cpu new file mode 100644 index 00000000..279f1021 --- /dev/null +++ b/docker/ubuntu16.04/Dockerfile.cpu @@ -0,0 +1,15 @@ +# Scanner base CPU image for Ubuntu 16.04 + +ARG base_tag +FROM ${base_tag} +MAINTAINER Will Crichton "wcrichto@cs.stanford.edu" +ARG cores=1 + +RUN bash ./deps.sh --install-all --prefix /usr/local && \ + rm -rf /opt/scanner-base + +ENV PYTHONPATH /usr/local/python:${PYTHONPATH} +ENV PYTHONPATH /usr/local/lib/python3.5/site-packages:${PYTHONPATH} +ENV PYTHONPATH /usr/local/lib/python3.5/dist-packages:${PYTHONPATH} + +WORKDIR / diff --git a/docker/ubuntu16.04/Dockerfile.gpu8 b/docker/ubuntu16.04/Dockerfile.gpu8 new file mode 100644 index 00000000..44c346bd --- /dev/null +++ b/docker/ubuntu16.04/Dockerfile.gpu8 @@ -0,0 +1,21 @@ +# Scanner base GPU image for Ubuntu 16.04 CUDA 8.0 + +ARG base_tag +FROM ${base_tag} +MAINTAINER Will Crichton "wcrichto@cs.stanford.edu" +ARG cores=1 + +ADD thirdparty/resources/cuda/libnvcuvid.so.367.48 /usr/lib/x86_64-linux-gnu/libnvcuvid.so +RUN ln -s /usr/local/cuda-8.0/targets/x86_64-linux/lib/stubs/libcuda.so \ + /usr/local/cuda-8.0/targets/x86_64-linux/lib/stubs/libcuda.so.1 +ENV CUDA_LIB_PATH /usr/local/cuda/lib64/stubs + +RUN bash ./deps.sh --install-all --prefix /usr/local --use-gpu && \ + rm -rf /opt/scanner-base + +ENV LD_LIBRARY_PATH /usr/local/intel/mkl/lib:${LD_LIBRARY_PATH} +ENV PYTHONPATH /usr/local/python:${PYTHONPATH} +ENV PYTHONPATH 
/usr/local/lib/python3.5/site-packages:${PYTHONPATH} +ENV PYTHONPATH /usr/local/lib/python3.5/dist-packages:${PYTHONPATH} + +WORKDIR / diff --git a/docker/ubuntu16.04/Dockerfile.gpu9 b/docker/ubuntu16.04/Dockerfile.gpu9 new file mode 100644 index 00000000..5a8dddc9 --- /dev/null +++ b/docker/ubuntu16.04/Dockerfile.gpu9 @@ -0,0 +1,21 @@ +# Scanner base GPU image for Ubuntu 16.04 CUDA 9.X + +ARG base_tag +FROM ${base_tag} +MAINTAINER Will Crichton "wcrichto@cs.stanford.edu" +ARG cores=1 + +ADD thirdparty/resources/cuda/libnvcuvid.so.387.26 /usr/lib/x86_64-linux-gnu/libnvcuvid.so +RUN ln -s /usr/local/cuda/targets/x86_64-linux/lib/stubs/libcuda.so \ + /usr/local/cuda/targets/x86_64-linux/lib/stubs/libcuda.so.1 +ENV CUDA_LIB_PATH /usr/local/cuda/lib64/stubs + +RUN bash ./deps.sh --install-all --prefix /usr/local --use-gpu && \ + rm -rf /opt/scanner-base + +ENV LD_LIBRARY_PATH /usr/local/intel/mkl/lib:${LD_LIBRARY_PATH} +ENV PYTHONPATH /usr/local/python:${PYTHONPATH} +ENV PYTHONPATH /usr/local/lib/python3.5/site-packages:${PYTHONPATH} +ENV PYTHONPATH /usr/local/lib/python3.5/dist-packages:${PYTHONPATH} + +WORKDIR / diff --git a/docker/ubuntu16.04/requirements.txt b/docker/ubuntu16.04/requirements.txt new file mode 100644 index 00000000..f22e2356 --- /dev/null +++ b/docker/ubuntu16.04/requirements.txt @@ -0,0 +1,18 @@ +ipython==5.3.0 +numpy==1.12.0 +protobuf==3.2.0 +toml==0.9.2 +youtube-dl +scipy==0.18.1 +scikit-learn==0.18.1 +scikit-image==0.12.3 +enum34==1.1.6 +matplotlib==2.0.0 +seaborn==0.7.1 +grpcio==1.7.0 +doxypypy==0.8.8.6 +pytest==3.0.6 +twine==1.8.1 +ipaddress==1.0.18 +plotly==2.0.6 +jupyter==1.0.0 diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..7b8fb973 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,28 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SPHINXPROJ = scanner +SOURCEDIR = . 
+BUILDDIR = ../build/docs + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +watch: autobuild notifywait + +autobuild: + sphinx-autobuild -b html "$(SOURCEDIR)" $(SPHINXOPTS) $(O) $(BUILDDIR)/html -p 4567 -H 0.0.0.0 + +notifywait: + inotifywait -mr .. @../build @../thirdparty @../dist @../docs --format '%f' | while read i; do cd .. && python3 python/setup.py bdist_wheel && cd docs && touch Makefile; done diff --git a/docs/_static/custom.css b/docs/_static/custom.css new file mode 100644 index 00000000..18edaf82 --- /dev/null +++ b/docs/_static/custom.css @@ -0,0 +1,192 @@ +pre { + padding: 7px 15px; +} + +div.document { + position: relative; +} + +div.sphinxsidebar { + position: absolute; + margin: 0; + left: 0; + top: 0; + height: 100%; +} + +div.sphinxsidebarwrapper { + position: sticky; + left: 0; + top: 15px; +} + +div.related { + display: none; +} + +div.body { + color: rgb(42, 45, 50); +} + +div.body a.reference { + color: #900; + border-bottom: 1px dotted #900; +} + +div.body a.reference:visited { + color: #700; + border-bottom: 1px dotted #700; +} + +div.body a.reference:hover { + text-decoration: none; +} + +div.body a.reference:hover { + color: #d00; + text-decoration: none; + border-bottom-color: #d00; +} + +div.body code.xref { + font-weight: bold; + border: none !important; + color: black !important; +} + +div.body code.xref:hover { + border: none !important; + color: black !important; + text-decoration: none !important; + background: #eee; +} + +/* copied from ghbtn */ +.github-btn{ + height:20px; + overflow:hidden +} +.gh-btn,.gh-count{ + padding:2px 5px 2px 4px; + color:#333; + text-decoration:none; 
+ text-shadow:0 1px 0 #fff; + white-space:nowrap; + cursor:pointer; + border-radius:3px +} +.gh-btn { + │ background-color:#eee; + background-image:-webkit-gradient(linear,left top,left bottom,color-stop(0,#fcfcfc),color-stop(100%,#eee)); + background-image:-webkit-linear-gradient(top,#fcfcfc 0,#eee 100%); + background-image:-moz-linear-gradien\ t(top,#fcfcfc 0,#eee 100%); + background-image:-ms-linear-gradient(top,#fcfcfc 0,#eee 100%); + background-image:-o-linear-gradient(top,#fcfcfc 0,#eee 100%); + background-image:linear-gradient(to bottom,#fcfcfc 0,#eee 100%); + filter:progid:DXImage\ Transform.Microsoft.gradient(startColorstr='#fcfcfc', endColorstr='#eeeeee', GradientType=0); + background-repeat:no-repeat; + border:1px solid #d5d5d5 +} + .gh-btn:focus,.gh-btn:hover{ + text-decoration:none; + background-color:#ddd; + background-image:-webkit-gradient(linear,left top,left bottom,color-stop(0,#eee),color-stop(100%,#ddd)); + background-image:-webkit-linear-gradient(top,#eee 0,#ddd 100\ %); + background-image:-moz-linear-gradient(top,#eee 0,#ddd 100%); + background-image:-ms-linear-gradient(top,#eee 0,#ddd 100%); + background-image:-o-linear-gradient(top,#eee 0,#ddd 100%); + background-image:linear-gradient(to bottom,#eee 0,#ddd \ 100%); + filter:progid:DXImageTransform.Microsoft.gradient(startColorstr='#eeeeee', endColorstr='#dddddd', GradientType=0); + border-color:#ccc +} + .gh-btn:active{ + background-image:none; + background-color:#dcdcdc; + border-color:#b5b5b5; + box-shadow:inset 0 2px 4px rgba(0,0,0,.15) +} +.gh-ico{ + width:14px; + height:14px; + margin-right:4px; + background-image:url(data:image/svg+xml; + base64,PHN2ZyB4bWxucz\ 0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHhtbG5zOnhsaW5rPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5L3hsaW5rIiB2ZXJzaW9uPSIxLjEiIGlkPSJMYXllcl8xIiB4PSIwcHgiIHk9IjBweCIgd2lkdGg9IjQwcHgiIGhlaWdodD0iNDBweCIgdmlld0JveD0iMTIgMTIgNDAgNDAiIGVuYWJsZS1iYWNrZ\ 
3JvdW5kPSJuZXcgMTIgMTIgNDAgNDAiIHhtbDpzcGFjZT0icHJlc2VydmUiPjxwYXRoIGZpbGw9IiMzMzMzMzMiIGQ9Ik0zMiAxMy40Yy0xMC41IDAtMTkgOC41LTE5IDE5YzAgOC40IDUuNSAxNS41IDEzIDE4YzEgMC4yIDEuMy0wLjQgMS4zLTAuOWMwLTAuNSAwLTEuNyAwLTMuMiBjLTUuMyAxLjEtNi40LTIu\ Ni02LjQtMi42QzIwIDQxLjYgMTguOCA0MSAxOC44IDQxYy0xLjctMS4yIDAuMS0xLjEgMC4xLTEuMWMxLjkgMC4xIDIuOSAyIDIuOSAyYzEuNyAyLjkgNC41IDIuMSA1LjUgMS42IGMwLjItMS4yIDAuNy0yLjEgMS4yLTIuNmMtNC4yLTAuNS04LjctMi4xLTguNy05LjRjMC0yLjEgMC43LTMuNyAyLTUuMWMtMC4\ yLTAuNS0wLjgtMi40IDAuMi01YzAgMCAxLjYtMC41IDUuMiAyIGMxLjUtMC40IDMuMS0wLjcgNC44LTAuN2MxLjYgMCAzLjMgMC4yIDQuNyAwLjdjMy42LTIuNCA1LjItMiA1LjItMmMxIDIuNiAwLjQgNC42IDAuMiA1YzEuMiAxLjMgMiAzIDIgNS4xYzAgNy4zLTQuNSA4LjktOC43IDkuNCBjMC43IDAuNiAxLj\ MgMS43IDEuMyAzLjVjMCAyLjYgMCA0LjYgMCA1LjJjMCAwLjUgMC40IDEuMSAxLjMgMC45YzcuNS0yLjYgMTMtOS43IDEzLTE4LjFDNTEgMjEuOSA0Mi41IDEzLjQgMzIgMTMuNHoiLz48L3N2Zz4=); + background-size:100% 100%; + background-repeat:no-repeat +} +.gh-count{ + position:relative; + d\ isplay:none; + margin-left:4px; + background-color:#fafafa; + border:1px solid #d4d4d4 +} +.gh-count:focus,.gh-count:hover{ + color:#4183C4 +} +.gh-count:after,.gh-count:before{ + content:''; + position:absolute; + display:inline-block; + width:0; + height:0; + border-colo\ r:transparent; + border-style:solid +} +.gh-count:before{ + top:50%; + left:-3px; + margin-top:-4px; + border-width:4px 4px 4px 0; + border-right-color:#fafafa +} +.gh-count:after{ + top:50%; + left:-4px; + z-index:-1; + margin-top:-5px; + border-width:5px 5px 5px 0; + border-ri\ ght-color:#d4d4d4 +} +.github-btn-large{ + height:30px +} +.github-btn-large .gh-btn,.github-btn-large .gh-count{ + padding:3px 10px 3px 8px; + font-size:16px; + line-height:22px; + border-radius:4px +} +.github-btn-large .gh-ico{ + width:20px; + height:20px +} +.github-b\ tn-large .gh-count{ + margin-left:6px +} +.github-btn-large .gh-count:before{ + left:-5px; + margin-top:-6px; + border-width:6px 6px 6px 0 +} +.github-btn-large .gh-count:after{ + left:-6px; + 
margin-top:-7px; + border-width:7px 7px 7px 0 +} +.gh-text { + font-style: normal; + font-variant-ligatures: normal; + font-variant-caps: normal; + font-variant-numeric: normal; + font-variant-east-asian: normal; + font-weight: 700; + font-stretch: normal; + font-size: 11px; + line-height: 14px; + font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; +} + +#scanner-efficient-video-analysis-at-scale h1 { + font-size: 200%; +} diff --git a/docs/_templates/about.html b/docs/_templates/about.html new file mode 100644 index 00000000..00b7e2ed --- /dev/null +++ b/docs/_templates/about.html @@ -0,0 +1,71 @@ + +{% if logo %} +

{{ project }}

+ {% endif %} + +

+{% else %} +

{{ project }}

+{% endif %} + +{% if description %} +

{{ description }}

+{% endif %} + +{% if github_user and github_repo %} +{% if github_button|lower == 'true' %} +

+ View on GitHub + + + https://secure.travis-ci.org/scanner-research/scanner.svg?branch=master + +

+{% endif %} +{% endif %} + +

+

diff --git a/docs/_templates/navigation.html b/docs/_templates/navigation.html new file mode 100644 index 00000000..0146a429 --- /dev/null +++ b/docs/_templates/navigation.html @@ -0,0 +1,37 @@ + +

{{ _('Navigation') }}

+{{ toctree(collapse=True, maxdepth=4) }} diff --git a/docs/about.rst b/docs/about.rst new file mode 100644 index 00000000..7e0ccf23 --- /dev/null +++ b/docs/about.rst @@ -0,0 +1,12 @@ +Citation & About +================ + +Scanner is an active research project, part of a collaboration between Carnegie +Mellon and Stanford University. Please contact +`Alex Poms `_ and +`Will Crichton `_ with questions. + + +Paper citation +-------------- +Scanner will appear in the proceedings of SIGGRAPH 2018 as `"Scanner: Efficient Video Analysis at Scale `__ by Poms, Crichton, Hanrahan, and Fatahalian. If you use Scanner in your research, we'd appreciate it if you cite the paper. diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 00000000..22cbb1e2 --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,9 @@ +API Reference +============= + +.. toctree:: + :maxdepth: 3 + + api/stdlib + api/scannerpy + api/scanner diff --git a/docs/api/scanner.rst b/docs/api/scanner.rst new file mode 100644 index 00000000..513f1439 --- /dev/null +++ b/docs/api/scanner.rst @@ -0,0 +1,4 @@ +Scanner C++ API +=============== + +.. toctree:: diff --git a/docs/api/scannerpy.rst b/docs/api/scannerpy.rst new file mode 100644 index 00000000..46e8fcd6 --- /dev/null +++ b/docs/api/scannerpy.rst @@ -0,0 +1,135 @@ +Scanner Python API +================== + +.. toctree:: + +scannerpy\.database module +-------------------------- + +.. automodule:: scannerpy.database + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.config module +------------------------ + +.. automodule:: scannerpy.config + :members: + :undoc-members: + :show-inheritance: + + +scannerpy\.op module +-------------------- + +.. automodule:: scannerpy.op + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.streams module +-------------------- + +.. automodule:: scannerpy.streams + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.source module +------------------------ + +.. 
automodule:: scannerpy.source + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.sink module +------------------------ + +.. automodule:: scannerpy.sink + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.job module +--------------------- + +.. automodule:: scannerpy.job + :members: + :undoc-members: + :show-inheritance: + + +scannerpy\.column module +------------------------ + +.. automodule:: scannerpy.column + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.common module +------------------------ + +.. automodule:: scannerpy.common + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.kernel module +------------------------ + +.. automodule:: scannerpy.kernel + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.partitioner module +----------------------------- + +.. automodule:: scannerpy.partitioner + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.profiler module +-------------------------- + +.. automodule:: scannerpy.profiler + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.protobuf\_generator module +------------------------------------- + +.. automodule:: scannerpy.protobuf_generator + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.sampler module +------------------------- + +.. automodule:: scannerpy.sampler + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.table module +----------------------- + +.. automodule:: scannerpy.table + :members: + :undoc-members: + :show-inheritance: + + +Module contents +--------------- + +.. automodule:: scannerpy + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/api/scannerpy.stdlib.rst b/docs/api/scannerpy.stdlib.rst new file mode 100644 index 00000000..0f5aae5d --- /dev/null +++ b/docs/api/scannerpy.stdlib.rst @@ -0,0 +1,105 @@ +scannerpy\.stdlib package +========================= + +This section is still being written. 
Send an email to apoms@cs.cmu.edu if you want this section written sooner. + +Submodules +---------- + +scannerpy\.stdlib\.bboxes module +-------------------------------- + +.. automodule:: scannerpy.stdlib.bboxes + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.stdlib\.build\_flags module +-------------------------------------- + +.. automodule:: scannerpy.stdlib.build_flags + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.stdlib\.montage module +--------------------------------- + +.. automodule:: scannerpy.stdlib.montage + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.stdlib\.net\_descriptor module +----------------------------------------- + +.. automodule:: scannerpy.stdlib.net_descriptor + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.stdlib\.pipelines module +----------------------------------- + +.. automodule:: scannerpy.stdlib.pipelines + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.stdlib\.pose\_nms\_kernel module +------------------------------------------- + +.. automodule:: scannerpy.stdlib.pose_nms_kernel + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.stdlib\.poses module +------------------------------- + +.. automodule:: scannerpy.stdlib.poses + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.stdlib\.readers module +--------------------------------- + +.. automodule:: scannerpy.stdlib.readers + :members: + :undoc-members: + :show-inheritance: + + +scannerpy\.stdlib\.util module +------------------------------ + +.. automodule:: scannerpy.stdlib.util + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.stdlib\.video module +------------------------------- + +.. automodule:: scannerpy.stdlib.video + :members: + :undoc-members: + :show-inheritance: + +scannerpy\.stdlib\.writers module +--------------------------------- + +.. 
automodule:: scannerpy.stdlib.writers + :members: + :undoc-members: + :show-inheritance: + + +Module contents +--------------- + +.. automodule:: scannerpy.stdlib + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/api/stdlib.rst b/docs/api/stdlib.rst new file mode 100644 index 00000000..955fccd6 --- /dev/null +++ b/docs/api/stdlib.rst @@ -0,0 +1,15 @@ +.. _standard_library: + +Scanner Standard Library +======================== + +This section is still being written. Send an email to apoms@cs.cmu.edu if you want this section written sooner. + +Sources +------- + +Ops +--- + +Sinks +----- diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 00000000..5edf76f1 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,199 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# scanner documentation build configuration file, created by +# sphinx-quickstart on Sun Nov 26 19:06:21 2017. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys +import sphinx_readable_theme + + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.napoleon', + 'sphinx_autodoc_typehints', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.mathjax', + 'sphinx.ext.viewcode', + 'sphinx.ext.githubpages', +] + +napoleon_use_ivar = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = 'Scanner' +copyright = '2018, Alex Poms, Will Crichton' +author = 'Alex Poms, Will Crichton' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '0.0.1' +# The full version, including alpha/beta/rc tags. +release = '0.0.1' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. 
+# +html_theme_path = [sphinx_readable_theme.get_html_theme_path()] +html_theme = 'readable' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# This is required for the alabaster theme +# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars +html_sidebars = { + '**': [ + 'about.html', + 'navigation.html', + 'relations.html', + 'searchbox.html' + ], +} + +html_context = { + 'description': 'Efficient Video Analysis at Scale', + 'github_user': 'scanner-research', + 'github_repo': 'scanner', + 'badge_branch': 'master', + 'github_button': True, + 'github_type': 'star', + 'github_count': 'true', + 'travis_button': True, + 'codecov_button': False, +} + +# -- Options for HTMLHelp output ------------------------------------------ + +# Output file base name for HTML help builder. +htmlhelp_basename = 'scannerdoc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). 
+latex_documents = [ + (master_doc, 'scanner.tex', 'scanner Documentation', + 'Alex Poms, Will Crichton', 'manual'), +] + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'scanner', 'scanner Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'scanner', 'scanner Documentation', + author, 'scanner', 'One line description of project.', + 'Miscellaneous'), +] + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = {'https://docs.python.org/': None} + +def setup(app): + app.add_stylesheet('custom.css') diff --git a/docs/docker.rst b/docs/docker.rst new file mode 100644 index 00000000..7d910b37 --- /dev/null +++ b/docs/docker.rst @@ -0,0 +1,32 @@ +.. _docker: + +Docker +====== + +`Docker `__ is a service for managing containers, which you can think of as lightweight virtual machines. If you want to run Scanner in a distributed setting (e.g. on a cloud platform), Docker is essential for providing a consistent runtime environment on your worker machines, but it's also useful for testing locally to avoid having to install all of Scanner's dependencies. We provide prebuilt Docker images containing Scanner and all its dependencies (e.g. OpenCV, Caffe) at `scannerresearch/scanner `__. + +To start using Scanner with Docker, first install `Docker `__. If you have a GPU and you're running on Linux, you can install `nvidia-docker `__ (which provides GPU support inside Docker containers). Then run: + +.. 
code-block:: bash + + pip3 install --upgrade docker-compose + wget https://raw.githubusercontent.com/scanner-research/scanner/master/docker/docker-compose.yml + docker-compose run --service-ports cpu /bin/bash + +If you installed nvidia-docker, then use :code:`gpu` intead of :code:`cpu` in the above :code:`docker-compose` commands. + +This installs the `docker-compose `__ utility which helps manage Docker containers. It uses the :code:`docker-compose.yml` configuration file to create an instance of the Scanner docker image. + +If these commands were successful, you should now have bash session inside the docker container. To start using Scanner to process videos, check out :ref:`getting-started`. + +The full set of docker configurations we provide are: + +- :code:`scannerresearch/scanner:cpu-VERSION` - CPU-only build +- :code:`scannerresearch/scanner:gpu-9.1-cudnn7-VERSION` - CUDA 9.1, CUDNN 7 +- :code:`scannerresearch/scanner:gpu-8.0-cudnn7-VERSION` - CUDA 8.0, CUDNN 7 +- :code:`scannerresearch/scanner:gpu-8.0-cudnn6-VERSION` - CUDA 8.0, CUDNN 6 + +where :code:`VERSION` is one of: + +- :code:`latest` - The most recent build of the master branch +- :code:`vX.X.X` - A git tag (where X is an integer) diff --git a/docs/from_source.rst b/docs/from_source.rst new file mode 100644 index 00000000..3f4d8d5b --- /dev/null +++ b/docs/from_source.rst @@ -0,0 +1,101 @@ +.. _from_source: + +Building Scanner from source +---------------------------- + +Scanner provides a dependency script :code:`deps.sh` to automatically install any or all +of its major dependencies if they are not already installed. Each of these +dependencies has a set of required system-level packages. 
+ +Scanner depends on the following major dependencies: + - Python >= 3.5 + - pybind >= 1.58.0 + - ffmpeg >= 3.3.1 + - opencv >= 3.4.0 + - protobuf == 3.4.0 + - grpc == 1.7.2 + - caffe >= rc5 OR intel-caffe >= 1.0.6 + +To compile with CUDA support, it requires: + + - `CUDA `__ 8.0 or above + - `cuDNN `__ v6.x or above + +Install system-level packages +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Ubuntu 16.04 +```````````` + +Run the following command: + +.. code-block:: bash + + apt-get install \ + build-essential \ + cmake git libgtk2.0-dev pkg-config unzip llvm-5.0-dev clang-5.0 libc++-dev \ + libgflags-dev libgtest-dev libssl-dev libcurl3-dev liblzma-dev \ + libeigen3-dev libgoogle-glog-dev libatlas-base-dev libsuitesparse-dev \ + libgflags-dev libx264-dev libopenjpeg-dev libxvidcore-dev \ + libpng-dev libjpeg-dev libbz2-dev wget \ + libleveldb-dev libsnappy-dev libhdf5-serial-dev liblmdb-dev python-dev \ + python-tk autoconf autogen libtool libtbb-dev libopenblas-dev \ + liblapacke-dev swig yasm python3.5 python3-pip cpio automake libass-dev \ + libfreetype6-dev libsdl2-dev libtheora-dev libtool \ + libva-dev libvdpau-dev libvorbis-dev libxcb1-dev libxcb-shm0-dev \ + libxcb-xfixes0-dev mercurial texinfo zlib1g-dev curl libcap-dev \ + libboost-all-dev libgnutls-dev libpq-dev postgresql + +macOS +````` + +Install `homebrew `__ then run the following command: + +.. code-block:: bash + + brew install coreutils cmake git wget unzip pkg-config \ + automake fdk-aac lame libass libtool libvorbis libvpx \ + opus sdl shtool texi2html theora x264 x265 xvid nasm \ + eigen glog \ + snappy leveldb gflags glog szip lmdb hdf5 boost boost-python3 \ + llvm python gnutls postgresql libpq libpqxx + + +Run deps.sh +~~~~~~~~~~~ + +To install or specify where your major dependencies are, from the top-level directory run: + +.. code-block:: bash + + bash ./deps.sh + +This script will query you for each major dependency and install those that are not already installed. 
By default, it will install the dependencies to a local directory inside the scanner repo (it will not install system-wide). + +.. note:: + + Make sure to follow the directions after :code:`deps.sh` finishes that tell you to + add entries to your PATH, LD_LIBRARY_PATH, and PYTHONPATH + +Build Scanner +~~~~~~~~~~~~~ + +Run the following commands from the top-level directory: + +.. code-block:: bash + + mkdir build + cd build + cmake .. + make -j$(nproc) + +Install scannerpy python package +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Run the following command from the top-level directory: + +.. code-block:: bash + + bash ./build.sh + +Congratulations! You've installed the scannerpy package. To learn how to start +using Scanner, check out :ref:`getting-started`. diff --git a/docs/getting-started.rst b/docs/getting-started.rst new file mode 100644 index 00000000..a4ad38cb --- /dev/null +++ b/docs/getting-started.rst @@ -0,0 +1,15 @@ +.. _getting-started: + +Getting Started +=============== + +To understand how to use Scanner, check out the :ref:`quickstart`, which talks through the components of the system with a simple example. + +Once you have a handle on the basics, you can checkout :ref:`walkthrough`, which provides a more full fledged walkthrough of using Scanner in a real application. Or you can take a look at the numerous Scanner `tutorials `__ available on github which provide examples of using key features of the Scanner API. + +.. toctree:: + :maxdepth: 1 + + quickstart + walkthrough + tutorial diff --git a/docs/handbook/concepts.rst b/docs/handbook/concepts.rst new file mode 100644 index 00000000..74289533 --- /dev/null +++ b/docs/handbook/concepts.rst @@ -0,0 +1,24 @@ +Concepts +======== + + +Database +-------- + +Computation Graphs +------------------ + +Sources +~~~~~~~ + +Ops +~~~ + +Sinks +~~~~~ + +Jobs +---- + +.. 
toctree:: + :maxdepth: 1 diff --git a/docs/handbook/concepts/database.rst b/docs/handbook/concepts/database.rst new file mode 100644 index 00000000..eb3f1275 --- /dev/null +++ b/docs/handbook/concepts/database.rst @@ -0,0 +1,2 @@ +Database +======== diff --git a/docs/handbook/concepts/graphs.rst b/docs/handbook/concepts/graphs.rst new file mode 100644 index 00000000..59b4f8fb --- /dev/null +++ b/docs/handbook/concepts/graphs.rst @@ -0,0 +1,14 @@ +Computation Graphs +================== + +Sources +------- + +Ops +--- + +Sinks +----- + +.. toctree:: + :maxdepth: 1 diff --git a/docs/handbook/custom-ops.rst b/docs/handbook/custom-ops.rst new file mode 100644 index 00000000..ac3eea16 --- /dev/null +++ b/docs/handbook/custom-ops.rst @@ -0,0 +1,2 @@ +Writing a custom Op +=================== diff --git a/docs/handbook/custom-sink.rst b/docs/handbook/custom-sink.rst new file mode 100644 index 00000000..425f3124 --- /dev/null +++ b/docs/handbook/custom-sink.rst @@ -0,0 +1,2 @@ +Writing a custom Sink +===================== diff --git a/docs/handbook/custom-source.rst b/docs/handbook/custom-source.rst new file mode 100644 index 00000000..63e91726 --- /dev/null +++ b/docs/handbook/custom-source.rst @@ -0,0 +1,2 @@ +Writing a custom Source +======================= diff --git a/docs/handbook/distributed.rst b/docs/handbook/distributed.rst new file mode 100644 index 00000000..84b46e6f --- /dev/null +++ b/docs/handbook/distributed.rst @@ -0,0 +1,2 @@ +Using a Cluster +=============== diff --git a/docs/handbook/kubernetes.rst b/docs/handbook/kubernetes.rst new file mode 100644 index 00000000..613c8b89 --- /dev/null +++ b/docs/handbook/kubernetes.rst @@ -0,0 +1,12 @@ +.. _kubernetes: + +Scanner on Kubernetes +===================== + +The easiest way to scale out using Scanner is via `kubernetes `__. +Kubernetes is tool for automating the deployment and management of a cluster of +machines that run `containers `__ +(a container is akin to a light-weight VM image). 
+ +We provide a step-by-step example of getting Scanner up and running on your own +kubernetes cluster using `Google Cloud Platform `__. diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 00000000..532ab4c3 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,47 @@ +.. scanner documentation master file, created by + sphinx-quickstart on Sun Nov 26 19:06:21 2017. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +.. image:: scanner_logo.png + +=========================== + +Scanner is a system for developing applications that efficiently process large video datasets. Scanner applications can run on a multi-core laptop, a server packed with multiple GPUs, or a large number of machines in the cloud. Scanner has been used for: + +- **Labeling and data mining large video collections:** Scanner is in use at Stanford University as the compute engine for visual data mining applications that detect faces, commercials, human poses, etc. in datasets as big as 70,000 hours of TV news (12 billion frames, 20 TB) or 600 feature length movies (106 million frames). + +- **VR Video synthesis:** Scanner is in use at Facebook to scale the `Surround 360 VR video stitching software `__, which processes fourteen 2048x2048 input videos to produce 8k stereo video output. + +To learn more about Scanner, see the documentation below or read the SIGGRAPH +2018 Technical Paper: `"Scanner: Efficient Video Analysis at Scale" `__ by Poms, Crichton, Hanrahan, and Fatahalian. + +For easy access to off-the-shelf pipelines built using Scanner like face detection and optical flow, check out our `scannertools `__ library. + +Key Features +------------ + +Scanner's key features include: + +- **Video processing computations as dataflow graphs**. Like many modern ML frameworks, Scanner structures video analysis tasks as dataflow graphs whose nodes produce and consume sequences of per-frame data. 
Scanner's embodiment of the dataflow model includes operators useful for video processing tasks such as sparse frame sampling (e.g., "frames known to contain a face"), sliding window frame access (e.g., stencils for temporal smoothing), and stateful processing across frames (e.g., tracking). + +- **Videos as logical tables** To simplify the management of and access to large-numbers of videos, Scanner represents video collections and the pixel-level products of video frame analysis (e.g., flow fields, depth maps, activations) as tables in a data store. Scanner's data store features first-class support for video frame column types to facilitate key performance optimizations, such as storing video in compressed form and providing fast access to sparse lists of video frames. + +- **First-class support for GPU acceleration:** Since many video processing algorithms benefit from GPU acceleration, Scanner provides first-class support for writing dataflow graph operations that utilize GPU execution. Scanner also leverages specialized GPU hardware for video decoding when available. + +- **Fault tolerant, distributed execution:** Scanner applications can be run on the cores of a single machine, on a multi-GPU server, or scaled to hundreds of machines (potentially with heterogeneous numbers of GPUs), without significant source-level change. Scanner also provides fault tolerance, so your applications can not only utilize many machines, but use cheaper preemptible machines on cloud computing platforms. + + +What Scanner **is not**: + +Scanner is not a system for implementing new high-performance image and video processing kernels from scratch. However, Scanner can be used to create scalable video processing applications by composing kernels that already exist as part of popular libraries such as OpenCV, Caffe, TensorFlow, etc. or have been implemented in popular performance-oriented languages like `CUDA `__ or `Halide `__. 
Yes, you can write your own dataflow graph operations in Python or C++ too! + +.. toctree:: + :maxdepth: 2 + :includehidden: + + installation + getting-started + programming-handbook + api + about diff --git a/docs/installation.rst b/docs/installation.rst new file mode 100644 index 00000000..142e963b --- /dev/null +++ b/docs/installation.rst @@ -0,0 +1,21 @@ +.. _installation: + +Installation +============ + +Scanner has out-of-the-box compatibility with frameworks like OpenCV and Caffe, but the flip side is that installing all of the dependencies can take a long time. The easiest way to get started with Scanner is using our pre-built :ref:`docker` images, but we also support :ref:`from_source`. + +On MacOS, you can install Scanner using homebrew with the following commands: + +.. code-block:: bash + + brew tap scanner-research/homebrew-scanner + brew install scanner + pip3 install scannerpy + + +.. toctree:: + :maxdepth: 1 + + docker + from_source diff --git a/docs/programming-handbook.rst b/docs/programming-handbook.rst new file mode 100644 index 00000000..cbadc2cf --- /dev/null +++ b/docs/programming-handbook.rst @@ -0,0 +1,18 @@ +Programming Handbook +==================== + +The programming handbook is a collection of guides that provide more details +about the Scanner system or explain how to use some of the more advanced +functionality in Scanner, such as writing custom Ops or using :ref:`kubernetes`. + +.. toctree:: + :maxdepth: 2 + + handbook/kubernetes + + +.. handbook/concepts + handbook/distributed + handbook/custom-ops + handbook/custom-source + handbook/custom-sink diff --git a/docs/quickstart.rst b/docs/quickstart.rst new file mode 100644 index 00000000..a588110f --- /dev/null +++ b/docs/quickstart.rst @@ -0,0 +1,219 @@ +.. 
_quickstart: + +Quickstart +========== + +To explain how Scanner is used, let's walk through a simple example that reads every third frame from a video, resizes the frames, and then creates a new video from the sequence of resized frames. + +.. note:: + + This Quickstart walks you through a very basic Scanner application that downsamples a video in space and time. Once you are done with this guide, check out the `examples `__ directory for more useful applications, such as using Tensorflow `for detecting objects in all frames of a video `__ and Caffe for `face detection `__. + +To run the code discussed here, install Scanner (:ref:`installation`). Then from the top-level Scanner directory, run: + +.. code-block:: bash + + cd examples/apps/quickstart + wget https://storage.googleapis.com/scanner-data/public/sample-clip.mp4 + python3 main.py + +After :code:`main.py` exits, you should now have a resized version of :code:`sample-clip.mp4` named :code:`sample-clip-resized.mp4` in the current directory. Let's see how that happened by looking inside :code:`main.py`. + +Starting up Scanner +------------------- +The first step in any Scanner program is to create a :py:class:`~scannerpy.database.Database` object. The :py:class:`~scannerpy.database.Database` object manages videos or other data that you have may have stored from data processing you've done in the past. The Database object also provides the API to construct and execute new video processing jobs. + +.. code-block:: python + + from scannerpy import Database, Job + + db = Database() + +Ingesting a video into the Database +----------------------------------- +Scanner is designed to provide fast access to frames in videos, even under random access patterns. In order to provide this functionality, Scanner first needs to analyze the video to build an index on the video. For example, given an mp4 video named :code:`example.mp4`, we can ingest this video as follow: +.. 
code-block:: python + + db.ingest_videos([('table_name', 'example.mp4')]) + +Scanner analyzes the file to build the index and creates a :py:class:`~scannerpy.table.Table` for that video in the Scanner database called :code:`table_name`. You can see the contents of the database by running: + +.. code-block:: python + + >>> print(db.summarize()) + ** TABLES ** + --------------------------------------------------- + ID | Name | # rows | Columns | Committed + --------------------------------------------------- + 0 | table_name | 360 | index, frame | true + +By default, ingest copies the video data into the Scanner database (located at :code:`~/.scanner/db` by default). However, Scanner can also read videos without copying them using the :code:`inplace` flag. + +.. code-block:: python + + db.ingest_videos([('table_name', 'example.mp4')], inplace=True) + +This still builds the index for accessing the video but avoids copying the files +into the database. + +.. _defining_a_graph: + +Defining a Computation Graph +---------------------------- + +Now we can tell Scanner how to process the video by constructing a *computation graph*. A computation graph is a graph of input nodes (**Sources**), function nodes (**Ops**), and output nodes (**Sinks**). **Sources** can read data from the Scanner database (such as the table we ingested above) or from other sources of data, such as the filesystem or a SQL database. **Ops** represent functions that transform their inputs into new outputs. **Sinks**, like **Sources**, write data to the database or to other forms of persistent storage. + +Let's define a computation graph to read frames from the database, select every third frame, resize them to 640 x 480 resolution, and then save them back to a new database table. First, we'll create a Source that reads from a column in a table: + +.. code-block:: python + + frame = db.sources.FrameColumn() + +But wait a second, we didn't tell the **Source** the table and column it should read from. 
What's going on? Since it's fairly typical to use the same computation graph to process a collection of videos at once, Scanner adopts a "binding model" that lets the user define a computation graph up front and then later "bind" different videos to the inputs. We'll see this in action in the :ref:`defining_a_job` section. + +The :code:`frame` object returned by the **Source** represents the stream of frames that are stored in the table, and we'll use it as the input to the next operation: + +.. code-block:: python + + sampled_frame = db.streams.Stride(input=frame, stride=3) # Select every third frame + +This is where we select only every third frame from the stream of frames we read from the **Source**. This comes from a special class of ops (from :code:`db.streams`) that can change the size of a stream, as opposed to transforming inputs to outputs 1-to-1. + +We then process the sampled frames by instantiating a Resize **Op** that will resize the frames in the :code:`frame` stream to 640 x 480: + +.. code-block:: python + + resized = db.ops.Resize(frame=sampled_frame, width=640, height=480) + +This **Op** returns a new stream of frames which we call :code:`resized`. The Resize **Op** is one of the collection of built-in **Ops** in the :ref:`standard_library`. (You can learn how to write your own **Ops** by following the :ref:`tutorial`.) + +Finally, we write these resized frames to a column called 'frame' in a new table by passing them into a column **Sink**: + +.. code-block:: python + + output_frame = db.sinks.Column(columns={'frame': resized}) + +Putting it all together, we have: + +.. 
code-block:: python + + frame = db.sources.FrameColumn() + sampled_frame = db.streams.Stride(input=frame, stride=3) + resized = db.ops.Resize(frame=sampled_frame, width=640, height=480) + output_frame = db.sinks.Column(columns={'frame': resized}) + +At this point, we have defined a computation graph that describes the computation to run, but we haven't yet told Scanner to execute the graph. + +.. _defining_a_job: + +Defining Jobs +------------- + +As alluded to in :ref:`defining_a_graph`, we need to tell Scanner which table we should read and which table we should write to before executing the computation graph. We can perform this "binding" of arguments to graph nodes using a **Job**: + +.. code-block:: python + + job = Job(op_args={ + frame: db.table('table_name').column('frame'), + output_frame: 'resized_table' + }) + +Here, we say that the :code:`FrameColumn` indicated by :code:`frame` should read from the column :code:`frame` in the table :code:`"table_name"`, and that the output table indicated by :code:`output_frame` should be called :code:`"resized_table"`. + +Running a Job +-------------- + +Now we can run the computation graph over the video we ingested. This is done by simply calling :code:`run` on the database object, specifying the jobs and outputs that we are interested in: + +.. code-block:: python + + output_tables = db.run(output=output_frame, jobs=[job]) + +This call will block until Scanner has finished processing the job. You should see a progress bar while Scanner is executing the computation graph. Once the job is done, :code:`run` returns the newly computed tables, here shown as :code:`output_tables`. + +Reading the results of a Job +---------------------------- + +We can directly read the results of the job we just ran in the Python code by querying the :code:`frame` column on the table :code:`resized_table`: + +.. 
code-block:: python + + for resized_frame in db.table('resized_table').column('frame').load(): + print(resized_frame.shape) + +Video frames are returned as numpy arrays. Here we are printing out the shape of the frame, which should have a width of 640 and height of 480. + +Exporting to mp4 +---------------- + +We can also directly save the frame column as an mp4 file by calling :code:`save_mp4` on the :code:`frame` column: + +.. code-block:: python + + db.table('resized_table').column('frame').save_mp4('resized-video') + +After this call returns, an mp4 video should be saved to the current working directory called :code:`resized-video.mp4` that consists of the resized frames that we generated. + +That's a complete Scanner pipeline! If you'd like to learn about processing multiple jobs, keep reading! Otherwise, to learn more about the features of Scanner, either follow the :ref:`walkthrough` or go through the extended :ref:`tutorial`. + +.. toctree:: + :maxdepth: 1 + +Processing multiple videos +-------------------------- + +Now let's say that we have a directory of videos we want to process, instead of just a single one as above. +To see the multiple video code in action, run the following commands from the quickstart app directory: + +.. code-block:: bash + + wget https://storage.googleapis.com/scanner-data/public/sample-clip-1.mp4 + wget https://storage.googleapis.com/scanner-data/public/sample-clip-2.mp4 + wget https://storage.googleapis.com/scanner-data/public/sample-clip-3.mp4 + python3 main-multi-video.py + +After :code:`main-multi-video.py` exits, you should now have a resized version of each of the downloaded videos named :code:`sample-clip-%d-resized.mp4` in the current directory, where :code:`%d` is replaced with the number of the video. + +There are two places in the code that need to change to process multiple videos. Let's look at those pieces of code inside :code:`main-multi-video.py` now. 
+ +Ingesting multiple videos +------------------------- + +The first change is that we need to ingest all of our videos. This means changing our call to :code:`ingest_videos` to take a list of three tuples, instead of just one: + +.. code-block:: python + + videos_to_process = [ + ('sample-clip-1', 'sample-clip-1.mp4'), + ('sample-clip-2', 'sample-clip-2.mp4'), + ('sample-clip-3', 'sample-clip-3.mp4') + ] + + # Ingest the videos into the database + db.ingest_videos(videos_to_process) + +Now we have three tables that are ready to be processed! + +Defining and executing multiple Jobs +------------------------------------ + +The second change is to define multiple jobs, one for each video that we want to process. + +.. code-block:: python + + jobs = [] + for table_name, _ in videos_to_process: + job = Job(op_args={ + frame: db.table(table_name).column('frame'), + output_frame: 'resized-{:s}'.format(table_name) + }) + jobs.append(job) + +Now we can process these multiple jobs at the same time using :code:`run`: + +.. code-block:: python + + output_tables = db.run(output=output_frame, jobs=jobs) + +Like before, this call will block until Scanner has finished processing all the jobs. You should see a progress bar while Scanner is executing the computation graph as before. Once the jobs are done, :code:`run` returns the newly computed tables, here shown as :code:`output_tables`. + diff --git a/docs/scanner_logo.png b/docs/scanner_logo.png new file mode 100644 index 00000000..27c897f4 Binary files /dev/null and b/docs/scanner_logo.png differ diff --git a/docs/tutorial.rst b/docs/tutorial.rst new file mode 100644 index 00000000..a2ee4afa --- /dev/null +++ b/docs/tutorial.rst @@ -0,0 +1,8 @@ +.. _tutorial: + +Tutorials +========= + +Scanner provides a set of tutorials that provide step-by-step examples of many +of the basic features provided by Scanner. These tutorials can be found +`here `__. 
diff --git a/docs/walkthrough.rst b/docs/walkthrough.rst new file mode 100644 index 00000000..0fda07b0 --- /dev/null +++ b/docs/walkthrough.rst @@ -0,0 +1,25 @@ +.. _walkthrough: + +Interactive Jupyter Walkthrough +=============================== + +To get a more detailed understanding of how Scanner can be used in a real +application, we recommend trying the Jupyter notebook tutorial. To start the +notebook, if you're using Docker: + +.. code-block:: bash + + pip3 install --upgrade docker-compose + wget https://raw.githubusercontent.com/scanner-research/scanner/master/docker/docker-compose.yml + docker-compose up cpu + +If you installed Scanner yourself, then run: + +.. code-block:: bash + + pip3 install jupyter requests matplotlib + cd path/to/scanner + jupyter notebook --ip=0.0.0.0 --port=8888 + +Then visit port 8888 on your server/localhost, click through to +:code:`examples/Walkthrough.ipynb`, and follow the directions in the notebook. diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt deleted file mode 100644 index c01ac803..00000000 --- a/examples/CMakeLists.txt +++ /dev/null @@ -1 +0,0 @@ -add_subdirectory(simple) diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 00000000..d8128251 --- /dev/null +++ b/examples/README.md @@ -0,0 +1,38 @@ +# Scanner Examples + +This directory contains simple examples and full applications that +demonstrate how to use Scanner. + +## Tutorials + +* [Walkthrough.ipynb](https://github.com/scanner-research/scanner/blob/master/examples/Walkthrough.ipynb): an IPython notebook that goes through a simple application (shot detection) using Scanner. +* [List of Tutorials](https://github.com/scanner-research/scanner/blob/master/examples/tutorials): a set of well-commented files exploring different Scanner features in code. 
+ +If you want to run the notebook yourself so that you can interactively edit the +code, run: + +```bash +cd path/to/your/scanner/directory/ +cd examples +jupyter notebook --allow-root --ip=0.0.0.0 --port=8888 +``` + +Then in your browser, go to [http://localhost:8888/notebooks/Walkthrough.ipynb](http://localhost:8888/notebooks/Walkthrough.ipynb) and copy in the token from the console logs. Follow the instructions in the Jupyter notebook. + +## Example Applications + +* [face_detection](https://github.com/scanner-research/scanner/blob/master/examples/apps/face_detection): Detect all faces in a video, and then render a new video overlaying the face bounding boxes on the video. +* [shot_detection](https://github.com/scanner-research/scanner/blob/master/examples/apps/shot_detection): Segment a video into shots and then create a single image montage featuring one thumbnail for each shot. (Same application as the walkthrough.) +* [hyperlapse](https://github.com/scanner-research/scanner/blob/master/examples/apps/hyperlapse): Create a stable timelapse video using the [Hyperlapse](https://www.microsoft.com/en-us/research/publication/real-time-hyperlapse-creation-via-optimal-frame-selection/) algorithm. +* [optical_flow](https://github.com/scanner-research/scanner/blob/master/examples/apps/optical_flow): Use OpenCV to compute flow fields within a video. +* [object_detection_tensorflow](https://github.com/scanner-research/scanner/blob/master/examples/apps/object_detection_tensorflow): Use Tensorflow's SSD Mobilenet DNN to detect objects. +* [detectron](https://github.com/scanner-research/scanner/blob/master/examples/apps/detectron): Use the Detectron object detection API for Caffe2 to detect objects. +* [reverse_image_search](https://github.com/scanner-research/scanner/blob/master/examples/apps/reverse_image_search): Search through a video to look for a query frame. 
+* [depth_from_stereo](https://github.com/scanner-research/scanner/blob/master/examples/apps/depth_from_stereo): Compute a per-pixel depth image from two views of the same location. + +## How-Tos + +* [tensorflow](https://github.com/scanner-research/scanner/blob/master/examples/how-tos/tensorflow): How to expose [TensorFlow](https://www.tensorflow.org/) computations as Scanner graph operations. +* [caffe](https://github.com/scanner-research/scanner/blob/master/examples/how-tos/caffe): How to use Caffe nets as Scanner graph operations. +* [python_kernel](https://github.com/scanner-research/scanner/blob/master/examples/how-tos/python_kernel): How to implement Scanner graph ops in Python. +* [halide](https://github.com/scanner-research/scanner/blob/master/examples/how-tos/halide): How to use [Halide](http://halide-lang.org/) kernels as Scanner graph operations. diff --git a/examples/Walkthrough.ipynb b/examples/Walkthrough.ipynb new file mode 100644 index 00000000..3cececad --- /dev/null +++ b/examples/Walkthrough.ipynb @@ -0,0 +1,312 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Scanner walkthrough\n", + "\n", + "To explore how Scanner fits in to a bigger pipeline, we're going to walk through a simple video analysis application. If you want to analyze a film, a common unit of analysis is the _shot_, short segments of video often delineated by the camera cutting to a different angle or location. In this walkthrough, we're going to use Scanner to implement _shot segmentation_, or breaking up a video into shots. To start, we need to get a video. We'll use a scene from Baby Driver:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%%html\n", + "