diff --git a/.travis.yml b/.travis.yml
index 0592d2c2b3a..85b4c77921a 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -170,13 +170,8 @@ py36_osx_config: &py36_osx_config
packages: &py36_osx_config_brew_packages
- openssl
env:
- # Fix Python 3 issue linking to OpenSSL
- &py36_osx_config_env >
- PATH="/usr/local/opt/openssl/bin:$PATH"
- LDFLAGS="-L/usr/local/opt/openssl/lib"
- CPPFLAGS="-I/usr/local/opt/openssl/include"
- PYENV_ROOT="${HOME}/.pyenv"
- PATH="${PYENV_ROOT}/shims:${PATH}"
+ PATH="/usr/local/opt/openssl/bin:$PATH" LDFLAGS="-L/usr/local/opt/openssl/lib" CPPFLAGS="-I/usr/local/opt/openssl/include" PYENV_ROOT="${HOME}/.pyenv" PATH="${PYENV_ROOT}/shims:${PATH}"
before_install:
- curl -L https://github.com/stedolan/jq/releases/download/jq-1.5/jq-osx-amd64 -o /usr/local/bin/jq
- chmod 755 /usr/local/bin/jq
@@ -208,11 +203,7 @@ py36_osx_test_config: &py36_osx_test_config
env:
# Must duplicate py36_osx_config's env because it cannot be merged into a new anchor
- &py36_osx_test_config_env >
- PATH="/usr/local/opt/openssl/bin:$PATH"
- LDFLAGS="-L/usr/local/opt/openssl/lib"
- CPPFLAGS="-I/usr/local/opt/openssl/include"
- PYENV_ROOT="${HOME}/.pyenv"
- PATH="${PYENV_ROOT}/shims:${PATH}"
+ PATH="/usr/local/opt/openssl/bin:$PATH" LDFLAGS="-L/usr/local/opt/openssl/lib" CPPFLAGS="-I/usr/local/opt/openssl/include" PYENV_ROOT="${HOME}/.pyenv" PATH="${PYENV_ROOT}/shims:${PATH}"
BOOTSTRAPPED_PEX_KEY_SUFFIX=py36.osx
linux_with_fuse: &linux_with_fuse
@@ -381,10 +372,20 @@ cargo_audit: &cargo_audit
# Build wheels
# -------------------------------------------------------------------------
+# N.B. With Python 2, we must build pantsbuild.pants with both UCS2 and UCS4 to provide full
+# compatibility for end users. This is because we constrain our ABI due to the native engine.
+# See https://www.python.org/dev/peps/pep-0513/#ucs-2-vs-ucs-4-builds. Note this distinction is
+# not necessary with Python 3.3+ due to flexible storage of Unicode strings (https://www.python.org/dev/peps/pep-0393/).
+#
+# We treat Linux UCS4 and OSX UCS2 normally, as these are the defaults for those environments.
+# The Linux UCS2 and OSX UCS4 shards, however, must rebuild Python with
+# `PYTHON_CONFIGURE_OPTS=--enable-unicode=ucs{2,4}` set, and must bootstrap Pants again rather
+# than pull the prebuilt PEX from AWS.
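As background for the cp27m (UCS2) vs. cp27mu (UCS4) distinction above, the Unicode build of a CPython 2 interpreter can be checked at runtime. A minimal illustrative sketch, not part of this patch:

```python
# Sketch: report whether a CPython 2 interpreter is a narrow (UCS2) or wide (UCS4) build.
# Narrow builds expose sys.maxunicode == 0xFFFF; wide builds expose 0x10FFFF. Python 3.3+
# always reports 0x10FFFF due to PEP 393's flexible string storage, so the distinction
# disappears there.
import sys


def unicode_build():
    return 'UCS2 (narrow)' if sys.maxunicode == 0xFFFF else 'UCS4 (wide)'


if __name__ == '__main__':
    print(unicode_build())
```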
+
base_build_wheels: &base_build_wheels
stage: *test
env:
- - &base_build_wheels_env RUN_PANTS_FROM_PEX=1 PREPARE_DEPLOY=1
+ - &base_build_wheels_env PREPARE_DEPLOY=1
base_linux_build_wheels: &base_linux_build_wheels
# Similar to the bootstrap shard, we build Linux wheels in a docker image to maximize compatibility.
@@ -398,51 +399,102 @@ base_linux_build_wheels: &base_linux_build_wheels
travis_ci:latest
sh -c "RUN_PANTS_FROM_PEX=1 ./build-support/bin/release.sh ${RELEASE_ARGS} -n"
-py27_linux_build_wheels: &py27_linux_build_wheels
- <<: *py27_linux_test_config
+py27_linux_build_wheels_ucs2: &py27_linux_build_wheels_ucs2
+ <<: *py27_linux_config
+ <<: *base_linux_build_wheels
+ <<: *native_engine_cache_config
+ name: "Build wheels - Linux and cp27m (UCS2)"
+ env:
+ - *base_build_wheels_env
+ - CACHE_NAME=linuxwheelsbuild.ucs2
+ script:
+ - docker build --rm -t travis_ci_py27_ucs2
+ --build-arg "TRAVIS_USER=$(id -un)"
+ --build-arg "TRAVIS_UID=$(id -u)"
+ --build-arg "TRAVIS_GROUP=$(id -gn)"
+ --build-arg "TRAVIS_GID=$(id -g)"
+ build-support/docker/travis_ci_py27_ucs2/
+ - &docker_dry_run_release docker run --rm -t
+ -v "${HOME}:/travis/home"
+ -v "${TRAVIS_BUILD_DIR}:/travis/workdir"
+ travis_ci_py27_ucs2:latest
+ sh -c "PEX_VERBOSE=9 ./build-support/bin/ci.sh -2b && RUN_PANTS_FROM_PEX=1 PEX_VERBOSE=9 ./build-support/bin/release.sh -n"
+
+py27_linux_build_wheels_ucs4: &py27_linux_build_wheels_ucs4
<<: *base_linux_build_wheels
- name: "Build Linux wheels (Py2.7 PEX)"
+ <<: *py27_linux_test_config
+ # `py27_linux_test_config` overrides the stage set by `base_build_wheels`, so we re-override it.
+ stage: *test
+ name: "Build wheels - Linux and cp27mu (UCS4)"
env:
- *py27_linux_test_config_env
- *base_build_wheels_env
- RELEASE_ARGS=''
- - CACHE_NAME=linuxwheelsbuild.py27
+ - CACHE_NAME=linuxwheelsbuild.ucs4
py36_linux_build_wheels: &py36_linux_build_wheels
- <<: *py36_linux_test_config
<<: *base_linux_build_wheels
- name: "Build Linux wheels (Py3.6 PEX)"
+ <<: *py36_linux_test_config
+ name: "Build wheels - Linux and abi3 (Py3.6+)"
env:
- *py36_linux_test_config_env
- *base_build_wheels_env
- RELEASE_ARGS='-3'
- - CACHE_NAME=linuxwheelsbuild.py36
+ - CACHE_NAME=linuxwheelsbuild.abi3
base_osx_build_wheels: &base_osx_build_wheels
<<: *base_build_wheels
osx_image: xcode8
script:
- - ./build-support/bin/release.sh ${RELEASE_ARGS} -n
+ - RUN_PANTS_FROM_PEX=1 ./build-support/bin/release.sh ${RELEASE_ARGS} -n
-py27_osx_build_wheels: &py27_osx_build_wheels
+py27_osx_build_wheels_ucs2: &py27_osx_build_wheels_ucs2
<<: *py27_osx_test_config
<<: *base_osx_build_wheels
- name: "Build OSX wheels (Py2.7 PEX)"
+ name: "Build wheels - OSX and cp27m (UCS2)"
env:
- *py27_osx_test_config_env
- *base_build_wheels_env
- RELEASE_ARGS=''
- - CACHE_NAME=osxwheelsbuild.py27
+ - CACHE_NAME=osxwheelsbuild.ucs2
+
+py27_osx_build_wheels_ucs4: &py27_osx_build_wheels_ucs4
+ <<: *py27_osx_config
+ <<: *base_osx_build_wheels
+ <<: *native_engine_cache_config
+ name: "Build wheels - OSX and cp27mu (UCS4)"
+ addons:
+ brew:
+ packages:
+ - openssl
+ env:
+ - *base_build_wheels_env
+ - CACHE_NAME=osxwheelsbuild.ucs4
+ - PATH="/usr/local/opt/openssl/bin:$PATH" LDFLAGS="-L/usr/local/opt/openssl/lib" CPPFLAGS="-I/usr/local/opt/openssl/include" PYENV_ROOT="${HOME}/.pyenv" PATH="${PYENV_ROOT}/shims:${PATH}"
+
+ - PYTHON_CONFIGURE_OPTS=--enable-unicode=ucs4
+ # We set $PY to ensure the UCS4 interpreter is used when bootstrapping the PEX.
+ - PY=${PYENV_ROOT}/shims/python2.7
+ before_install:
+ - curl -L https://github.com/stedolan/jq/releases/download/jq-1.5/jq-osx-amd64 -o /usr/local/bin/jq
+ - chmod 755 /usr/local/bin/jq
+ - ./build-support/bin/install_aws_cli_for_ci.sh
+ - git clone https://github.com/pyenv/pyenv ${PYENV_ROOT}
+ - ${PYENV_ROOT}/bin/pyenv install 2.7.13
+ - ${PYENV_ROOT}/bin/pyenv global 2.7.13
+ script:
+ - PEX_VERBOSE=9 ./build-support/bin/ci.sh -2b
+ - RUN_PANTS_FROM_PEX=1 ./build-support/bin/release.sh -n
py36_osx_build_wheels: &py36_osx_build_wheels
<<: *py36_osx_test_config
<<: *base_osx_build_wheels
- name: "Build OSX wheels (Py3.6 PEX)"
+ name: "Build wheels - OSX and abi3 (Py3.6+)"
env:
- *py36_osx_test_config_env
- *base_build_wheels_env
- RELEASE_ARGS='-3'
- - CACHE_NAME=osxwheelsbuild.py36
+ - CACHE_NAME=osxwheelsbuild.abi3
# -------------------------------------------------------------------------
# Rust tests
@@ -644,11 +696,6 @@ matrix:
- <<: *linux_rust_clippy
- <<: *cargo_audit
- - <<: *py27_linux_build_wheels
- - <<: *py36_linux_build_wheels
- - <<: *py27_osx_build_wheels
- - <<: *py36_osx_build_wheels
-
- <<: *py27_linux_test_config
name: "Unit tests for pants and pants-plugins (Py2.7 PEX)"
stage: *test
@@ -666,6 +713,14 @@ matrix:
script:
- ./build-support/bin/travis-ci.sh -lp
+ - <<: *py27_linux_build_wheels_ucs2
+ - <<: *py27_linux_build_wheels_ucs4
+ - <<: *py36_linux_build_wheels
+
+ - <<: *py27_osx_build_wheels_ucs2
+ - <<: *py27_osx_build_wheels_ucs4
+ - <<: *py36_osx_build_wheels
+
- <<: *py36_linux_test_config
name: "Integration tests for pants - shard 0 (Py3.6 PEX)"
env:
diff --git a/build-support/bin/ci.sh b/build-support/bin/ci.sh
index 7f294494f65..46dfc332d37 100755
--- a/build-support/bin/ci.sh
+++ b/build-support/bin/ci.sh
@@ -104,26 +104,31 @@ esac
# We're running against a Pants clone.
export PANTS_DEV=1
-# Note that we set PY, and when running with Python 3, also set PANTS_PYTHON_SETUP_INTERPRETER_CONSTRAINTS.
-# This would usually not be necessary when developing locally, because the `./pants` and `./pants3`
-# scripts set these constraints for us already. However, we must set the values here because in non-bootstrap shards
-# we run CI using `./pants.pex` instead of the scripts `./pants` and `./pants3`, so those scripts cannot set
-# the relevant environment variables. Without setting these environment variables, the Python 3 shards will try to
-# execute subprocesses using Python 2, which results in the _Py_Dealloc error (#6985), and shards that do not
-# pull down `./pants.pex` but still use a virtualenv (such as Rust Tests) will fail to execute.
+# Determine the Python version to use for bootstrapping pants.pex. This would usually not be
+# necessary to set when developing locally, because the `./pants` and `./pants3` scripts set
+# these constraints for us already. However, we must set the values here because in
+# non-bootstrap shards we run CI using `./pants.pex` instead of the scripts `./pants`
+# and `./pants3`, so those scripts cannot set the relevant environment variables.
if [[ "${python_two:-false}" == "false" ]]; then
- py_version_number="3.6"
+ py_major_minor="3.6"
bootstrap_pants_script="./pants3"
- export PANTS_PYTHON_SETUP_INTERPRETER_CONSTRAINTS="['CPython==${py_version_number}.*']"
else
- py_version_number="2.7"
+ py_major_minor="2.7"
bootstrap_pants_script="./pants"
fi
-export PY="python${py_version_number}"
-banner "Using Python ${py_version_number} to execute spawned subprocesses (e.g. tests)"
+export PY="${PY:-python${py_major_minor}}"
+
+# Also set PANTS_PYTHON_SETUP_INTERPRETER_CONSTRAINTS. We set this to the exact Python version
+# to resolve any potential ambiguity when multiple Python interpreters are discoverable, such as
+# Python 2.7.10 vs. 2.7.13. When running with Python 3, we must also set this constraint to ensure
+# all spawned subprocesses use Python 3 rather than the default of Python 2. This is in part
+# necessary to avoid the _Py_Dealloc error (#6985).
+py_major_minor_patch=$(${PY} -c 'import sys; print(".".join(map(str, sys.version_info[0:3])))')
+export PANTS_PYTHON_SETUP_INTERPRETER_CONSTRAINTS="${PANTS_PYTHON_SETUP_INTERPRETER_CONSTRAINTS:-['CPython==${py_major_minor_patch}']}"
+banner "Setting interpreter constraints to ${PANTS_PYTHON_SETUP_INTERPRETER_CONSTRAINTS}"
if [[ "${run_bootstrap:-false}" == "true" ]]; then
- start_travis_section "Bootstrap" "Bootstrapping pants as a Python ${py_version_number} PEX"
+ start_travis_section "Bootstrap" "Bootstrapping pants as a Python ${py_major_minor_patch} PEX"
(
if [[ "${run_bootstrap_clean:-false}" == "true" ]]; then
./build-support/python/clean.sh || die "Failed to clean before bootstrapping pants."
diff --git a/build-support/bin/pre-commit.sh b/build-support/bin/pre-commit.sh
index f6e0d075345..3981f3bbdb6 100755
--- a/build-support/bin/pre-commit.sh
+++ b/build-support/bin/pre-commit.sh
@@ -51,36 +51,36 @@ printf "%s\n" "${ADDED_FILES[@]}" \
echo "* Checking for banned imports"
./build-support/bin/check_banned_imports.sh
-if git diff master --name-only | grep '\.rs$' > /dev/null; then
- echo "* Checking formatting of rust files" && ./build-support/bin/check_rust_formatting.sh || exit 1
- # Clippy happens on a different shard because of separate caching concerns.
- if [[ "${RUNNING_VIA_TRAVIS_CI_SCRIPT}" != "1" ]]; then
- echo "* Running cargo clippy" && ./build-support/bin/check_clippy.sh || exit 1
- fi
- echo "* Checking rust target headers" && build-support/bin/check_rust_target_headers.sh || exit 1
-fi
-
echo "* Checking for bad shell patterns" && ./build-support/bin/check_shell.sh || exit 1
-$(git rev-parse --verify master > /dev/null 2>&1)
-if [[ $? -eq 0 ]]; then
+# When Travis builds a tag, it does so in a shallow clone without master fetched, which
+# causes `./pants changed` to fail.
+if git rev-parse --verify "master" &>/dev/null; then
echo "* Checking imports" && ./build-support/bin/isort.sh || \
die "To fix import sort order, run \`\"$(pwd)/build-support/bin/isort.sh\" -f\`"
+
# TODO(CMLivingston) Make lint use `-q` option again after addressing proper workunit labeling:
# https://github.com/pantsbuild/pants/issues/6633
# TODO: add a test case for this while including a pexrc file, as python checkstyle currently fails
# quite often with a pexrc available.
echo "* Checking lint" && ./pants --exclude-target-regexp='testprojects/.*' --changed-parent=master lint || exit 1
+
+ if git diff master --name-only | grep '\.rs$' > /dev/null; then
+ echo "* Checking formatting of rust files" && ./build-support/bin/check_rust_formatting.sh || exit 1
+ # Clippy happens on a different shard because of separate caching concerns.
+ if [[ "${RUNNING_VIA_TRAVIS_CI_SCRIPT}" != "1" ]]; then
+ echo "* Running cargo clippy" && ./build-support/bin/check_clippy.sh || exit 1
+ fi
+ echo "* Checking rust target headers" && build-support/bin/check_rust_target_headers.sh || exit 1
+ fi
+
+ if git diff master --name-only | grep build-support/travis > /dev/null; then
+ echo "* Checking .travis.yml generation" && \
+ actual_travis_yml=$(<.travis.yml) && \
+ expected_travis_yml=$(./pants --quiet run build-support/travis:generate_travis_yml) && \
+ [ "${expected_travis_yml}" == "${actual_travis_yml}" ] || \
+ die "Travis config generator changed but .travis.yml file not regenerated. See top of that file for instructions."
+ fi
else
- # When travis builds a tag, it does so in a shallow clone without master fetched, which
- # fails in pants changed.
echo "* Skipping import/lint checks in partial working copy."
fi
-
-if git diff master --name-only | grep build-support/travis > /dev/null; then
- echo "* Checking .travis.yml generation" && \
- actual_travis_yml=$(<.travis.yml) && \
- expected_travis_yml=$(./pants --quiet run build-support/travis:generate_travis_yml) && \
- [ "${expected_travis_yml}" == "${actual_travis_yml}" ] || \
- die "Travis config generator changed but .travis.yml file not regenerated. See top of that file for instructions."
-fi
diff --git a/build-support/bin/release.sh b/build-support/bin/release.sh
index ee8514ec292..cfbb9332c3e 100755
--- a/build-support/bin/release.sh
+++ b/build-support/bin/release.sh
@@ -114,7 +114,7 @@ function run_pex() {
curl -sSL "${PEX_DOWNLOAD_PREFIX}/v${PEX_VERSION}/${PEX_PEX}" > "${pex}"
chmod +x "${pex}"
- "${pex}" "$@"
+ "${pex}" -vvvvvvvvv "$@"
)
}
diff --git a/build-support/docker/travis_ci_py27_ucs2/Dockerfile b/build-support/docker/travis_ci_py27_ucs2/Dockerfile
new file mode 100644
index 00000000000..a3bd9df456b
--- /dev/null
+++ b/build-support/docker/travis_ci_py27_ucs2/Dockerfile
@@ -0,0 +1,45 @@
+# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
+# Licensed under the Apache License, Version 2.0 (see LICENSE).
+
+# This file duplicates travis_ci/Dockerfile, except it installs Python 2.7 instead of Python 3.6
+# and adds the env var PYTHON_CONFIGURE_OPTS to install it with UCS2.
+
+# Use our custom Centos6 image for binary compatibility with old linux distros.
+FROM pantsbuild/centos6:latest
+
+# Note we use 2.7.15, rather than 2.7.13, because the centos6 image already comes with 2.7.13
+# installed, which uses UCS4 instead of UCS2. Using a distinct version lets us disambiguate which
+# Python 2 interpreter to use when `ci.sh` sets the interpreter constraints for Pants and PEX.
+# We set $PY to the exact Python 2.7 version we want, to ensure the PEX is bootstrapped
+# with UCS2.
+ARG PYTHON_2_VERSION=2.7.15
+RUN yum install sqlite-devel -y
+ENV PYENV_ROOT /pyenv-docker-build
+RUN mkdir ${PYENV_ROOT}
+RUN git clone https://github.com/pyenv/pyenv ${PYENV_ROOT}
+ENV PYTHON_CONFIGURE_OPTS --enable-unicode=ucs2
+RUN /usr/bin/scl enable devtoolset-7 -- bash -c '\
+ ${PYENV_ROOT}/bin/pyenv install ${PYTHON_2_VERSION} \
+ && ${PYENV_ROOT}/bin/pyenv global ${PYTHON_2_VERSION}'
+ENV PATH "${PYENV_ROOT}/shims:${PATH}"
+ENV PY "${PYENV_ROOT}/shims/python2.7"
+ENV PEX_PYTHON "${PYENV_ROOT}/shims/python2.7"
+
+# Setup mount points for the travis ci user & workdir.
+VOLUME /travis/home
+VOLUME /travis/workdir
+
+# Setup a non-root user to execute the build under (avoids problems with npm install).
+ARG TRAVIS_USER=travis_ci
+ARG TRAVIS_UID=1000
+ARG TRAVIS_GROUP=root
+ARG TRAVIS_GID=0
+
+RUN groupadd --gid ${TRAVIS_GID} ${TRAVIS_GROUP} || true
+RUN useradd -d /travis/home -g ${TRAVIS_GROUP} --uid ${TRAVIS_UID} ${TRAVIS_USER}
+USER ${TRAVIS_USER}:${TRAVIS_GROUP}
+
+# Our newly created user is unlikely to have a sane environment: set a locale at least.
+ENV LC_ALL="en_US.UTF-8"
+
+WORKDIR /travis/workdir
diff --git a/build-support/travis/env_osx_with_pyenv.mustache b/build-support/travis/env_osx_with_pyenv.mustache
new file mode 100644
index 00000000000..39d3ef68fe9
--- /dev/null
+++ b/build-support/travis/env_osx_with_pyenv.mustache
@@ -0,0 +1 @@
+PATH="/usr/local/opt/openssl/bin:$PATH" LDFLAGS="-L/usr/local/opt/openssl/lib" CPPFLAGS="-I/usr/local/opt/openssl/include" PYENV_ROOT="${HOME}/.pyenv" PATH="${PYENV_ROOT}/shims:${PATH}"
diff --git a/build-support/travis/generate_travis_yml.py b/build-support/travis/generate_travis_yml.py
index 0977b2e60b7..64568642fe0 100644
--- a/build-support/travis/generate_travis_yml.py
+++ b/build-support/travis/generate_travis_yml.py
@@ -25,12 +25,14 @@
def generate_travis_yml():
"""Generates content for a .travis.yml file from templates."""
- template = pkg_resources.resource_string(
- __name__, 'travis.yml.mustache').decode('utf-8')
- before_install_linux = pkg_resources.resource_string(
- __name__, 'before_install_linux.mustache').decode('utf-8')
- before_install_osx = pkg_resources.resource_string(
- __name__, 'before_install_osx.mustache').decode('utf-8')
+ def get_mustache_file(file_name):
+ return pkg_resources.resource_string(__name__, file_name).decode('utf-8')
+
+ template = get_mustache_file('travis.yml.mustache')
+ before_install_linux = get_mustache_file('before_install_linux.mustache')
+ before_install_osx = get_mustache_file('before_install_osx.mustache')
+ env_osx_with_pyenv = get_mustache_file('env_osx_with_pyenv.mustache')
+
context = {
'header': HEADER,
'py3_integration_shards': range(0, num_py3_integration_shards),
@@ -42,6 +44,7 @@ def generate_travis_yml():
}
renderer = pystache.Renderer(partials={
'before_install_linux': before_install_linux,
- 'before_install_osx': before_install_osx
+ 'before_install_osx': before_install_osx,
+ 'env_osx_with_pyenv': env_osx_with_pyenv
})
print(renderer.render(template, context))
diff --git a/build-support/travis/travis.yml.mustache b/build-support/travis/travis.yml.mustache
index 7034d8902bd..31fcdb138bd 100644
--- a/build-support/travis/travis.yml.mustache
+++ b/build-support/travis/travis.yml.mustache
@@ -156,13 +156,8 @@ py36_osx_config: &py36_osx_config
packages: &py36_osx_config_brew_packages
- openssl
env:
- # Fix Python 3 issue linking to OpenSSL
- &py36_osx_config_env >
- PATH="/usr/local/opt/openssl/bin:$PATH"
- LDFLAGS="-L/usr/local/opt/openssl/lib"
- CPPFLAGS="-I/usr/local/opt/openssl/include"
- PYENV_ROOT="${HOME}/.pyenv"
- PATH="${PYENV_ROOT}/shims:${PATH}"
+ {{>env_osx_with_pyenv}}
before_install:
{{>before_install_osx}}
# Clone pyenv directly from GitHub. For multiple osx images, brew's version of pyenv is too old to get
@@ -192,11 +187,7 @@ py36_osx_test_config: &py36_osx_test_config
env:
# Must duplicate py36_osx_config's env because it cannot be merged into a new anchor
- &py36_osx_test_config_env >
- PATH="/usr/local/opt/openssl/bin:$PATH"
- LDFLAGS="-L/usr/local/opt/openssl/lib"
- CPPFLAGS="-I/usr/local/opt/openssl/include"
- PYENV_ROOT="${HOME}/.pyenv"
- PATH="${PYENV_ROOT}/shims:${PATH}"
+ {{>env_osx_with_pyenv}}
BOOTSTRAPPED_PEX_KEY_SUFFIX=py36.osx
linux_with_fuse: &linux_with_fuse
@@ -360,10 +351,20 @@ cargo_audit: &cargo_audit
# Build wheels
# -------------------------------------------------------------------------
+# N.B. With Python 2, we must build pantsbuild.pants with both UCS2 and UCS4 to provide full
+# compatibility for end users. This is because we constrain our ABI due to the native engine.
+# See https://www.python.org/dev/peps/pep-0513/#ucs-2-vs-ucs-4-builds. Note this distinction is
+# not necessary with Python 3.3+ due to flexible storage of Unicode strings (https://www.python.org/dev/peps/pep-0393/).
+#
+# We treat Linux UCS4 and OSX UCS2 normally, as these are the defaults for those environments.
+# The Linux UCS2 and OSX UCS4 shards, however, must rebuild Python with
+# `PYTHON_CONFIGURE_OPTS=--enable-unicode=ucs{2,4}` set, and must bootstrap Pants again rather
+# than pull the prebuilt PEX from AWS.
+
base_build_wheels: &base_build_wheels
stage: *test
env:
- - &base_build_wheels_env RUN_PANTS_FROM_PEX=1 PREPARE_DEPLOY=1
+ - &base_build_wheels_env PREPARE_DEPLOY=1
base_linux_build_wheels: &base_linux_build_wheels
# Similar to the bootstrap shard, we build Linux wheels in a docker image to maximize compatibility.
@@ -377,51 +378,99 @@ base_linux_build_wheels: &base_linux_build_wheels
travis_ci:latest
sh -c "RUN_PANTS_FROM_PEX=1 ./build-support/bin/release.sh ${RELEASE_ARGS} -n"
-py27_linux_build_wheels: &py27_linux_build_wheels
- <<: *py27_linux_test_config
+py27_linux_build_wheels_ucs2: &py27_linux_build_wheels_ucs2
+ <<: *py27_linux_config
+ <<: *base_linux_build_wheels
+ <<: *native_engine_cache_config
+ name: "Build wheels - Linux and cp27m (UCS2)"
+ env:
+ - *base_build_wheels_env
+ - CACHE_NAME=linuxwheelsbuild.ucs2
+ script:
+ - docker build --rm -t travis_ci_py27_ucs2
+ --build-arg "TRAVIS_USER=$(id -un)"
+ --build-arg "TRAVIS_UID=$(id -u)"
+ --build-arg "TRAVIS_GROUP=$(id -gn)"
+ --build-arg "TRAVIS_GID=$(id -g)"
+ build-support/docker/travis_ci_py27_ucs2/
+ - &docker_dry_run_release docker run --rm -t
+ -v "${HOME}:/travis/home"
+ -v "${TRAVIS_BUILD_DIR}:/travis/workdir"
+ travis_ci_py27_ucs2:latest
+ sh -c "PEX_VERBOSE=9 ./build-support/bin/ci.sh -2b && RUN_PANTS_FROM_PEX=1 PEX_VERBOSE=9 ./build-support/bin/release.sh -n"
+
+py27_linux_build_wheels_ucs4: &py27_linux_build_wheels_ucs4
<<: *base_linux_build_wheels
- name: "Build Linux wheels (Py2.7 PEX)"
+ <<: *py27_linux_test_config
+ # `py27_linux_test_config` overrides the stage set by `base_build_wheels`, so we re-override it.
+ stage: *test
+ name: "Build wheels - Linux and cp27mu (UCS4)"
env:
- *py27_linux_test_config_env
- *base_build_wheels_env
- RELEASE_ARGS=''
- - CACHE_NAME=linuxwheelsbuild.py27
+ - CACHE_NAME=linuxwheelsbuild.ucs4
py36_linux_build_wheels: &py36_linux_build_wheels
- <<: *py36_linux_test_config
<<: *base_linux_build_wheels
- name: "Build Linux wheels (Py3.6 PEX)"
+ <<: *py36_linux_test_config
+ name: "Build wheels - Linux and abi3 (Py3.6+)"
env:
- *py36_linux_test_config_env
- *base_build_wheels_env
- RELEASE_ARGS='-3'
- - CACHE_NAME=linuxwheelsbuild.py36
+ - CACHE_NAME=linuxwheelsbuild.abi3
base_osx_build_wheels: &base_osx_build_wheels
<<: *base_build_wheels
osx_image: xcode8
script:
- - ./build-support/bin/release.sh ${RELEASE_ARGS} -n
+ - RUN_PANTS_FROM_PEX=1 ./build-support/bin/release.sh ${RELEASE_ARGS} -n
-py27_osx_build_wheels: &py27_osx_build_wheels
+py27_osx_build_wheels_ucs2: &py27_osx_build_wheels_ucs2
<<: *py27_osx_test_config
<<: *base_osx_build_wheels
- name: "Build OSX wheels (Py2.7 PEX)"
+ name: "Build wheels - OSX and cp27m (UCS2)"
env:
- *py27_osx_test_config_env
- *base_build_wheels_env
- RELEASE_ARGS=''
- - CACHE_NAME=osxwheelsbuild.py27
+ - CACHE_NAME=osxwheelsbuild.ucs2
+
+py27_osx_build_wheels_ucs4: &py27_osx_build_wheels_ucs4
+ <<: *py27_osx_config
+ <<: *base_osx_build_wheels
+ <<: *native_engine_cache_config
+ name: "Build wheels - OSX and cp27mu (UCS4)"
+ addons:
+ brew:
+ packages:
+ - openssl
+ env:
+ - *base_build_wheels_env
+ - CACHE_NAME=osxwheelsbuild.ucs4
+ - {{>env_osx_with_pyenv}}
+ - PYTHON_CONFIGURE_OPTS=--enable-unicode=ucs4
+ # We set $PY to ensure the UCS4 interpreter is used when bootstrapping the PEX.
+ - PY=${PYENV_ROOT}/shims/python2.7
+ before_install:
+ {{>before_install_osx}}
+ - git clone https://github.com/pyenv/pyenv ${PYENV_ROOT}
+ - ${PYENV_ROOT}/bin/pyenv install 2.7.13
+ - ${PYENV_ROOT}/bin/pyenv global 2.7.13
+ script:
+ - PEX_VERBOSE=9 ./build-support/bin/ci.sh -2b
+ - RUN_PANTS_FROM_PEX=1 ./build-support/bin/release.sh -n
py36_osx_build_wheels: &py36_osx_build_wheels
<<: *py36_osx_test_config
<<: *base_osx_build_wheels
- name: "Build OSX wheels (Py3.6 PEX)"
+ name: "Build wheels - OSX and abi3 (Py3.6+)"
env:
- *py36_osx_test_config_env
- *base_build_wheels_env
- RELEASE_ARGS='-3'
- - CACHE_NAME=osxwheelsbuild.py36
+ - CACHE_NAME=osxwheelsbuild.abi3
# -------------------------------------------------------------------------
# Rust tests
@@ -623,11 +672,6 @@ matrix:
- <<: *linux_rust_clippy
- <<: *cargo_audit
- - <<: *py27_linux_build_wheels
- - <<: *py36_linux_build_wheels
- - <<: *py27_osx_build_wheels
- - <<: *py36_osx_build_wheels
-
- <<: *py27_linux_test_config
name: "Unit tests for pants and pants-plugins (Py2.7 PEX)"
stage: *test
@@ -645,6 +689,14 @@ matrix:
script:
- ./build-support/bin/travis-ci.sh -lp
+ - <<: *py27_linux_build_wheels_ucs2
+ - <<: *py27_linux_build_wheels_ucs4
+ - <<: *py36_linux_build_wheels
+
+ - <<: *py27_osx_build_wheels_ucs2
+ - <<: *py27_osx_build_wheels_ucs4
+ - <<: *py36_osx_build_wheels
+
{{#py3_integration_shards}}
- <<: *py36_linux_test_config
name: "Integration tests for pants - shard {{.}} (Py3.6 PEX)"
diff --git a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/scrooge_gen.py b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/scrooge_gen.py
index 7f7841c4455..d81b9512c7a 100644
--- a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/scrooge_gen.py
+++ b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/scrooge_gen.py
@@ -24,7 +24,7 @@
from pants.contrib.scrooge.tasks.java_thrift_library_fingerprint_strategy import \
JavaThriftLibraryFingerprintStrategy
-from pants.contrib.scrooge.tasks.thrift_util import calculate_compile_sources
+from pants.contrib.scrooge.tasks.thrift_util import calculate_include_paths
class ScroogeGen(SimpleCodegenTask, NailgunTask):
@@ -148,7 +148,7 @@ def execute_codegen(self, target, target_workdir):
self.gen(partial_cmd, target, target_workdir)
def gen(self, partial_cmd, target, target_workdir):
- import_paths, _ = calculate_compile_sources([target], self.is_gentarget)
+ import_paths = calculate_include_paths([target], self.is_gentarget)
args = list(partial_cmd.compiler_args)
diff --git a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_linter.py b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_linter.py
index 33733b2f89a..ab32008fd11 100644
--- a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_linter.py
+++ b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_linter.py
@@ -15,7 +15,7 @@
from pants.option.ranked_value import RankedValue
from pants.task.lint_task_mixin import LintTaskMixin
-from pants.contrib.scrooge.tasks.thrift_util import calculate_compile_sources
+from pants.contrib.scrooge.tasks.thrift_util import calculate_include_paths
class ThriftLintError(Exception):
@@ -87,14 +87,14 @@ def _lint(self, target, classpath):
if not self._is_strict(target):
config_args.append('--ignore-errors')
- include_paths , paths = calculate_compile_sources([target], self._is_thrift)
+ paths = list(target.sources_relative_to_buildroot())
+ include_paths = calculate_include_paths([target], self._is_thrift)
if target.include_paths:
include_paths |= set(target.include_paths)
for p in include_paths:
config_args.extend(['--include-path', p])
- args = config_args + list(paths)
-
+ args = config_args + paths
# If runjava returns non-zero, this marks the workunit as a
# FAILURE, and there is no way to wrap this here.
diff --git a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_util.py b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_util.py
index 436513a07ec..6dc89cf4d4e 100644
--- a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_util.py
+++ b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_util.py
@@ -57,22 +57,19 @@ def find_root_thrifts(basedirs, sources, log=None):
return root_sources
-def calculate_compile_sources(targets, is_thrift_target):
- """Calculates the set of thrift source files that need to be compiled.
- It does not exclude sources that are included in other sources.
-
- A tuple of (include basedirs, thrift sources) is returned.
+def calculate_include_paths(targets, is_thrift_target):
+ """Calculates the set of import paths for the given targets.
:targets: The targets to examine.
:is_thrift_target: A predicate to pick out thrift targets for consideration in the analysis.
+
+  :returns: Include basedirs for the given targets.
"""
basedirs = set()
- sources = set()
- def collect_sources(target):
+ def collect_paths(target):
basedirs.add(target.target_base)
- sources.update(target.sources_relative_to_buildroot())
for target in targets:
- target.walk(collect_sources, predicate=is_thrift_target)
- return basedirs, sources
+ target.walk(collect_paths, predicate=is_thrift_target)
+ return basedirs
diff --git a/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_thrift_linter.py b/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_thrift_linter.py
index c40c8bc0ebf..1bf4be95537 100644
--- a/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_thrift_linter.py
+++ b/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_thrift_linter.py
@@ -31,25 +31,47 @@ def alias_groups(cls):
def task_type(cls):
return ThriftLinter
- @patch('pants.contrib.scrooge.tasks.thrift_linter.calculate_compile_sources')
- def test_lint(self, mock_calculate_compile_sources):
+ @patch('pants.contrib.scrooge.tasks.thrift_linter.calculate_include_paths')
+ def test_lint(self, mock_calculate_include_paths):
def get_default_jvm_options():
return self.task_type().get_jvm_options_default(self.context().options.for_global_scope())
- thrift_target = self.create_library('a', 'java_thrift_library', 'a', ['A.thrift'])
+ thrift_target = self.create_library('src/thrift/tweet', 'java_thrift_library', 'a', ['A.thrift'])
task = self.create_task(self.context(target_roots=thrift_target))
self._prepare_mocks(task)
expected_include_paths = ['src/thrift/users', 'src/thrift/tweet']
- expected_paths = ['src/thrift/tweet/a.thrift', 'src/thrift/tweet/b.thrift']
- mock_calculate_compile_sources.return_value = (expected_include_paths, expected_paths)
+ mock_calculate_include_paths.return_value = expected_include_paths
task._lint(thrift_target, task.tool_classpath('scrooge-linter'))
self._run_java_mock.assert_called_once_with(
classpath='foo_classpath',
main='com.twitter.scrooge.linter.Main',
args=['--fatal-warnings', '--ignore-errors', '--include-path', 'src/thrift/users',
- '--include-path', 'src/thrift/tweet', 'src/thrift/tweet/a.thrift',
- 'src/thrift/tweet/b.thrift'],
+ '--include-path', 'src/thrift/tweet', 'src/thrift/tweet/A.thrift'],
+ jvm_options=get_default_jvm_options(),
+ workunit_labels=[WorkUnitLabel.COMPILER, WorkUnitLabel.SUPPRESS_LABEL])
+
+ @patch('pants.contrib.scrooge.tasks.thrift_linter.calculate_include_paths')
+ def test_lint_direct_only(self, mock_calculate_include_paths):
+ # Validate that we do lint only the direct sources of a target, rather than including the
+ # sources of its transitive deps.
+
+ def get_default_jvm_options():
+ return self.task_type().get_jvm_options_default(self.context().options.for_global_scope())
+
+ self.create_library('src/thrift/tweet', 'java_thrift_library', 'a', ['A.thrift'])
+ target_b = self.create_library('src/thrift/tweet', 'java_thrift_library', 'b', ['B.thrift'], dependencies=[':a'])
+ task = self.create_task(self.context(target_roots=target_b))
+ self._prepare_mocks(task)
+ mock_calculate_include_paths.return_value = ['src/thrift/tweet']
+ task._lint(target_b, task.tool_classpath('scrooge-linter'))
+
+ # Confirm that we did not include the sources of the dependency.
+ self._run_java_mock.assert_called_once_with(
+ classpath='foo_classpath',
+ main='com.twitter.scrooge.linter.Main',
+ args=['--fatal-warnings', '--ignore-errors',
+ '--include-path', 'src/thrift/tweet', 'src/thrift/tweet/B.thrift'],
jvm_options=get_default_jvm_options(),
workunit_labels=[WorkUnitLabel.COMPILER, WorkUnitLabel.SUPPRESS_LABEL])
diff --git a/examples/src/python/example/3rdparty_py.md b/examples/src/python/example/3rdparty_py.md
index 64d8c328dfa..0b200bcc374 100644
--- a/examples/src/python/example/3rdparty_py.md
+++ b/examples/src/python/example/3rdparty_py.md
@@ -80,6 +80,9 @@ with which your binary is intended to be compatible in the `platforms` field of
wheel files for each package
and platform available at build time.
+If explicitly set, the `platforms` field of your `python_binary` target will be used both for the
+target itself and for its dependencies; otherwise Pants falls back to the `python-setup.platforms` option value.
+
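For illustration, a minimal sketch of a `python_binary` that sets `platforms` explicitly. The target name, source file, dependency, and platform strings below are hypothetical examples, not values taken from this repo:

```python
# BUILD (sketch): a python_binary that requests wheels for specific platforms.
# The platform identifiers must correspond to wheels reachable via python-repos at build time.
python_binary(
  name='my_service',
  source='main.py',
  dependencies=[
    '3rdparty/python:requests',
  ],
  platforms=[
    'current',               # the platform Pants itself is running on
    'linux-x86_64',
    'macosx-10.12-x86_64',
  ],
)
```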
Pants will look for those files in the location specified in the
[[`python-repos`|pants('src/docs:setup_repo')#redirecting-python-requirements-to-other-servers]] field
in pants.ini. It can understand either a simple local directory of .whl files or a "find links"-friendly
diff --git a/examples/src/wire/org/pantsbuild/example/element/BUILD b/examples/src/wire/org/pantsbuild/example/element/BUILD
index 916a7f943a1..0afde70a649 100644
--- a/examples/src/wire/org/pantsbuild/example/element/BUILD
+++ b/examples/src/wire/org/pantsbuild/example/element/BUILD
@@ -3,10 +3,12 @@
java_wire_library(
sources=[
- 'elements.proto', # Order matters here.
+ # NB: Order matters for these two paths, so we set `ordered_sources=True` below.
+ 'elements.proto',
'compound.proto',
],
dependencies=[
'examples/src/wire/org/pantsbuild/example/temperature',
],
+ ordered_sources=True,
)
diff --git a/src/docs/common_tasks/BUILD b/src/docs/common_tasks/BUILD
index d799aed4088..4c9bc4e558e 100644
--- a/src/docs/common_tasks/BUILD
+++ b/src/docs/common_tasks/BUILD
@@ -132,3 +132,9 @@ page(
name='thrift_gen',
source='thrift_gen.md',
)
+
+
+page(
+ name='python_proto_gen',
+ source='python_proto_gen.md',
+)
diff --git a/src/docs/common_tasks/python_proto_gen.md b/src/docs/common_tasks/python_proto_gen.md
new file mode 100644
index 00000000000..124a94e6eea
--- /dev/null
+++ b/src/docs/common_tasks/python_proto_gen.md
@@ -0,0 +1,50 @@
+# Python gRPC + protobufs
+
+## Problem
+You have `.proto` files defining protobufs and gRPC services, and you want Pants to generate Python code from them that you can use from a Python application.
+
+## Solution
+Create `python_grpcio_library` targets and use the `gen` goal to generate code from the `.proto` files. The codegen task `grpcio-run` uses Python's [grpcio](https://grpc.io/) library to generate Python code from `.proto` files.
+
+## Usage
+
+In a `BUILD` file near your `.proto` files, create a `python_grpcio_library` target with your protos as its `sources`.
+
+```build
+python_grpcio_library(
+ sources=['service.proto'],
+ dependencies=[
+ '3rdparty/python:protobuf',
+ ]
+)
+```
+
+Then, add a dependency on this target in the `dependencies` section of your `python_binary` target:
+
+```build
+python_binary(
+ source='server.py',
+ dependencies=[
+# [...]
+ 'examples/src/protobuf/org/pantsbuild/example/grpcio/service'
+ ],
+)
+```
+
+## Example
+An example Python gRPC client and server can be found in [/examples/src/python/example/grpcio](https://github.com/pantsbuild/pants/tree/master/examples/src/python/example/grpcio).
+
+To start the example gRPC server, run:
+```bash
+./pants run examples/src/python/example/grpcio/server
+```
+
+Once the server is running, run the example client:
+```bash
+./pants run examples/src/python/example/grpcio/client
+```
+
+The generated code can be found, as usual, in the Pants output directory:
+```bash
+.pants.d/gen/grpcio-run/current/examples.src.protobuf.org.pantsbuild.example.service.service/current/org/pantsbuild/example/service
+```
diff --git a/src/docs/docsite.json b/src/docs/docsite.json
index d36c5bd33f3..bfbbc705ed8 100644
--- a/src/docs/docsite.json
+++ b/src/docs/docsite.json
@@ -96,6 +96,7 @@
"test_suite": "dist/markdown/html/src/docs/common_tasks/test_suite.html",
"thrift_deps": "dist/markdown/html/examples/src/thrift/org/pantsbuild/example/README.html",
"thrift_gen": "dist/markdown/html/src/docs/common_tasks/thrift_gen.html",
+ "grpcio_gen": "dist/markdown/html/src/docs/common_tasks/python_proto_gen.html",
"tshoot": "dist/markdown/html/src/docs/tshoot.html",
"why_use_pants": "dist/markdown/html/src/docs/why_use_pants.html"
},
@@ -173,6 +174,7 @@
{"heading": "Code & Doc Generation"},
{"pages" : [
"thrift_deps",
+ "grpcio_gen",
"page"
]
},
diff --git a/src/python/pants/BUILD b/src/python/pants/BUILD
index f35f51e30b7..e79b46788d2 100644
--- a/src/python/pants/BUILD
+++ b/src/python/pants/BUILD
@@ -10,7 +10,7 @@ target(
python_library(
name='pants-packaged',
- sources=[],
+ sources=['dummy.c'],
dependencies=[
':version',
],
@@ -18,7 +18,7 @@ python_library(
name='pantsbuild.pants',
description='A scalable build tool for large, complex, heterogeneous repos.',
namespace_packages=['pants', 'pants.backend'],
- ext_modules=[('native_engine', {'sources': []})],
+ ext_modules=[('native_engine', {'sources': ['src/pants/dummy.c']})],
).with_binaries(
pants='src/python/pants/bin:pants',
)
diff --git a/src/python/pants/VERSION b/src/python/pants/VERSION
index 2df5c067b0c..96cdf36dad0 100644
--- a/src/python/pants/VERSION
+++ b/src/python/pants/VERSION
@@ -1 +1 @@
-1.14.0rc0
+1.15.0.dev1
diff --git a/src/python/pants/backend/codegen/wire/java/java_wire_library.py b/src/python/pants/backend/codegen/wire/java/java_wire_library.py
index d5cbd9b3fbe..93391af49c4 100644
--- a/src/python/pants/backend/codegen/wire/java/java_wire_library.py
+++ b/src/python/pants/backend/codegen/wire/java/java_wire_library.py
@@ -32,6 +32,7 @@ def __init__(self,
registry_class=None,
enum_options=None,
no_options=None,
+ ordered_sources=None,
**kwargs):
"""
:param string service_writer: the name of the class to pass as the --service_writer option to
@@ -43,6 +44,9 @@ def __init__(self,
doubt, specify com.squareup.wire.SimpleServiceWriter
:param list enum_options: list of enums to pass to as the --enum-enum_options option, # optional
:param boolean no_options: boolean that determines if --no_options flag is passed
+    :param boolean ordered_sources: whether the sources argument represents literal ordered
+      sources to be passed directly to the compiler. If False, no ordering is guaranteed for
+      the sources passed to an individual compiler invocation.
"""
if not service_writer and service_writer_options:
@@ -59,6 +63,7 @@ def __init__(self,
'registry_class': PrimitiveField(registry_class or None),
'enum_options': PrimitiveField(enum_options or []),
'no_options': PrimitiveField(no_options or False),
+ 'ordered_sources': PrimitiveField(ordered_sources or False),
})
super(JavaWireLibrary, self).__init__(payload=payload, **kwargs)
diff --git a/src/python/pants/backend/codegen/wire/java/wire_gen.py b/src/python/pants/backend/codegen/wire/java/wire_gen.py
index 84b378d9136..acfb8d58f11 100644
--- a/src/python/pants/backend/codegen/wire/java/wire_gen.py
+++ b/src/python/pants/backend/codegen/wire/java/wire_gen.py
@@ -13,10 +13,12 @@
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.tasks.nailgun_task import NailgunTaskBase
from pants.base.build_environment import get_buildroot
-from pants.base.exceptions import TaskError
+from pants.base.exceptions import TargetDefinitionException, TaskError
from pants.base.workunit import WorkUnitLabel
from pants.java.jar.jar_dependency import JarDependency
+from pants.source.filespec import globs_matches
from pants.task.simple_codegen_task import SimpleCodegenTask
+from pants.util.dirutil import fast_relpath
logger = logging.getLogger(__name__)
@@ -61,24 +63,47 @@ def synthetic_target_extra_dependencies(self, target, target_workdir):
wire_runtime_deps_spec = self.get_options().javadeps
return self.resolve_deps([wire_runtime_deps_spec])
- def format_args_for_target(self, target, target_workdir):
- """Calculate the arguments to pass to the command line for a single target."""
- sources = OrderedSet(target.sources_relative_to_buildroot())
-
+ def _compute_sources(self, target):
relative_sources = OrderedSet()
- source_roots = set()
- for source in sources:
+ source_roots = OrderedSet()
+
+ def capture_and_relativize_to_source_root(source):
source_root = self.context.source_roots.find_by_path(source)
if not source_root:
source_root = self.context.source_roots.find(target)
source_roots.add(source_root.path)
- relative_source = os.path.relpath(source, source_root.path)
- relative_sources.add(relative_source)
+ return fast_relpath(source, source_root.path)
+
+ if target.payload.get_field_value('ordered_sources'):
+ # Re-match the filespecs against the sources in order to apply them in the literal order
+ # they were specified in.
+ filespec = target.globs_relative_to_buildroot()
+ excludes = filespec.get('excludes', [])
+ for filespec in filespec.get('globs', []):
+ sources = [s for s in target.sources_relative_to_buildroot()
+ if globs_matches([s], [filespec], excludes)]
+ if len(sources) != 1:
+ raise TargetDefinitionException(
+ target,
+ 'With `ordered_sources=True`, expected one match for each file literal, '
+ 'but got: {} for literal `{}`.'.format(sources, filespec)
+ )
+ relative_sources.add(capture_and_relativize_to_source_root(sources[0]))
+ else:
+ # Otherwise, use the default (unspecified) snapshot ordering.
+ for source in target.sources_relative_to_buildroot():
+ relative_sources.add(capture_and_relativize_to_source_root(source))
+ return relative_sources, source_roots
+
+ def format_args_for_target(self, target, target_workdir):
+ """Calculate the arguments to pass to the command line for a single target."""
args = ['--java_out={0}'.format(target_workdir)]
# Add all params in payload to args
+ relative_sources, source_roots = self._compute_sources(target)
+
if target.payload.get_field_value('no_options'):
args.append('--no_options')
diff --git a/src/python/pants/backend/graph_info/tasks/cloc.py b/src/python/pants/backend/graph_info/tasks/cloc.py
index 6019ef33792..e1903d9dfd5 100644
--- a/src/python/pants/backend/graph_info/tasks/cloc.py
+++ b/src/python/pants/backend/graph_info/tasks/cloc.py
@@ -40,7 +40,7 @@ def console_output(self, targets):
input_snapshots = tuple(
target.sources_snapshot(scheduler=self.context._scheduler) for target in targets
)
- input_files = {f.path for snapshot in input_snapshots for f in snapshot.files}
+ input_files = {f for snapshot in input_snapshots for f in snapshot.files}
# TODO: Work out a nice library-like utility for writing an argfile, as this will be common.
with temporary_dir() as tmpdir:
diff --git a/src/python/pants/backend/jvm/tasks/coursier/coursier_subsystem.py b/src/python/pants/backend/jvm/tasks/coursier/coursier_subsystem.py
index fd6ffc2fb3d..a0f92ba78bb 100644
--- a/src/python/pants/backend/jvm/tasks/coursier/coursier_subsystem.py
+++ b/src/python/pants/backend/jvm/tasks/coursier/coursier_subsystem.py
@@ -77,9 +77,8 @@ def bootstrap_coursier(self, workunit_factory):
bootstrap_jar_path = os.path.join(coursier_bootstrap_dir, 'coursier.jar')
- with workunit_factory(name='bootstrap-coursier', labels=[WorkUnitLabel.TOOL]) as workunit:
-
- if not os.path.exists(bootstrap_jar_path):
+ if not os.path.exists(bootstrap_jar_path):
+ with workunit_factory(name='bootstrap-coursier', labels=[WorkUnitLabel.TOOL]) as workunit:
with safe_concurrent_creation(bootstrap_jar_path) as temp_path:
fetcher = Fetcher(get_buildroot())
checksummer = fetcher.ChecksumListener(digest=hashlib.sha1())
@@ -98,4 +97,4 @@ def bootstrap_coursier(self, workunit_factory):
else:
workunit.set_outcome(WorkUnit.SUCCESS)
- return bootstrap_jar_path
+ return bootstrap_jar_path
diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/javac/javac_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/javac/javac_compile.py
index f9f783947ab..dee55bc492f 100644
--- a/src/python/pants/backend/jvm/tasks/jvm_compile/javac/javac_compile.py
+++ b/src/python/pants/backend/jvm/tasks/jvm_compile/javac/javac_compile.py
@@ -209,8 +209,8 @@ def _execute_hermetic_compile(self, cmd, ctx):
# Assume no extra .class files to grab. We'll fix up that case soon.
# Drop the source_root from the file path.
# Assumes `-d .` has been put in the command.
- os.path.relpath(f.path.replace('.java', '.class'), ctx.target.target_base)
- for f in input_snapshot.files if f.path.endswith('.java')
+ os.path.relpath(f.replace('.java', '.class'), ctx.target.target_base)
+ for f in input_snapshot.files if f.endswith('.java')
)
exec_process_request = ExecuteProcessRequest(
argv=tuple(cmd),
diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py
index 0d0732f59dc..6eada1e9327 100644
--- a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py
+++ b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py
@@ -401,7 +401,7 @@ def do_compile(self, invalidation_check, compile_contexts, classpath_product):
valid_targets = [vt.target for vt in invalidation_check.all_vts if vt.valid]
if self.execution_strategy == self.HERMETIC:
- self._set_direcotry_digests_for_valid_target_classpath_directories(valid_targets, compile_contexts)
+ self._set_directory_digests_for_valid_target_classpath_directories(valid_targets, compile_contexts)
for valid_target in valid_targets:
cc = self.select_runtime_context(compile_contexts[valid_target])
@@ -451,12 +451,12 @@ def _record_compile_classpath(self, classpath, target, outdir):
with open(path, 'w') as f:
f.write(text)
- def _set_direcotry_digests_for_valid_target_classpath_directories(self, valid_targets, compile_contexts):
+ def _set_directory_digests_for_valid_target_classpath_directories(self, valid_targets, compile_contexts):
snapshots = self.context._scheduler.capture_snapshots(
tuple(PathGlobsAndRoot(PathGlobs(
[self._get_relative_classes_dir_from_target(target, compile_contexts)]
), get_buildroot()) for target in valid_targets))
- [self._set_direcotry_digest_for_compile_context(
+ [self._set_directory_digest_for_compile_context(
snapshot.directory_digest, target, compile_contexts)
for target, snapshot in list(zip(valid_targets, snapshots))]
@@ -464,7 +464,7 @@ def _get_relative_classes_dir_from_target(self, target, compile_contexts):
cc = self.select_runtime_context(compile_contexts[target])
return fast_relpath(cc.classes_dir.path, get_buildroot()) + '/**'
- def _set_direcotry_digest_for_compile_context(self, directory_digest, target, compile_contexts):
+ def _set_directory_digest_for_compile_context(self, directory_digest, target, compile_contexts):
cc = self.select_runtime_context(compile_contexts[target])
new_classpath_entry = ClasspathEntry(cc.classes_dir.path, directory_digest)
cc.classes_dir = new_classpath_entry
diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py
index eec90ae5ecb..4266f4f5e02 100644
--- a/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py
+++ b/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py
@@ -33,8 +33,7 @@
from pants.java.jar.jar_dependency import JarDependency
from pants.reporting.reporting_utils import items_to_report_element
from pants.util.contextutil import Timer
-from pants.util.dirutil import (fast_relpath, fast_relpath_optional, maybe_read_file,
- safe_file_dump, safe_mkdir)
+from pants.util.dirutil import fast_relpath, fast_relpath_optional, safe_mkdir
from pants.util.memo import memoized_property
@@ -60,20 +59,6 @@ def stdout_contents(wu):
return f.read().rstrip()
-def dump_digest(output_dir, digest):
- safe_file_dump('{}.digest'.format(output_dir),
- '{}:{}'.format(digest.fingerprint, digest.serialized_bytes_length), mode='w')
-
-
-def load_digest(output_dir):
- read_file = maybe_read_file('{}.digest'.format(output_dir), binary_mode=False)
- if read_file:
- fingerprint, length = read_file.split(':')
- return Digest(fingerprint, int(length))
- else:
- return None
-
-
def _create_desandboxify_fn(possible_path_patterns):
# Takes a collection of possible canonical prefixes, and returns a function that
# if it finds a matching prefix, strips the path prior to the prefix and returns it
@@ -130,7 +115,7 @@ def __init__(self, *args, **kwargs):
@classmethod
def implementation_version(cls):
- return super(RscCompile, cls).implementation_version() + [('RscCompile', 171)]
+ return super(RscCompile, cls).implementation_version() + [('RscCompile', 172)]
@classmethod
def register_options(cls, register):
@@ -199,11 +184,11 @@ def _nailgunnable_combined_classpath(self):
# Overrides the normal zinc compiler classpath, which only contains zinc.
def get_zinc_compiler_classpath(self):
- return self.do_for_execution_strategy_variant({
+ return self.execution_strategy_enum.resolve_for_enum_variant({
self.HERMETIC: lambda: super(RscCompile, self).get_zinc_compiler_classpath(),
self.SUBPROCESS: lambda: super(RscCompile, self).get_zinc_compiler_classpath(),
self.NAILGUN: lambda: self._nailgunnable_combined_classpath,
- })
+ })()
def register_extra_products_from_contexts(self, targets, compile_contexts):
super(RscCompile, self).register_extra_products_from_contexts(targets, compile_contexts)
@@ -216,7 +201,7 @@ def pathglob_for(filename):
def to_classpath_entries(paths, scheduler):
# list of path ->
# list of (path, optional) ->
- path_and_digests = [(p, load_digest(os.path.dirname(p))) for p in paths]
+ path_and_digests = [(p, Digest.load(os.path.dirname(p))) for p in paths]
# partition: list of path, list of tuples
paths_without_digests = [p for (p, d) in path_and_digests if not d]
if paths_without_digests:
@@ -823,7 +808,7 @@ def _runtool_hermetic(self, main, tool_name, args, distribution, tgt=None, input
raise TaskError(res.stderr)
if output_dir:
- dump_digest(output_dir, res.output_directory_digest)
+ res.output_directory_digest.dump(output_dir)
self.context._scheduler.materialize_directories((
DirectoryToMaterialize(
# NB the first element here is the root to materialize into, not the dir to snapshot
@@ -859,7 +844,7 @@ def _runtool_nonhermetic(self, parent_workunit, classpath, main, tool_name, args
def _runtool(self, main, tool_name, args, distribution,
tgt=None, input_files=tuple(), input_digest=None, output_dir=None):
with self.context.new_workunit(tool_name) as wu:
- return self.do_for_execution_strategy_variant({
+ return self.execution_strategy_enum.resolve_for_enum_variant({
self.HERMETIC: lambda: self._runtool_hermetic(
main, tool_name, args, distribution,
tgt=tgt, input_files=input_files, input_digest=input_digest, output_dir=output_dir),
@@ -867,7 +852,7 @@ def _runtool(self, main, tool_name, args, distribution,
wu, self.tool_classpath(tool_name), main, tool_name, args, distribution),
self.NAILGUN: lambda: self._runtool_nonhermetic(
wu, self._nailgunnable_combined_classpath, main, tool_name, args, distribution),
- })
+ })()
def _run_metai_tool(self,
distribution,
diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/zinc/zinc_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/zinc/zinc_compile.py
index 4cdef91fb23..a447cdb0af1 100644
--- a/src/python/pants/backend/jvm/tasks/jvm_compile/zinc/zinc_compile.py
+++ b/src/python/pants/backend/jvm/tasks/jvm_compile/zinc/zinc_compile.py
@@ -386,71 +386,90 @@ def relative_to_exec_root(path):
with open(ctx.zinc_args_file, 'w') as fp:
for arg in zinc_args:
# NB: in Python 2, options are stored sometimes as bytes and sometimes as unicode in the OptionValueContainer.
- # This is due to how Python 2 natively stores attributes as a map of `str` (aka `bytes`) to their value. So,
+ # This is due to how Python 2 natively stores attributes as a map of `str` (aka `bytes`) to their value. So,
# the setattr() and getattr() functions sometimes use bytes.
if PY2:
arg = ensure_text(arg)
fp.write(arg)
fp.write('\n')
- if self.execution_strategy == self.HERMETIC:
- zinc_relpath = fast_relpath(self._zinc.zinc, get_buildroot())
-
- snapshots = [
- self._zinc.snapshot(self.context._scheduler),
- ctx.target.sources_snapshot(self.context._scheduler),
- ]
-
- relevant_classpath_entries = dependency_classpath + [compiler_bridge_classpath_entry]
- directory_digests = tuple(
- entry.directory_digest for entry in relevant_classpath_entries if entry.directory_digest
- )
- if len(directory_digests) != len(relevant_classpath_entries):
- for dep in relevant_classpath_entries:
- if dep.directory_digest is None:
- logger.warning(
- "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
- "execution".format(dep)
- )
-
- snapshots.extend(
- classpath_entry.directory_digest for classpath_entry in scalac_classpath_entries
- )
-
- merged_input_digest = self.context._scheduler.merge_directories(
- tuple(s.directory_digest for s in (snapshots)) + directory_digests
- )
-
- # TODO: Extract something common from Executor._create_command to make the command line
- # TODO: Lean on distribution for the bin/java appending here
- argv = tuple(['.jdk/bin/java'] + jvm_options + ['-cp', zinc_relpath, Zinc.ZINC_COMPILE_MAIN] + zinc_args)
- req = ExecuteProcessRequest(
- argv=argv,
- input_files=merged_input_digest,
- output_directories=(classes_dir,),
- description="zinc compile for {}".format(ctx.target.address.spec),
- # TODO: These should always be unicodes
- # Since this is always hermetic, we need to use `underlying_dist`
- jdk_home=text_type(self._zinc.underlying_dist.home),
- )
- res = self.context.execute_process_synchronously_or_raise(req, self.name(), [WorkUnitLabel.COMPILER])
-
- # TODO: Materialize as a batch in do_compile or somewhere
- self.context._scheduler.materialize_directories((
- DirectoryToMaterialize(get_buildroot(), res.output_directory_digest),
- ))
-
- # TODO: This should probably return a ClasspathEntry rather than a Digest
- return res.output_directory_digest
- else:
- if self.runjava(classpath=self.get_zinc_compiler_classpath(),
- main=Zinc.ZINC_COMPILE_MAIN,
- jvm_options=jvm_options,
- args=zinc_args,
- workunit_name=self.name(),
- workunit_labels=[WorkUnitLabel.COMPILER],
- dist=self._zinc.dist):
- raise TaskError('Zinc compile failed.')
+ return self.execution_strategy_enum.resolve_for_enum_variant({
+ self.HERMETIC: lambda: self._compile_hermetic(
+ jvm_options, ctx, classes_dir, zinc_args, compiler_bridge_classpath_entry,
+ dependency_classpath, scalac_classpath_entries),
+ self.SUBPROCESS: lambda: self._compile_nonhermetic(jvm_options, zinc_args),
+ self.NAILGUN: lambda: self._compile_nonhermetic(jvm_options, zinc_args),
+ })()
+
+ class ZincCompileError(TaskError):
+ """An exception type specifically to signal a failed zinc execution."""
+
+ def _compile_nonhermetic(self, jvm_options, zinc_args):
+ exit_code = self.runjava(classpath=self.get_zinc_compiler_classpath(),
+ main=Zinc.ZINC_COMPILE_MAIN,
+ jvm_options=jvm_options,
+ args=zinc_args,
+ workunit_name=self.name(),
+ workunit_labels=[WorkUnitLabel.COMPILER],
+ dist=self._zinc.dist)
+ if exit_code != 0:
+ raise self.ZincCompileError('Zinc compile failed.', exit_code=exit_code)
+
+ def _compile_hermetic(self, jvm_options, ctx, classes_dir, zinc_args,
+ compiler_bridge_classpath_entry, dependency_classpath,
+ scalac_classpath_entries):
+ zinc_relpath = fast_relpath(self._zinc.zinc, get_buildroot())
+
+ snapshots = [
+ self._zinc.snapshot(self.context._scheduler),
+ ctx.target.sources_snapshot(self.context._scheduler),
+ ]
+
+ relevant_classpath_entries = dependency_classpath + [compiler_bridge_classpath_entry]
+ directory_digests = tuple(
+ entry.directory_digest for entry in relevant_classpath_entries if entry.directory_digest
+ )
+ if len(directory_digests) != len(relevant_classpath_entries):
+ for dep in relevant_classpath_entries:
+ if dep.directory_digest is None:
+ logger.warning(
+ "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
+ "execution".format(dep)
+ )
+
+ snapshots.extend(
+ classpath_entry.directory_digest for classpath_entry in scalac_classpath_entries
+ )
+
+ # TODO: Extract something common from Executor._create_command to make the command line
+ # TODO: Lean on distribution for the bin/java appending here
+ merged_input_digest = self.context._scheduler.merge_directories(
+ tuple(s.directory_digest for s in snapshots) + directory_digests
+ )
+ argv = ['.jdk/bin/java'] + jvm_options + [
+ '-cp', zinc_relpath,
+ Zinc.ZINC_COMPILE_MAIN
+ ] + zinc_args
+
+ req = ExecuteProcessRequest(
+ argv=tuple(argv),
+ input_files=merged_input_digest,
+ output_directories=(classes_dir,),
+ description="zinc compile for {}".format(ctx.target.address.spec),
+ # TODO: These should always be unicodes
+ # Since this is always hermetic, we need to use `underlying_dist`
+ jdk_home=text_type(self._zinc.underlying_dist.home),
+ )
+ res = self.context.execute_process_synchronously_or_raise(
+ req, self.name(), [WorkUnitLabel.COMPILER])
+
+ # TODO: Materialize as a batch in do_compile or somewhere
+ self.context._scheduler.materialize_directories((
+ DirectoryToMaterialize(get_buildroot(), res.output_directory_digest),
+ ))
+
+ # TODO: This should probably return a ClasspathEntry rather than a Digest
+ return res.output_directory_digest
def get_zinc_compiler_classpath(self):
"""Get the classpath for the zinc compiler JVM tool.
diff --git a/src/python/pants/backend/jvm/tasks/nailgun_task.py b/src/python/pants/backend/jvm/tasks/nailgun_task.py
index ef6cef73318..dbc3229a5e3 100644
--- a/src/python/pants/backend/jvm/tasks/nailgun_task.py
+++ b/src/python/pants/backend/jvm/tasks/nailgun_task.py
@@ -15,6 +15,7 @@
from pants.process.subprocess import Subprocess
from pants.task.task import Task, TaskBase
from pants.util.memo import memoized_property
+from pants.util.objects import enum, register_enum_option
class NailgunTaskBase(JvmToolTaskMixin, TaskBase):
@@ -24,30 +25,16 @@ class NailgunTaskBase(JvmToolTaskMixin, TaskBase):
SUBPROCESS = 'subprocess'
HERMETIC = 'hermetic'
- class InvalidExecutionStrategyMapping(Exception): pass
-
- _all_execution_strategies = frozenset([NAILGUN, SUBPROCESS, HERMETIC])
-
- def do_for_execution_strategy_variant(self, mapping):
- """Invoke the method in `mapping` with the key corresponding to the execution strategy.
-
- `mapping` is a dict mapping execution strategy -> zero-argument lambda.
- """
- variants = frozenset(mapping.keys())
- if variants != self._all_execution_strategies:
- raise self.InvalidExecutionStrategyMapping(
- 'Must specify a mapping with exactly the keys {} (was: {})'
- .format(self._all_execution_strategies, variants))
- method_for_variant = mapping[self.execution_strategy]
- # The methods need not return a value, but we pass it along if they do.
- return method_for_variant()
+ class ExecutionStrategy(enum([NAILGUN, SUBPROCESS, HERMETIC])): pass
@classmethod
def register_options(cls, register):
super(NailgunTaskBase, cls).register_options(register)
- register('--execution-strategy', choices=[cls.NAILGUN, cls.SUBPROCESS, cls.HERMETIC], default=cls.NAILGUN,
- help='If set to nailgun, nailgun will be enabled and repeated invocations of this '
- 'task will be quicker. If set to subprocess, then the task will be run without nailgun.')
+ register_enum_option(
+ register, cls.ExecutionStrategy, '--execution-strategy',
+ help='If set to nailgun, nailgun will be enabled and repeated invocations of this '
+ 'task will be quicker. If set to subprocess, then the task will be run without nailgun. '
+ 'Hermetic execution is an experimental subprocess execution framework.')
register('--nailgun-timeout-seconds', advanced=True, default=10, type=float,
help='Timeout (secs) for nailgun startup.')
register('--nailgun-connect-attempts', advanced=True, default=5, type=int,
@@ -60,6 +47,13 @@ def register_options(cls, register):
rev='0.9.1'),
])
+ @memoized_property
+ def execution_strategy_enum(self):
+    # TODO: This .create() call can be removed once the enum interface is more stable, since the
+    # option will already have been converted into an instance of self.ExecutionStrategy via the
+    # `type` argument passed through register_enum_option().
+ return self.ExecutionStrategy.create(self.get_options().execution_strategy)
+
@classmethod
def subsystem_dependencies(cls):
return super(NailgunTaskBase, cls).subsystem_dependencies() + (Subprocess.Factory,)
@@ -76,9 +70,10 @@ def __init__(self, *args, **kwargs):
self._executor_workdir = os.path.join(self.context.options.for_global_scope().pants_workdir,
*id_tuple)
- @memoized_property
+ # TODO: eventually deprecate this when we can move all subclasses to use the enum!
+ @property
def execution_strategy(self):
- return self.get_options().execution_strategy
+ return self.execution_strategy_enum.value
def create_java_executor(self, dist=None):
"""Create java executor that uses this task's ng daemon, if allowed.
diff --git a/src/python/pants/backend/native/config/environment.py b/src/python/pants/backend/native/config/environment.py
index 6d8a55b30ea..b433535668f 100644
--- a/src/python/pants/backend/native/config/environment.py
+++ b/src/python/pants/backend/native/config/environment.py
@@ -5,46 +5,123 @@
from __future__ import absolute_import, division, print_function, unicode_literals
import os
-from abc import abstractproperty
+from abc import abstractmethod, abstractproperty
from pants.engine.rules import SingletonRule
+from pants.util.memo import memoized_classproperty
from pants.util.meta import AbstractClass
-from pants.util.objects import datatype
+from pants.util.objects import datatype, enum
from pants.util.osutil import all_normalized_os_names, get_normalized_os_name
from pants.util.strutil import create_path_env_var
-class Platform(datatype(['normalized_os_name'])):
+class Platform(enum('normalized_os_name', all_normalized_os_names())):
- class UnsupportedPlatformError(Exception):
- """Thrown if pants is running on an unrecognized platform."""
+ default_value = get_normalized_os_name()
- @classmethod
- def create(cls):
- return Platform(get_normalized_os_name())
- _NORMALIZED_OS_NAMES = frozenset(all_normalized_os_names())
+def _list_field(func):
+  """A decorator for methods corresponding to list-valued fields of an `_ExtensibleAlgebraic`.
- def resolve_platform_specific(self, platform_specific_funs):
- arg_keys = frozenset(platform_specific_funs.keys())
- unknown_plats = self._NORMALIZED_OS_NAMES - arg_keys
- if unknown_plats:
- raise self.UnsupportedPlatformError(
- "platform_specific_funs {} must support platforms {}"
- .format(platform_specific_funs, list(unknown_plats)))
- extra_plats = arg_keys - self._NORMALIZED_OS_NAMES
- if extra_plats:
- raise self.UnsupportedPlatformError(
- "platform_specific_funs {} has unrecognized platforms {}"
- .format(platform_specific_funs, list(extra_plats)))
+ The result is also wrapped in `abstractproperty`.
+ """
+ wrapped = abstractproperty(func)
+ wrapped._field_type = 'list'
+ return wrapped
- fun_for_platform = platform_specific_funs[self.normalized_os_name]
- return fun_for_platform()
+def _algebraic_data(metaclass):
+ """A class decorator to pull out `_list_fields` from a mixin class for use with a `datatype`."""
+ def wrapper(cls):
+ cls.__bases__ += (metaclass,)
+ cls._list_fields = metaclass._list_fields
+ return cls
+ return wrapper
-class Executable(AbstractClass):
- @abstractproperty
+# NB: prototypal inheritance seems *deeply* linked with the idea here!
+# TODO: since we are calling these methods from other files, we should remove the leading underscore
+# and add testing!
+class _ExtensibleAlgebraic(AbstractClass):
+  """A mixin that makes it more concise to compose datatypes sharing related collection fields."""
+
+ @memoized_classproperty
+ def _list_fields(cls):
+ all_list_fields = []
+ for field_name in cls.__abstractmethods__:
+ f = getattr(cls, field_name)
+ if getattr(f, '_field_type', None) == 'list':
+ all_list_fields.append(field_name)
+ return frozenset(all_list_fields)
+
+ @abstractmethod
+ def copy(self, **kwargs):
+ """Implementations should have the same behavior as a `datatype()`'s `copy()` method."""
+
+ class AlgebraicDataError(Exception): pass
+
+ def _single_list_field_operation(self, field_name, list_value, prepend=True):
+ if field_name not in self._list_fields:
+ raise self.AlgebraicDataError(
+        "Field '{}' is not in this object's set of declared list fields: {} (this object is: {})."
+ .format(field_name, self._list_fields, self))
+ cur_value = getattr(self, field_name)
+
+ if prepend:
+ new_value = list_value + cur_value
+ else:
+ new_value = cur_value + list_value
+
+ arg_dict = {field_name: new_value}
+ return self.copy(**arg_dict)
+
+ def prepend_field(self, field_name, list_value):
+ """Return a copy of this object with `list_value` prepended to the field named `field_name`."""
+ return self._single_list_field_operation(field_name, list_value, prepend=True)
+
+ def append_field(self, field_name, list_value):
+ """Return a copy of this object with `list_value` appended to the field named `field_name`."""
+ return self._single_list_field_operation(field_name, list_value, prepend=False)
+
+ def sequence(self, other, exclude_list_fields=None):
+ """Return a copy of this object which combines all the fields common to both `self` and `other`.
+
+ List fields will be concatenated.
+
+ The return type of this method is the type of `self` (or whatever `.copy()` returns), but the
+ `other` argument can be any `_ExtensibleAlgebraic` instance.
+ """
+ exclude_list_fields = frozenset(exclude_list_fields or [])
+ overwrite_kwargs = {}
+
+ nonexistent_excluded_fields = exclude_list_fields - self._list_fields
+ if nonexistent_excluded_fields:
+ raise self.AlgebraicDataError(
+ "Fields {} to exclude from a sequence() were not found in this object's list fields: {}. "
+ "This object is {}, the other object is {}."
+ .format(nonexistent_excluded_fields, self._list_fields, self, other))
+
+ shared_list_fields = (self._list_fields
+ & other._list_fields
+ - exclude_list_fields)
+ if not shared_list_fields:
+ raise self.AlgebraicDataError(
+ "Objects to sequence have no shared fields after excluding {}. "
+ "This object is {}, with list fields: {}. "
+ "The other object is {}, with list fields: {}."
+ .format(exclude_list_fields, self, self._list_fields, other, other._list_fields))
+
+ for list_field_name in shared_list_fields:
+ lhs_value = getattr(self, list_field_name)
+ rhs_value = getattr(other, list_field_name)
+ overwrite_kwargs[list_field_name] = lhs_value + rhs_value
+
+ return self.copy(**overwrite_kwargs)
+
+
+class _Executable(_ExtensibleAlgebraic):
+
+ @_list_field
def path_entries(self):
"""A list of directory paths containing this executable, to be used in a subprocess's PATH.
@@ -60,63 +137,65 @@ def exe_filename(self):
:rtype: str
"""
- # TODO: rename this to 'runtime_library_dirs'!
- @abstractproperty
- def library_dirs(self):
+ @_list_field
+ def runtime_library_dirs(self):
"""Directories containing shared libraries that must be on the runtime library search path.
- Note: this is for libraries needed for the current Executable to run -- see LinkerMixin below
+ Note: this is for libraries needed for the current _Executable to run -- see _LinkerMixin below
for libraries that are needed at link time.
-
:rtype: list of str
"""
- @property
+ @_list_field
def extra_args(self):
- """Additional arguments used when invoking this Executable.
+ """Additional arguments used when invoking this _Executable.
These are typically placed before the invocation-specific command line arguments.
:rtype: list of str
"""
- return []
_platform = Platform.create()
@property
- def as_invocation_environment_dict(self):
- """A dict to use as this Executable's execution environment.
+ def invocation_environment_dict(self):
+ """A dict to use as this _Executable's execution environment.
+
+ This isn't made into an "algebraic" field because its contents (the keys of the dict) are
+ generally known to the specific class which is overriding this property. Implementations of this
+ property can then make use of the data in the algebraic fields to populate this dict.
:rtype: dict of string -> string
"""
- lib_env_var = self._platform.resolve_platform_specific({
- 'darwin': lambda: 'DYLD_LIBRARY_PATH',
- 'linux': lambda: 'LD_LIBRARY_PATH',
+ lib_env_var = self._platform.resolve_for_enum_variant({
+ 'darwin': 'DYLD_LIBRARY_PATH',
+ 'linux': 'LD_LIBRARY_PATH',
})
return {
'PATH': create_path_env_var(self.path_entries),
- lib_env_var: create_path_env_var(self.library_dirs),
+ lib_env_var: create_path_env_var(self.runtime_library_dirs),
}
+@_algebraic_data(_Executable)
class Assembler(datatype([
'path_entries',
'exe_filename',
- 'library_dirs',
-]), Executable):
- pass
+ 'runtime_library_dirs',
+ 'extra_args',
+])): pass
-class LinkerMixin(Executable):
+class _LinkerMixin(_Executable):
- @abstractproperty
+ @_list_field
def linking_library_dirs(self):
"""Directories to search for libraries needed at link time.
:rtype: list of str
"""
- @abstractproperty
+ @_list_field
def extra_object_files(self):
"""A list of object files required to perform a successful link.
@@ -126,8 +205,8 @@ def extra_object_files(self):
"""
@property
- def as_invocation_environment_dict(self):
- ret = super(LinkerMixin, self).as_invocation_environment_dict.copy()
+ def invocation_environment_dict(self):
+ ret = super(_LinkerMixin, self).invocation_environment_dict.copy()
full_library_path_dirs = self.linking_library_dirs + [
os.path.dirname(f) for f in self.extra_object_files
@@ -141,19 +220,20 @@ def as_invocation_environment_dict(self):
return ret
+@_algebraic_data(_LinkerMixin)
class Linker(datatype([
'path_entries',
'exe_filename',
- 'library_dirs',
+ 'runtime_library_dirs',
'linking_library_dirs',
'extra_args',
'extra_object_files',
-]), LinkerMixin): pass
+])): pass
-class CompilerMixin(Executable):
+class _CompilerMixin(_Executable):
- @abstractproperty
+ @_list_field
def include_dirs(self):
"""Directories to search for header files to #include during compilation.
@@ -161,8 +241,8 @@ def include_dirs(self):
"""
@property
- def as_invocation_environment_dict(self):
- ret = super(CompilerMixin, self).as_invocation_environment_dict.copy()
+ def invocation_environment_dict(self):
+ ret = super(_CompilerMixin, self).invocation_environment_dict.copy()
if self.include_dirs:
ret['CPATH'] = create_path_env_var(self.include_dirs)
@@ -170,34 +250,36 @@ def as_invocation_environment_dict(self):
return ret
+@_algebraic_data(_CompilerMixin)
class CCompiler(datatype([
'path_entries',
'exe_filename',
- 'library_dirs',
+ 'runtime_library_dirs',
'include_dirs',
'extra_args',
-]), CompilerMixin):
+])):
@property
- def as_invocation_environment_dict(self):
- ret = super(CCompiler, self).as_invocation_environment_dict.copy()
+ def invocation_environment_dict(self):
+ ret = super(CCompiler, self).invocation_environment_dict.copy()
ret['CC'] = self.exe_filename
return ret
+@_algebraic_data(_CompilerMixin)
class CppCompiler(datatype([
'path_entries',
'exe_filename',
- 'library_dirs',
+ 'runtime_library_dirs',
'include_dirs',
'extra_args',
-]), CompilerMixin):
+])):
@property
- def as_invocation_environment_dict(self):
- ret = super(CppCompiler, self).as_invocation_environment_dict.copy()
+ def invocation_environment_dict(self):
+ ret = super(CppCompiler, self).invocation_environment_dict.copy()
ret['CXX'] = self.exe_filename
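The `_ExtensibleAlgebraic` mixin introduced above replaces ad-hoc `copy(field=(a + b))` calls with `prepend_field`, `append_field`, and `sequence`, which concatenate declared list fields. A small self-contained sketch of those semantics on a namedtuple stand-in (the `Tool` type, paths, and flags below are illustrative, not the real `datatype()` machinery):

import collections

_Tool = collections.namedtuple('_Tool', ['path_entries', 'extra_args'])

class Tool(_Tool):
  """Tiny stand-in for a datatype() with the _ExtensibleAlgebraic-style list operations."""
  _list_fields = frozenset(['path_entries', 'extra_args'])

  def copy(self, **kwargs):
    return self._replace(**kwargs)

  def prepend_field(self, name, values):
    return self.copy(**{name: list(values) + list(getattr(self, name))})

  def append_field(self, name, values):
    return self.copy(**{name: list(getattr(self, name)) + list(values)})

  def sequence(self, other):
    # Concatenate every shared list field, with self's entries first.
    return self.copy(**{
      f: list(getattr(self, f)) + list(getattr(other, f))
      for f in self._list_fields & other._list_fields
    })

clang = Tool(path_entries=['/opt/llvm/bin'], extra_args=['-x', 'c'])
gcc = Tool(path_entries=['/usr/bin'], extra_args=[])
combined = clang.sequence(gcc).prepend_field('extra_args', ['-std=c11'])
assert combined.path_entries == ['/opt/llvm/bin', '/usr/bin']
assert combined.extra_args == ['-std=c11', '-x', 'c']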
diff --git a/src/python/pants/backend/native/subsystems/binaries/binutils.py b/src/python/pants/backend/native/subsystems/binaries/binutils.py
index 69c50463001..d8b3375b0c4 100644
--- a/src/python/pants/backend/native/subsystems/binaries/binutils.py
+++ b/src/python/pants/backend/native/subsystems/binaries/binutils.py
@@ -24,13 +24,14 @@ def assembler(self):
return Assembler(
path_entries=self.path_entries(),
exe_filename='as',
- library_dirs=[])
+ runtime_library_dirs=[],
+ extra_args=[])
def linker(self):
return Linker(
path_entries=self.path_entries(),
exe_filename='ld',
- library_dirs=[],
+ runtime_library_dirs=[],
linking_library_dirs=[],
extra_args=[],
extra_object_files=[],
diff --git a/src/python/pants/backend/native/subsystems/binaries/gcc.py b/src/python/pants/backend/native/subsystems/binaries/gcc.py
index 5f48e12fb85..b0696375d79 100644
--- a/src/python/pants/backend/native/subsystems/binaries/gcc.py
+++ b/src/python/pants/backend/native/subsystems/binaries/gcc.py
@@ -44,9 +44,9 @@ def path_entries(self):
@memoized_method
def _common_lib_dirs(self, platform):
- lib64_tuples = platform.resolve_platform_specific({
- 'darwin': lambda: [],
- 'linux': lambda: [('lib64',)],
+ lib64_tuples = platform.resolve_for_enum_variant({
+ 'darwin': [],
+ 'linux': [('lib64',)],
})
return self._filemap(lib64_tuples + [
('lib',),
@@ -65,7 +65,7 @@ def c_compiler(self, platform):
return CCompiler(
path_entries=self.path_entries,
exe_filename='gcc',
- library_dirs=self._common_lib_dirs(platform),
+ runtime_library_dirs=self._common_lib_dirs(platform),
include_dirs=self._common_include_dirs,
extra_args=[])
@@ -91,7 +91,7 @@ def cpp_compiler(self, platform):
return CppCompiler(
path_entries=self.path_entries,
exe_filename='g++',
- library_dirs=self._common_lib_dirs(platform),
+ runtime_library_dirs=self._common_lib_dirs(platform),
include_dirs=(self._common_include_dirs + self._cpp_include_dirs),
extra_args=[])
diff --git a/src/python/pants/backend/native/subsystems/binaries/llvm.py b/src/python/pants/backend/native/subsystems/binaries/llvm.py
index a49146cbbd1..9786e5c3990 100644
--- a/src/python/pants/backend/native/subsystems/binaries/llvm.py
+++ b/src/python/pants/backend/native/subsystems/binaries/llvm.py
@@ -80,16 +80,13 @@ def _filemap(self, all_components_list):
def path_entries(self):
return self._filemap([('bin',)])
- _PLATFORM_SPECIFIC_LINKER_NAME = {
- 'darwin': lambda: 'ld64.lld',
- 'linux': lambda: 'lld',
- }
-
def linker(self, platform):
return Linker(
path_entries=self.path_entries,
- exe_filename=platform.resolve_platform_specific(
- self._PLATFORM_SPECIFIC_LINKER_NAME),
+ exe_filename=platform.resolve_for_enum_variant({
+ 'darwin': 'ld64.lld',
+ 'linux': 'lld',
+ }),
-      library_dirs=[],
+      runtime_library_dirs=[],
linking_library_dirs=[],
extra_args=[],
@@ -108,7 +105,7 @@ def c_compiler(self):
return CCompiler(
path_entries=self.path_entries,
exe_filename='clang',
- library_dirs=self._common_lib_dirs,
+ runtime_library_dirs=self._common_lib_dirs,
include_dirs=self._common_include_dirs,
extra_args=[])
@@ -120,7 +117,7 @@ def cpp_compiler(self):
return CppCompiler(
path_entries=self.path_entries,
exe_filename='clang++',
- library_dirs=self._common_lib_dirs,
+ runtime_library_dirs=self._common_lib_dirs,
include_dirs=(self._cpp_include_dirs + self._common_include_dirs),
extra_args=[])
diff --git a/src/python/pants/backend/native/subsystems/conan.py b/src/python/pants/backend/native/subsystems/conan.py
index fb8fef89172..ae7d53880bd 100644
--- a/src/python/pants/backend/native/subsystems/conan.py
+++ b/src/python/pants/backend/native/subsystems/conan.py
@@ -20,6 +20,7 @@ class Conan(PythonToolBase):
'pylint==1.9.3',
]
default_entry_point = 'conans.conan'
+ default_interpreter_constraints = ['CPython>=2.7,<4']
@classmethod
def register_options(cls, register):
diff --git a/src/python/pants/backend/native/subsystems/native_build_step.py b/src/python/pants/backend/native/subsystems/native_build_step.py
index b30f51c07e6..55ffea10dd4 100644
--- a/src/python/pants/backend/native/subsystems/native_build_step.py
+++ b/src/python/pants/backend/native/subsystems/native_build_step.py
@@ -10,14 +10,10 @@
from pants.subsystem.subsystem import Subsystem
from pants.util.memo import memoized_property
from pants.util.meta import classproperty
-from pants.util.objects import enum
+from pants.util.objects import enum, register_enum_option
-class ToolchainVariant(enum('descriptor', ['gnu', 'llvm'])):
-
- @property
- def is_gnu(self):
- return self.descriptor == 'gnu'
+class ToolchainVariant(enum(['gnu', 'llvm'])): pass
class NativeBuildStep(CompilerOptionSetsMixin, MirroredTargetOptionMixin, Subsystem):
@@ -39,11 +35,10 @@ def register_options(cls, register):
help='The default for the "compiler_option_sets" argument '
'for targets of this language.')
- register('--toolchain-variant', type=str, fingerprint=True, advanced=True,
- choices=ToolchainVariant.allowed_values,
- default=ToolchainVariant.default_value,
- help="Whether to use gcc (gnu) or clang (llvm) to compile C and C++. Currently all "
- "linking is done with binutils ld on Linux, and the XCode CLI Tools on MacOS.")
+ register_enum_option(
+ register, ToolchainVariant, '--toolchain-variant', advanced=True,
+ help="Whether to use gcc (gnu) or clang (llvm) to compile C and C++. Currently all "
+ "linking is done with binutils ld on Linux, and the XCode CLI Tools on MacOS.")
def get_compiler_option_sets_for_target(self, target):
return self.get_target_mirrored_option('compiler_option_sets', target)
diff --git a/src/python/pants/backend/native/subsystems/native_toolchain.py b/src/python/pants/backend/native/subsystems/native_toolchain.py
index 7f4dec31efc..1885c68e0d9 100644
--- a/src/python/pants/backend/native/subsystems/native_toolchain.py
+++ b/src/python/pants/backend/native/subsystems/native_toolchain.py
@@ -4,6 +4,8 @@
from __future__ import absolute_import, division, print_function, unicode_literals
+from builtins import object
+
from pants.backend.native.config.environment import (Assembler, CCompiler, CppCompiler,
CppToolchain, CToolchain, Linker, Platform)
from pants.backend.native.subsystems.binaries.binutils import Binutils
@@ -67,10 +69,21 @@ def _libc_dev(self):
class LibcObjects(datatype(['crti_object_paths'])): pass
-class GCCLinker(datatype([('linker', Linker)])): pass
+class LinkerWrapperMixin(object):
+
+ def for_compiler(self, compiler, platform):
+ """Return a Linker object which is intended to be compatible with the given `compiler`."""
+ return (self.linker
+ # TODO(#6143): describe why the compiler needs to be first on the PATH!
+ .sequence(compiler, exclude_list_fields=['extra_args', 'path_entries'])
+ .prepend_field('path_entries', compiler.path_entries)
+ .copy(exe_filename=compiler.exe_filename))
+
+
+class GCCLinker(datatype([('linker', Linker)]), LinkerWrapperMixin): pass
-class LLVMLinker(datatype([('linker', Linker)])): pass
+class LLVMLinker(datatype([('linker', Linker)]), LinkerWrapperMixin): pass
class GCCCToolchain(datatype([('c_toolchain', CToolchain)])): pass
@@ -87,10 +100,11 @@ class LLVMCppToolchain(datatype([('cpp_toolchain', CppToolchain)])): pass
@rule(LibcObjects, [Select(Platform), Select(NativeToolchain)])
def select_libc_objects(platform, native_toolchain):
- paths = platform.resolve_platform_specific({
+  # We use lambdas here to avoid searching for libc on OSX, where it will fail.
+ paths = platform.resolve_for_enum_variant({
'darwin': lambda: [],
'linux': lambda: native_toolchain._libc_dev.get_libc_objects(),
- })
+ })()
yield LibcObjects(paths)
@@ -127,8 +141,7 @@ def select_gcc_linker(native_toolchain):
base_linker = yield Get(BaseLinker, NativeToolchain, native_toolchain)
linker = base_linker.linker
libc_objects = yield Get(LibcObjects, NativeToolchain, native_toolchain)
- linker_with_libc = linker.copy(
- extra_object_files=(linker.extra_object_files + libc_objects.crti_object_paths))
+ linker_with_libc = linker.append_field('extra_object_files', libc_objects.crti_object_paths)
yield GCCLinker(linker_with_libc)
@@ -159,36 +172,24 @@ def select_gcc_install_location(gcc):
def select_llvm_c_toolchain(platform, native_toolchain):
provided_clang = yield Get(CCompiler, LLVM, native_toolchain._llvm)
- # These arguments are shared across platforms.
- llvm_c_compiler_args = [
- '-x', 'c', '-std=c11',
- ]
-
if platform.normalized_os_name == 'darwin':
xcode_clang = yield Get(CCompiler, XCodeCLITools, native_toolchain._xcode_cli_tools)
- working_c_compiler = provided_clang.copy(
- path_entries=(provided_clang.path_entries + xcode_clang.path_entries),
- library_dirs=(provided_clang.library_dirs + xcode_clang.library_dirs),
- include_dirs=(provided_clang.include_dirs + xcode_clang.include_dirs),
- extra_args=(provided_clang.extra_args + llvm_c_compiler_args + xcode_clang.extra_args))
+ joined_c_compiler = provided_clang.sequence(xcode_clang)
else:
gcc_install = yield Get(GCCInstallLocationForLLVM, GCC, native_toolchain._gcc)
provided_gcc = yield Get(CCompiler, GCC, native_toolchain._gcc)
- working_c_compiler = provided_clang.copy(
- # We need g++'s version of the GLIBCXX library to be able to run, unfortunately.
- library_dirs=(provided_gcc.library_dirs + provided_clang.library_dirs),
- include_dirs=provided_gcc.include_dirs,
- extra_args=(llvm_c_compiler_args + provided_clang.extra_args + gcc_install.as_clang_argv))
+ joined_c_compiler = (provided_clang
+ .sequence(provided_gcc)
+ .append_field('extra_args', gcc_install.as_clang_argv)
+ # We need g++'s version of the GLIBCXX library to be able to run.
+ .prepend_field('runtime_library_dirs', provided_gcc.runtime_library_dirs))
- llvm_linker_wrapper = yield Get(LLVMLinker, NativeToolchain, native_toolchain)
- llvm_linker = llvm_linker_wrapper.linker
+ working_c_compiler = joined_c_compiler.prepend_field('extra_args', [
+ '-x', 'c', '-std=c11',
+ ])
- # TODO(#6855): introduce a more concise way to express these compositions of executables.
- working_linker = llvm_linker.copy(
- path_entries=(llvm_linker.path_entries + working_c_compiler.path_entries),
- exe_filename=working_c_compiler.exe_filename,
- library_dirs=(llvm_linker.library_dirs + working_c_compiler.library_dirs),
- )
+ llvm_linker_wrapper = yield Get(LLVMLinker, NativeToolchain, native_toolchain)
+ working_linker = llvm_linker_wrapper.for_compiler(working_c_compiler, platform)
yield LLVMCToolchain(CToolchain(working_c_compiler, working_linker))
@@ -197,52 +198,42 @@ def select_llvm_c_toolchain(platform, native_toolchain):
def select_llvm_cpp_toolchain(platform, native_toolchain):
provided_clangpp = yield Get(CppCompiler, LLVM, native_toolchain._llvm)
- # These arguments are shared across platforms.
- llvm_cpp_compiler_args = [
- '-x', 'c++', '-std=c++11',
- # This flag is intended to avoid using any of the headers from our LLVM distribution's C++
- # stdlib implementation, or any from the host system, and instead, use include dirs from the
- # XCodeCLITools or GCC.
- # TODO(#6143): Determine precisely what this flag does and why it's necessary.
- '-nostdinc++',
- ]
-
+ # On OSX, we use the libc++ (LLVM) C++ standard library implementation. This is feature-complete
+ # for OSX, but not for Linux (see https://libcxx.llvm.org/ for more info).
if platform.normalized_os_name == 'darwin':
- xcode_clangpp = yield Get(CppCompiler, XCodeCLITools, native_toolchain._xcode_cli_tools)
- working_cpp_compiler = provided_clangpp.copy(
- path_entries=(provided_clangpp.path_entries + xcode_clangpp.path_entries),
- library_dirs=(provided_clangpp.library_dirs + xcode_clangpp.library_dirs),
- include_dirs=(provided_clangpp.include_dirs + xcode_clangpp.include_dirs),
- # On OSX, this uses the libc++ (LLVM) C++ standard library implementation. This is
- # feature-complete for OSX, but not for Linux (see https://libcxx.llvm.org/ for more info).
- extra_args=(llvm_cpp_compiler_args + provided_clangpp.extra_args + xcode_clangpp.extra_args))
- extra_linking_library_dirs = []
+ xcode_clang = yield Get(CppCompiler, XCodeCLITools, native_toolchain._xcode_cli_tools)
+ joined_cpp_compiler = provided_clangpp.sequence(xcode_clang)
+ extra_llvm_linking_library_dirs = []
linker_extra_args = []
else:
gcc_install = yield Get(GCCInstallLocationForLLVM, GCC, native_toolchain._gcc)
provided_gpp = yield Get(CppCompiler, GCC, native_toolchain._gcc)
- working_cpp_compiler = provided_clangpp.copy(
- # We need g++'s version of the GLIBCXX library to be able to run, unfortunately.
- library_dirs=(provided_gpp.library_dirs + provided_clangpp.library_dirs),
- # NB: we use g++'s headers on Linux, and therefore their C++ standard library.
- include_dirs=provided_gpp.include_dirs,
- extra_args=(llvm_cpp_compiler_args + provided_clangpp.extra_args + gcc_install.as_clang_argv))
- # TODO(#6855): why are these necessary? this is very mysterious.
- extra_linking_library_dirs = provided_gpp.library_dirs + provided_clangpp.library_dirs
+ joined_cpp_compiler = (provided_clangpp
+ .sequence(provided_gpp)
+ # NB: we use g++'s headers on Linux, and therefore their C++ standard
+ # library.
+ .copy(include_dirs=provided_gpp.include_dirs)
+ .append_field('extra_args', gcc_install.as_clang_argv)
+ # We need g++'s version of the GLIBCXX library to be able to run.
+ .prepend_field('runtime_library_dirs', provided_gpp.runtime_library_dirs))
+ extra_llvm_linking_library_dirs = provided_gpp.runtime_library_dirs + provided_clangpp.runtime_library_dirs
# Ensure we use libstdc++, provided by g++, during the linking stage.
linker_extra_args=['-stdlib=libstdc++']
- llvm_linker_wrapper = yield Get(LLVMLinker, NativeToolchain, native_toolchain)
- llvm_linker = llvm_linker_wrapper.linker
+ working_cpp_compiler = joined_cpp_compiler.prepend_field('extra_args', [
+ '-x', 'c++', '-std=c++11',
+ # This flag is intended to avoid using any of the headers from our LLVM distribution's C++
+ # stdlib implementation, or any from the host system, and instead, use include dirs from the
+ # XCodeCLITools or GCC.
+ # TODO(#6143): Determine precisely what this flag does and why it's necessary.
+ '-nostdinc++',
+ ])
- working_linker = llvm_linker.copy(
- path_entries=(llvm_linker.path_entries + working_cpp_compiler.path_entries),
- exe_filename=working_cpp_compiler.exe_filename,
- library_dirs=(llvm_linker.library_dirs + working_cpp_compiler.library_dirs),
- linking_library_dirs=(llvm_linker.linking_library_dirs +
- extra_linking_library_dirs),
- extra_args=(llvm_linker.extra_args + linker_extra_args),
- )
+ llvm_linker_wrapper = yield Get(LLVMLinker, NativeToolchain, native_toolchain)
+ working_linker = (llvm_linker_wrapper
+ .for_compiler(working_cpp_compiler, platform)
+ .append_field('linking_library_dirs', extra_llvm_linking_library_dirs)
+ .prepend_field('extra_args', linker_extra_args))
yield LLVMCppToolchain(CppToolchain(working_cpp_compiler, working_linker))
@@ -251,35 +242,23 @@ def select_llvm_cpp_toolchain(platform, native_toolchain):
def select_gcc_c_toolchain(platform, native_toolchain):
provided_gcc = yield Get(CCompiler, GCC, native_toolchain._gcc)
- # GCC needs an assembler, so we provide that (platform-specific) tool here.
- assembler = yield Get(Assembler, NativeToolchain, native_toolchain)
-
- gcc_c_compiler_args = [
- '-x', 'c', '-std=c11',
- ]
-
if platform.normalized_os_name == 'darwin':
# GCC needs access to some headers that are only provided by the XCode toolchain
# currently (e.g. "_stdio.h"). These headers are unlikely to change across versions, so this is
# probably safe.
xcode_clang = yield Get(CCompiler, XCodeCLITools, native_toolchain._xcode_cli_tools)
- new_include_dirs = provided_gcc.include_dirs + xcode_clang.include_dirs
+ joined_c_compiler = provided_gcc.sequence(xcode_clang)
else:
- new_include_dirs = provided_gcc.include_dirs
+ joined_c_compiler = provided_gcc
- working_c_compiler = provided_gcc.copy(
- path_entries=(provided_gcc.path_entries + assembler.path_entries),
- include_dirs=new_include_dirs,
- extra_args=gcc_c_compiler_args)
+ # GCC needs an assembler, so we provide that (platform-specific) tool here.
+ assembler = yield Get(Assembler, NativeToolchain, native_toolchain)
+ working_c_compiler = joined_c_compiler.sequence(assembler).prepend_field('extra_args', [
+ '-x', 'c', '-std=c11',
+ ])
gcc_linker_wrapper = yield Get(GCCLinker, NativeToolchain, native_toolchain)
- gcc_linker = gcc_linker_wrapper.linker
-
- working_linker = gcc_linker.copy(
- path_entries=(working_c_compiler.path_entries + gcc_linker.path_entries),
- exe_filename=working_c_compiler.exe_filename,
- library_dirs=(gcc_linker.library_dirs + working_c_compiler.library_dirs),
- )
+ working_linker = gcc_linker_wrapper.for_compiler(working_c_compiler, platform)
yield GCCCToolchain(CToolchain(working_c_compiler, working_linker))
@@ -288,18 +267,6 @@ def select_gcc_c_toolchain(platform, native_toolchain):
def select_gcc_cpp_toolchain(platform, native_toolchain):
provided_gpp = yield Get(CppCompiler, GCC, native_toolchain._gcc)
- # GCC needs an assembler, so we provide that (platform-specific) tool here.
- assembler = yield Get(Assembler, NativeToolchain, native_toolchain)
-
- gcc_cpp_compiler_args = [
- '-x', 'c++', '-std=c++11',
- # This flag is intended to avoid using any of the headers from our LLVM distribution's C++
- # stdlib implementation, or any from the host system, and instead, use include dirs from the
- # XCodeCLITools or GCC.
- # TODO(#6143): Determine precisely what this flag does and why it's necessary.
- '-nostdinc++',
- ]
-
if platform.normalized_os_name == 'darwin':
# GCC needs access to some headers that are only provided by the XCode toolchain
# currently (e.g. "_stdio.h"). These headers are unlikely to change across versions, so this is
@@ -307,29 +274,23 @@ def select_gcc_cpp_toolchain(platform, native_toolchain):
# TODO: we should be providing all of these (so we can eventually phase out XCodeCLITools
# entirely).
xcode_clangpp = yield Get(CppCompiler, XCodeCLITools, native_toolchain._xcode_cli_tools)
- working_cpp_compiler = provided_gpp.copy(
- path_entries=(provided_gpp.path_entries + assembler.path_entries),
- include_dirs=(provided_gpp.include_dirs + xcode_clangpp.include_dirs),
- extra_args=(gcc_cpp_compiler_args + provided_gpp.extra_args + xcode_clangpp.extra_args),
- )
- extra_linking_library_dirs = []
+ joined_cpp_compiler = provided_gpp.sequence(xcode_clangpp)
else:
- provided_clangpp = yield Get(CppCompiler, LLVM, native_toolchain._llvm)
- working_cpp_compiler = provided_gpp.copy(
- path_entries=(provided_gpp.path_entries + assembler.path_entries),
- extra_args=(gcc_cpp_compiler_args + provided_gpp.extra_args),
- )
- extra_linking_library_dirs = provided_gpp.library_dirs + provided_clangpp.library_dirs
+ joined_cpp_compiler = provided_gpp
- gcc_linker_wrapper = yield Get(GCCLinker, NativeToolchain, native_toolchain)
- gcc_linker = gcc_linker_wrapper.linker
+ # GCC needs an assembler, so we provide that (platform-specific) tool here.
+ assembler = yield Get(Assembler, NativeToolchain, native_toolchain)
+ working_cpp_compiler = joined_cpp_compiler.sequence(assembler).prepend_field('extra_args', [
+ '-x', 'c++', '-std=c++11',
+ # This flag is intended to avoid using any of the headers from our LLVM distribution's C++
+ # stdlib implementation, or any from the host system, and instead, use include dirs from the
+ # XCodeCLITools or GCC.
+ # TODO(#6143): Determine precisely what this flag does and why it's necessary.
+ '-nostdinc++',
+ ])
- working_linker = gcc_linker.copy(
- path_entries=(working_cpp_compiler.path_entries + gcc_linker.path_entries),
- exe_filename=working_cpp_compiler.exe_filename,
- library_dirs=(gcc_linker.library_dirs + working_cpp_compiler.library_dirs),
- linking_library_dirs=(gcc_linker.linking_library_dirs + extra_linking_library_dirs),
- )
+ gcc_linker_wrapper = yield Get(GCCLinker, NativeToolchain, native_toolchain)
+ working_linker = gcc_linker_wrapper.for_compiler(working_cpp_compiler, platform)
yield GCCCppToolchain(CppToolchain(working_cpp_compiler, working_linker))
@@ -343,8 +304,12 @@ class ToolchainVariantRequest(datatype([
@rule(CToolchain, [Select(ToolchainVariantRequest)])
def select_c_toolchain(toolchain_variant_request):
native_toolchain = toolchain_variant_request.toolchain
- # TODO: make an enum exhaustiveness checking method that works with `yield Get(...)` statements!
- if toolchain_variant_request.variant.is_gnu:
+ # TODO(#5933): make an enum exhaustiveness checking method that works with `yield Get(...)`!
+ use_gcc = toolchain_variant_request.variant.resolve_for_enum_variant({
+ 'gnu': True,
+ 'llvm': False,
+ })
+ if use_gcc:
toolchain_resolved = yield Get(GCCCToolchain, NativeToolchain, native_toolchain)
else:
toolchain_resolved = yield Get(LLVMCToolchain, NativeToolchain, native_toolchain)
@@ -354,7 +319,12 @@ def select_c_toolchain(toolchain_variant_request):
@rule(CppToolchain, [Select(ToolchainVariantRequest)])
def select_cpp_toolchain(toolchain_variant_request):
native_toolchain = toolchain_variant_request.toolchain
- if toolchain_variant_request.variant.is_gnu:
+ # TODO(#5933): make an enum exhaustiveness checking method that works with `yield Get(...)`!
+ use_gcc = toolchain_variant_request.variant.resolve_for_enum_variant({
+ 'gnu': True,
+ 'llvm': False,
+ })
+ if use_gcc:
toolchain_resolved = yield Get(GCCCppToolchain, NativeToolchain, native_toolchain)
else:
toolchain_resolved = yield Get(LLVMCppToolchain, NativeToolchain, native_toolchain)
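One subtlety in the hunks above: most `resolve_for_enum_variant` call sites now pass plain values, which are all evaluated when the dict literal is built, while the libc-objects rule keeps zero-argument lambdas (and calls the resolved one) so the Linux-only search never runs on OSX. A hedged sketch of the difference, using an illustrative dispatch helper rather than the real enum API:

def resolve_for_enum_variant_example(variant_value, mapping):
  """Illustrative dispatch: return the entry mapped to the current enum variant."""
  return mapping[variant_value]

# Plain values: every branch is evaluated when the dict literal is constructed.
lib_env_var = resolve_for_enum_variant_example('linux', {
  'darwin': 'DYLD_LIBRARY_PATH',
  'linux': 'LD_LIBRARY_PATH',
})
assert lib_env_var == 'LD_LIBRARY_PATH'

# Zero-argument lambdas: evaluation is deferred, so the unselected branch (which might do a
# costly or failing filesystem search, as in select_libc_objects) never actually runs.
def _find_libc_objects_somehow():
  return ['/usr/lib/x86_64-linux-gnu/crti.o']

libc_paths = resolve_for_enum_variant_example('linux', {
  'darwin': lambda: [],
  'linux': lambda: _find_libc_objects_somehow(),
})()
assert libc_paths == ['/usr/lib/x86_64-linux-gnu/crti.o']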
diff --git a/src/python/pants/backend/native/subsystems/xcode_cli_tools.py b/src/python/pants/backend/native/subsystems/xcode_cli_tools.py
index 4ea8fceaf53..3c2e472785c 100644
--- a/src/python/pants/backend/native/subsystems/xcode_cli_tools.py
+++ b/src/python/pants/backend/native/subsystems/xcode_cli_tools.py
@@ -134,14 +134,15 @@ def assembler(self):
return Assembler(
path_entries=self.path_entries(),
exe_filename='as',
- library_dirs=[])
+ runtime_library_dirs=[],
+ extra_args=[])
@memoized_method
def linker(self):
return Linker(
path_entries=self.path_entries(),
exe_filename='ld',
- library_dirs=[],
+ runtime_library_dirs=[],
linking_library_dirs=[],
extra_args=[MIN_OSX_VERSION_ARG],
extra_object_files=[],
@@ -152,7 +153,7 @@ def c_compiler(self):
return CCompiler(
path_entries=self.path_entries(),
exe_filename='clang',
- library_dirs=self.lib_dirs(),
+ runtime_library_dirs=self.lib_dirs(),
include_dirs=self.include_dirs(),
extra_args=[MIN_OSX_VERSION_ARG])
@@ -161,7 +162,7 @@ def cpp_compiler(self):
return CppCompiler(
path_entries=self.path_entries(),
exe_filename='clang++',
- library_dirs=self.lib_dirs(),
+ runtime_library_dirs=self.lib_dirs(),
include_dirs=self.include_dirs(include_cpp_inc=True),
extra_args=[MIN_OSX_VERSION_ARG])
diff --git a/src/python/pants/backend/native/targets/native_artifact.py b/src/python/pants/backend/native/targets/native_artifact.py
index dc8461d642c..b6ba3bb132b 100644
--- a/src/python/pants/backend/native/targets/native_artifact.py
+++ b/src/python/pants/backend/native/targets/native_artifact.py
@@ -22,9 +22,9 @@ def alias(cls):
def as_shared_lib(self, platform):
# TODO: check that the name conforms to some format in the constructor (e.g. no dots?).
- return platform.resolve_platform_specific({
- 'darwin': lambda: 'lib{}.dylib'.format(self.lib_name),
- 'linux': lambda: 'lib{}.so'.format(self.lib_name),
+ return platform.resolve_for_enum_variant({
+ 'darwin': 'lib{}.dylib'.format(self.lib_name),
+ 'linux': 'lib{}.so'.format(self.lib_name),
})
def _compute_fingerprint(self):
diff --git a/src/python/pants/backend/native/tasks/conan_fetch.py b/src/python/pants/backend/native/tasks/conan_fetch.py
index 6ffa7fe4416..5f9eb11a14f 100644
--- a/src/python/pants/backend/native/tasks/conan_fetch.py
+++ b/src/python/pants/backend/native/tasks/conan_fetch.py
@@ -124,9 +124,9 @@ def _conan_user_home(self, conan, in_workdir=False):
@memoized_property
def _conan_os_name(self):
- return Platform.create().resolve_platform_specific({
- 'darwin': lambda: 'Macos',
- 'linux': lambda: 'Linux',
+ return Platform.create().resolve_for_enum_variant({
+ 'darwin': 'Macos',
+ 'linux': 'Linux',
})
@property
diff --git a/src/python/pants/backend/native/tasks/link_shared_libraries.py b/src/python/pants/backend/native/tasks/link_shared_libraries.py
index 4f3efc0b69f..913fa9f334a 100644
--- a/src/python/pants/backend/native/tasks/link_shared_libraries.py
+++ b/src/python/pants/backend/native/tasks/link_shared_libraries.py
@@ -142,11 +142,6 @@ def _make_link_request(self, vt, compiled_objects_product):
return link_request
- _SHARED_CMDLINE_ARGS = {
- 'darwin': lambda: ['-Wl,-dylib'],
- 'linux': lambda: ['-shared'],
- }
-
def _execute_link_request(self, link_request):
object_files = link_request.object_files
@@ -163,7 +158,10 @@ def _execute_link_request(self, link_request):
self.context.log.debug("resulting_shared_lib_path: {}".format(resulting_shared_lib_path))
# We are executing in the results_dir, so get absolute paths for everything.
cmd = ([linker.exe_filename] +
- self.platform.resolve_platform_specific(self._SHARED_CMDLINE_ARGS) +
+ self.platform.resolve_for_enum_variant({
+ 'darwin': ['-Wl,-dylib'],
+ 'linux': ['-shared'],
+ }) +
linker.extra_args +
['-o', os.path.abspath(resulting_shared_lib_path)] +
['-L{}'.format(lib_dir) for lib_dir in link_request.external_lib_dirs] +
@@ -173,7 +171,7 @@ def _execute_link_request(self, link_request):
self.context.log.info("selected linker exe name: '{}'".format(linker.exe_filename))
self.context.log.debug("linker argv: {}".format(cmd))
- env = linker.as_invocation_environment_dict
+ env = linker.invocation_environment_dict
self.context.log.debug("linker invocation environment: {}".format(env))
with self.context.new_workunit(name='link-shared-libraries',
diff --git a/src/python/pants/backend/native/tasks/native_compile.py b/src/python/pants/backend/native/tasks/native_compile.py
index 8533b9a9567..1c313e2e3ca 100644
--- a/src/python/pants/backend/native/tasks/native_compile.py
+++ b/src/python/pants/backend/native/tasks/native_compile.py
@@ -8,19 +8,18 @@
from abc import abstractmethod
from collections import defaultdict
-from pants.backend.native.config.environment import Executable
from pants.backend.native.tasks.native_task import NativeTask
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.workunit import WorkUnit, WorkUnitLabel
from pants.util.memo import memoized_method, memoized_property
from pants.util.meta import AbstractClass, classproperty
-from pants.util.objects import SubclassesOf, datatype
+from pants.util.objects import datatype
from pants.util.process_handler import subprocess
class NativeCompileRequest(datatype([
- ('compiler', SubclassesOf(Executable)),
+ 'compiler',
# TODO: add type checking for Collection.of()!
'include_dirs',
'sources',
@@ -134,11 +133,11 @@ def _compile_settings(self):
@abstractmethod
def get_compiler(self, native_library_target):
- """An instance of `Executable` which can be invoked to compile files.
+ """An instance of `_CompilerMixin` which can be invoked to compile files.
NB: Subclasses will be queried for the compiler instance once and the result cached.
- :return: :class:`pants.backend.native.config.environment.Executable`
+ :return: :class:`pants.backend.native.config.environment._CompilerMixin`
"""
def _compiler(self, native_library_target):
@@ -229,7 +228,7 @@ def _compile(self, compile_request):
compiler = compile_request.compiler
output_dir = compile_request.output_dir
- env = compiler.as_invocation_environment_dict
+ env = compiler.invocation_environment_dict
with self.context.new_workunit(
name=self.workunit_label, labels=[WorkUnitLabel.COMPILER]) as workunit:
diff --git a/src/python/pants/backend/python/interpreter_cache.py b/src/python/pants/backend/python/interpreter_cache.py
index e0126f74e13..95908ddbd75 100644
--- a/src/python/pants/backend/python/interpreter_cache.py
+++ b/src/python/pants/backend/python/interpreter_cache.py
@@ -115,9 +115,13 @@ def select_interpreter_for_targets(self, targets):
# Return the lowest compatible interpreter.
return min(allowed_interpreters)
- def _interpreter_from_path(self, path, filters=()):
+ def _interpreter_from_relpath(self, path, filters=()):
+ path = os.path.join(self._cache_dir, path)
try:
executable = os.readlink(os.path.join(path, 'python'))
+ if not os.path.exists(executable):
+ self._purge_interpreter(path)
+ return None
except OSError:
return None
interpreter = PythonInterpreter.from_binary(executable, include_site_extras=False)
@@ -125,7 +129,8 @@ def _interpreter_from_path(self, path, filters=()):
return self._resolve(interpreter)
return None
- def _setup_interpreter(self, interpreter, cache_target_path):
+ def _setup_interpreter(self, interpreter, identity_str):
+ cache_target_path = os.path.join(self._cache_dir, identity_str)
with safe_concurrent_creation(cache_target_path) as safe_path:
os.mkdir(safe_path) # Parent will already have been created by safe_concurrent_creation.
os.symlink(interpreter.binary, os.path.join(safe_path, 'python'))
@@ -134,22 +139,19 @@ def _setup_interpreter(self, interpreter, cache_target_path):
def _setup_cached(self, filters=()):
"""Find all currently-cached interpreters."""
for interpreter_dir in os.listdir(self._cache_dir):
- path = os.path.join(self._cache_dir, interpreter_dir)
- if os.path.isdir(path):
- pi = self._interpreter_from_path(path, filters=filters)
- if pi:
- logger.debug('Detected interpreter {}: {}'.format(pi.binary, str(pi.identity)))
- yield pi
+ pi = self._interpreter_from_relpath(interpreter_dir, filters=filters)
+ if pi:
+ logger.debug('Detected interpreter {}: {}'.format(pi.binary, str(pi.identity)))
+ yield pi
def _setup_paths(self, paths, filters=()):
"""Find interpreters under paths, and cache them."""
for interpreter in self._matching(PythonInterpreter.all(paths), filters=filters):
identity_str = str(interpreter.identity)
- cache_path = os.path.join(self._cache_dir, identity_str)
- pi = self._interpreter_from_path(cache_path, filters=filters)
+ pi = self._interpreter_from_relpath(identity_str, filters=filters)
if pi is None:
- self._setup_interpreter(interpreter, cache_path)
- pi = self._interpreter_from_path(cache_path, filters=filters)
+ self._setup_interpreter(interpreter, identity_str)
+ pi = self._interpreter_from_relpath(identity_str, filters=filters)
if pi:
yield pi
@@ -251,3 +253,14 @@ def _resolve_and_link(self, interpreter, requirement, target_link):
_safe_link(target_location, target_link)
logger.debug(' installed {}'.format(target_location))
return Package.from_href(target_location)
+
+ def _purge_interpreter(self, interpreter_dir):
+ try:
+ logger.info('Detected stale interpreter `{}` in the interpreter cache, purging.'
+ .format(interpreter_dir))
+ shutil.rmtree(interpreter_dir, ignore_errors=True)
+ except Exception as e:
+ logger.warn(
+ 'Caught exception {!r} during interpreter purge. Please run `./pants clean-all`!'
+ .format(e)
+ )
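The interpreter-cache change above makes `_interpreter_from_relpath` detect cache entries whose `python` symlink points at a binary that has since been deleted (for example a removed pyenv install) and purge them rather than failing later. A rough sketch of that dangling-symlink check, with an illustrative helper name:

import os
import shutil

def interpreter_binary_or_none(interpreter_dir):
  """Return the cached interpreter's real binary path, purging the entry if it is stale."""
  try:
    executable = os.readlink(os.path.join(interpreter_dir, 'python'))
    if not os.path.exists(executable):
      # The symlink survives but its target was removed: purge the stale cache entry.
      shutil.rmtree(interpreter_dir, ignore_errors=True)
      return None
    return executable
  except OSError:
    # No 'python' symlink at all, or the directory is not a cache entry.
    return None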
diff --git a/src/python/pants/backend/python/rules/python_test_runner.py b/src/python/pants/backend/python/rules/python_test_runner.py
index 70c6457f77c..26ac23c39e5 100644
--- a/src/python/pants/backend/python/rules/python_test_runner.py
+++ b/src/python/pants/backend/python/rules/python_test_runner.py
@@ -64,7 +64,7 @@ def run_python_test(transitive_hydrated_target, pytest):
# pex27, where it should be hermetically provided in some way.
output_pytest_requirements_pex_filename = 'pytest-with-requirements.pex'
requirements_pex_argv = [
- './{}'.format(pex_snapshot.files[0].path),
+ './{}'.format(pex_snapshot.files[0]),
'--python', python_binary,
'-e', 'pytest:main',
'-o', output_pytest_requirements_pex_filename,
diff --git a/src/python/pants/backend/python/subsystems/pex_build_util.py b/src/python/pants/backend/python/subsystems/pex_build_util.py
index 1e2140f3794..417875eaeea 100644
--- a/src/python/pants/backend/python/subsystems/pex_build_util.py
+++ b/src/python/pants/backend/python/subsystems/pex_build_util.py
@@ -7,6 +7,7 @@
import logging
import os
from builtins import str
+from collections import defaultdict
from future.utils import PY2
from pex.fetcher import Fetcher
@@ -50,6 +51,19 @@ def has_python_requirements(tgt):
return isinstance(tgt, PythonRequirementLibrary)
+def can_have_python_platform(tgt):
+ return isinstance(tgt, (PythonBinary, PythonDistribution))
+
+
+def targets_by_platform(targets, python_setup):
+ d = defaultdict(OrderedSet)
+ for target in targets:
+ if can_have_python_platform(target):
+ for platform in target.platforms if target.platforms else python_setup.platforms:
+ d[platform].add(target)
+ return d
+
+
def _create_source_dumper(builder, tgt):
if type(tgt) == Files:
# Loose `Files` as opposed to `Resources` or `PythonTarget`s have no (implied) package structure
@@ -241,8 +255,9 @@ def add_interpreter_constraints_from(self, constraint_tgts):
# TODO this would be a great place to validate the constraints and present a good error message
# if they are incompatible because all the sources of the constraints are available.
# See: https://github.com/pantsbuild/pex/blob/584b6e367939d24bc28aa9fa36eb911c8297dac8/pex/interpreter_constraints.py
- for tgt in constraint_tgts:
- for constraint in tgt.compatibility:
+ constraint_tuples = {self._python_setup_subsystem.compatibility_or_constraints(tgt) for tgt in constraint_tgts}
+ for constraint_tuple in constraint_tuples:
+ for constraint in constraint_tuple:
self.add_interpreter_constraint(constraint)
def add_direct_requirements(self, reqs):
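The new `targets_by_platform` helper groups platform-bearing targets by each platform they declare, falling back to the global `python_setup.platforms` value for targets that declare none. A minimal sketch of that grouping with plain stand-in targets (names and platform strings are illustrative):

from collections import defaultdict, namedtuple

FakeTarget = namedtuple('FakeTarget', ['name', 'platforms'])

def targets_by_platform_example(targets, global_platforms):
  grouped = defaultdict(set)
  for target in targets:
    # A target with no explicit platforms inherits the global/default platform list.
    for platform in (target.platforms or global_platforms):
      grouped[platform].add(target.name)
  return dict(grouped)

grouped = targets_by_platform_example(
  [FakeTarget('bin-a', ['linux-x86_64']), FakeTarget('bin-b', [])],
  global_platforms=['current'])
assert grouped == {'linux-x86_64': {'bin-a'}, 'current': {'bin-b'}}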
diff --git a/src/python/pants/backend/python/subsystems/pytest.py b/src/python/pants/backend/python/subsystems/pytest.py
index e3eb7f9e1b6..cbf53f953dd 100644
--- a/src/python/pants/backend/python/subsystems/pytest.py
+++ b/src/python/pants/backend/python/subsystems/pytest.py
@@ -14,7 +14,9 @@ class PyTest(Subsystem):
def register_options(cls, register):
super(PyTest, cls).register_options(register)
# TODO: This is currently bounded below `3.7` due to #6282.
- register('--requirements', advanced=True, default='pytest>=3.0.7,<3.7',
+ # TODO: Additionally, this is temporarily pinned to 3.0.7 due to more-itertools 6.0.0 dropping
+ # Python 2 support: https://github.com/pytest-dev/pytest/issues/4770.
+ register('--requirements', advanced=True, default='pytest==3.0.7',
help='Requirements string for the pytest library.')
register('--timeout-requirements', advanced=True, default='pytest-timeout>=1.2,<1.3',
help='Requirements string for the pytest-timeout library.')
diff --git a/src/python/pants/backend/python/subsystems/python_native_code.py b/src/python/pants/backend/python/subsystems/python_native_code.py
index 1b7dc702905..7c8bde33385 100644
--- a/src/python/pants/backend/python/subsystems/python_native_code.py
+++ b/src/python/pants/backend/python/subsystems/python_native_code.py
@@ -5,13 +5,12 @@
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import str
-from collections import defaultdict
from pants.backend.native.subsystems.native_toolchain import NativeToolchain
from pants.backend.native.targets.native_library import NativeLibrary
from pants.backend.python.python_requirement import PythonRequirement
+from pants.backend.python.subsystems import pex_build_util
from pants.backend.python.subsystems.python_setup import PythonSetup
-from pants.backend.python.targets.python_binary import PythonBinary
from pants.backend.python.targets.python_distribution import PythonDistribution
from pants.base.exceptions import IncompatiblePlatformsError
from pants.binaries.executable_pex_tool import ExecutablePexTool
@@ -75,7 +74,7 @@ def _any_targets_have_native_sources(self, targets):
return True
return False
- def get_targets_by_declared_platform(self, targets):
+ def _get_targets_by_declared_platform_with_placeholders(self, targets_by_platform):
"""
Aggregates a dict that maps a platform string to a list of targets that specify the platform.
If no targets have platforms arguments, return a dict containing platforms inherited from
@@ -84,19 +83,12 @@ def get_targets_by_declared_platform(self, targets):
:param tgts: a list of :class:`Target` objects.
:returns: a dict mapping a platform string to a list of targets that specify the platform.
"""
- targets_by_platforms = defaultdict(list)
- for tgt in targets:
- for platform in tgt.platforms:
- targets_by_platforms[platform].append(tgt)
-
- if not targets_by_platforms:
+ if not targets_by_platform:
for platform in self._python_setup.platforms:
- targets_by_platforms[platform] = ['(No target) Platform inherited from either the '
+ targets_by_platform[platform] = ['(No target) Platform inherited from either the '
'--platforms option or a pants.ini file.']
- return targets_by_platforms
-
- _PYTHON_PLATFORM_TARGETS_CONSTRAINT = SubclassesOf(PythonBinary, PythonDistribution)
+ return targets_by_platform
def check_build_for_current_platform_only(self, targets):
"""
@@ -110,9 +102,8 @@ def check_build_for_current_platform_only(self, targets):
if not self._any_targets_have_native_sources(targets):
return False
- targets_with_platforms = [target for target in targets
- if self._PYTHON_PLATFORM_TARGETS_CONSTRAINT.satisfied_by(target)]
- platforms_with_sources = self.get_targets_by_declared_platform(targets_with_platforms)
+ targets_by_platform = pex_build_util.targets_by_platform(targets, self._python_setup)
+ platforms_with_sources = self._get_targets_by_declared_platform_with_placeholders(targets_by_platform)
platform_names = list(platforms_with_sources.keys())
if len(platform_names) < 1:
diff --git a/src/python/pants/backend/python/subsystems/python_tool_base.py b/src/python/pants/backend/python/subsystems/python_tool_base.py
index 149f671a82c..54b6495e1b0 100644
--- a/src/python/pants/backend/python/subsystems/python_tool_base.py
+++ b/src/python/pants/backend/python/subsystems/python_tool_base.py
@@ -13,10 +13,16 @@ class PythonToolBase(Subsystem):
# Subclasses must set.
default_requirements = None
default_entry_point = None
+ # Subclasses need not override.
+ default_interpreter_constraints = []
@classmethod
def register_options(cls, register):
super(PythonToolBase, cls).register_options(register)
+ register('--interpreter-constraints', type=list, advanced=True, fingerprint=True,
+ default=cls.default_interpreter_constraints,
+ help='Python interpreter constraints for this tool. An empty list uses the default '
+ 'interpreter constraints for the repo.')
register('--requirements', type=list, advanced=True, fingerprint=True,
default=cls.default_requirements,
help='Python requirement strings for the tool.')
@@ -24,6 +30,9 @@ def register_options(cls, register):
default=cls.default_entry_point,
help='The main module for the tool.')
+ def get_interpreter_constraints(self):
+ return self.get_options().interpreter_constraints
+
def get_requirement_specs(self):
return self.get_options().requirements
diff --git a/src/python/pants/backend/python/tasks/python_binary_create.py b/src/python/pants/backend/python/tasks/python_binary_create.py
index 84debe8333a..b4abb43d5e7 100644
--- a/src/python/pants/backend/python/tasks/python_binary_create.py
+++ b/src/python/pants/backend/python/tasks/python_binary_create.py
@@ -140,7 +140,7 @@ def _create_binary(self, binary_tgt, results_dir):
if is_python_target(tgt):
constraint_tgts.append(tgt)
- # Add target's interpreter compatibility constraints to pex info.
+ # Add target-level and possibly global interpreter compatibility constraints to pex info.
pex_builder.add_interpreter_constraints_from(constraint_tgts)
# Dump everything into the builder's chroot.
diff --git a/src/python/pants/backend/python/tasks/python_tool_prep_base.py b/src/python/pants/backend/python/tasks/python_tool_prep_base.py
index 7f0de9db4d6..203da570afd 100644
--- a/src/python/pants/backend/python/tasks/python_tool_prep_base.py
+++ b/src/python/pants/backend/python/tasks/python_tool_prep_base.py
@@ -5,6 +5,7 @@
from __future__ import absolute_import, division, print_function, unicode_literals
import os
+from builtins import str
from contextlib import contextmanager
from pex.pex import PEX
@@ -13,6 +14,8 @@
from pants.backend.python.interpreter_cache import PythonInterpreterCache
from pants.backend.python.python_requirement import PythonRequirement
from pants.backend.python.subsystems.pex_build_util import PexBuilderWrapper
+from pants.base.build_environment import get_pants_cachedir
+from pants.base.hash_utils import stable_json_sha1
from pants.base.workunit import WorkUnitLabel
from pants.task.task import Task
from pants.util.dirutil import safe_concurrent_creation
@@ -23,11 +26,16 @@
class PythonToolInstance(object):
def __init__(self, pex_path, interpreter):
self._pex = PEX(pex_path, interpreter=interpreter)
+ self._interpreter = interpreter
@property
def pex(self):
return self._pex
+ @property
+ def interpreter(self):
+ return self._interpreter
+
def _pretty_cmdline(self, args):
return safe_shlex_join(self._pex.cmdline(args))
@@ -63,6 +71,12 @@ def run(self, *args, **kwargs):
return cmdline, exit_code
+# TODO: This python tool setup eagerly generates a pex for each task in every goal that is
+# transitively required by the command-line goals, even for tasks which no-op. This requires the
+# pex for each relevant python tool to be buildable on the current host, even if it will never be
+# invoked. Especially given the existing clear separation of concerns into
+# PythonToolBase/PythonToolInstance/PythonToolPrepBase, this seems like a ripe use case for v2
+# rules, which would give caching for free and no-op when a tool isn't needed for the requested goals.
class PythonToolPrepBase(Task):
"""Base class for tasks that resolve a python tool to be invoked out-of-process."""
@@ -97,16 +111,30 @@ def _build_tool_pex(self, tool_subsystem, interpreter, pex_path):
pex_builder.set_entry_point(tool_subsystem.get_entry_point())
pex_builder.freeze()
+ def _generate_fingerprinted_pex_path(self, tool_subsystem, interpreter):
+    # `tool_subsystem.get_requirement_specs()` is a list, but order shouldn't actually matter. The
+    # list should probably be sorted, but it's possible a user could intentionally tweak the order
+    # to work around a particular requirement resolve-order issue. In practice the lists are
+    # expected to be mostly static, so we accept the risk of too-fine-grained caching creating lots
+    # of pexes in the cache dir.
+ specs_fingerprint = stable_json_sha1(tool_subsystem.get_requirement_specs())
+ return os.path.join(
+ get_pants_cachedir(),
+ 'python',
+ str(interpreter.identity),
+ self.fingerprint,
+ '{}-{}.pex'.format(tool_subsystem.options_scope, specs_fingerprint),
+ )
+
def execute(self):
tool_subsystem = self.tool_subsystem_cls.scoped_instance(self)
- pex_name = tool_subsystem.options_scope
- pex_path = os.path.join(self.workdir, self.fingerprint, '{}.pex'.format(pex_name))
interpreter_cache = PythonInterpreterCache.global_instance()
- interpreter = interpreter_cache.select_interpreter_for_targets([])
+ interpreter = min(interpreter_cache.setup(filters=tool_subsystem.get_interpreter_constraints()))
+ pex_path = self._generate_fingerprinted_pex_path(tool_subsystem, interpreter)
if not os.path.exists(pex_path):
- with self.context.new_workunit(name='create-{}-pex'.format(pex_name),
+ with self.context.new_workunit(name='create-{}-pex'.format(tool_subsystem.options_scope),
labels=[WorkUnitLabel.PREP]):
self._build_tool_pex(tool_subsystem=tool_subsystem,
interpreter=interpreter,
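
The fingerprinted path above keys each tool PEX on the interpreter identity, the task fingerprint, and a stable hash of the tool's requirement specs, so the cache under ~/.cache/pants can be shared across workdirs. A minimal sketch of that kind of content-addressed path, assuming only the standard library (the helper name and arguments are illustrative, not the Pants API):

    import hashlib
    import json
    import os

    def fingerprinted_tool_pex_path(cache_root, tool_scope, interpreter_identity, requirement_specs):
        # Hash the requirement specs via a stable JSON encoding so identical specs
        # always map to the same cached PEX file.
        specs_fingerprint = hashlib.sha1(
            json.dumps(list(requirement_specs), sort_keys=True).encode('utf-8')).hexdigest()
        return os.path.join(
            cache_root, 'python', str(interpreter_identity),
            '{}-{}.pex'.format(tool_scope, specs_fingerprint))

    # e.g. fingerprinted_tool_pex_path('/home/me/.cache/pants', 'isort',
    #                                  'CPython-2.7.13', ['isort==4.3.4'])
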
diff --git a/src/python/pants/backend/python/tasks/resolve_requirements_task_base.py b/src/python/pants/backend/python/tasks/resolve_requirements_task_base.py
index 74598a77b34..29c665c3c9f 100644
--- a/src/python/pants/backend/python/tasks/resolve_requirements_task_base.py
+++ b/src/python/pants/backend/python/tasks/resolve_requirements_task_base.py
@@ -13,8 +13,10 @@
from pex.pex_builder import PEXBuilder
from pants.backend.python.python_requirement import PythonRequirement
+from pants.backend.python.subsystems import pex_build_util
from pants.backend.python.subsystems.pex_build_util import PexBuilderWrapper
from pants.backend.python.subsystems.python_native_code import PythonNativeCode
+from pants.backend.python.subsystems.python_setup import PythonSetup
from pants.backend.python.targets.python_requirement_library import PythonRequirementLibrary
from pants.base.hash_utils import hash_all
from pants.invalidation.cache_manager import VersionedTargetSet
@@ -36,12 +38,17 @@ def subsystem_dependencies(cls):
return super(ResolveRequirementsTaskBase, cls).subsystem_dependencies() + (
PexBuilderWrapper.Factory,
PythonNativeCode.scoped(cls),
+ PythonSetup.scoped(cls),
)
@memoized_property
def _python_native_code_settings(self):
return PythonNativeCode.scoped_instance(self)
+ @memoized_property
+ def _python_setup(self):
+ return PythonSetup.global_instance()
+
@classmethod
def prepare(cls, options, round_manager):
super(ResolveRequirementsTaskBase, cls).prepare(options, round_manager)
@@ -70,11 +77,11 @@ def resolve_requirements(self, interpreter, req_libs):
# We need to ensure that we are resolving for only the current platform if we are
# including local python dist targets that have native extensions.
- tgts = self.context.targets()
- if self._python_native_code_settings.check_build_for_current_platform_only(tgts):
- maybe_platforms = ['current']
+ targets_by_platform = pex_build_util.targets_by_platform(self.context.targets(), self._python_setup)
+ if self._python_native_code_settings.check_build_for_current_platform_only(targets_by_platform):
+ platforms = ['current']
else:
- maybe_platforms = None
+ platforms = list(sorted(targets_by_platform.keys()))
path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), target_set_id))
# Note that we check for the existence of the directory, instead of for invalid_vts,
@@ -84,7 +91,7 @@ def resolve_requirements(self, interpreter, req_libs):
pex_builder = PexBuilderWrapper.Factory.create(
builder=PEXBuilder(path=safe_path, interpreter=interpreter, copy=True),
log=self.context.log)
- pex_builder.add_requirement_libs_from(req_libs, platforms=maybe_platforms)
+ pex_builder.add_requirement_libs_from(req_libs, platforms=platforms)
pex_builder.freeze()
return PEX(path, interpreter=interpreter)
diff --git a/src/python/pants/backend/python/tasks/select_interpreter.py b/src/python/pants/backend/python/tasks/select_interpreter.py
index 8c0905a90e2..24a06809f47 100644
--- a/src/python/pants/backend/python/tasks/select_interpreter.py
+++ b/src/python/pants/backend/python/tasks/select_interpreter.py
@@ -78,6 +78,9 @@ def execute(self):
interpreter_path_file = self._interpreter_path_file(target_set_id)
if not os.path.exists(interpreter_path_file):
self._create_interpreter_path_file(interpreter_path_file, python_tgts)
+ else:
+ if self._detect_and_purge_invalid_interpreter(interpreter_path_file):
+ self._create_interpreter_path_file(interpreter_path_file, python_tgts)
interpreter = self._get_interpreter(interpreter_path_file)
self.context.products.register_data(PythonInterpreter, interpreter)
@@ -95,6 +98,15 @@ def _create_interpreter_path_file(self, interpreter_path_file, targets):
def _interpreter_path_file(self, target_set_id):
return os.path.join(self.workdir, target_set_id, 'interpreter.info')
+ def _detect_and_purge_invalid_interpreter(self, interpreter_path_file):
+ interpreter = self._get_interpreter(interpreter_path_file)
+ if not os.path.exists(interpreter.binary):
+ self.context.log.info('Stale interpreter reference detected: {}, removing reference and '
+ 'selecting a new interpreter.'.format(interpreter.binary))
+ os.remove(interpreter_path_file)
+ return True
+ return False
+
@staticmethod
def _get_interpreter(interpreter_path_file):
with open(interpreter_path_file, 'r') as infile:
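
The purge logic above reduces to: read the recorded interpreter path, and if its binary no longer exists on disk, delete the cache file so selection runs again. A standalone sketch of the same pattern, assuming the cache file simply records the binary path (the exact file format is an assumption here):

    import os

    def purge_if_stale(interpreter_path_file):
        # Assume the first line of the file records the selected interpreter binary's path.
        with open(interpreter_path_file, 'r') as f:
            binary = f.readline().strip()
        if not os.path.exists(binary):
            # The interpreter was removed out from under us (e.g. a pyenv upgrade);
            # drop the cache file so a new interpreter gets selected on this run.
            os.remove(interpreter_path_file)
            return True
        return False
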
diff --git a/src/python/pants/backend/python/tasks/unpack_wheels.py b/src/python/pants/backend/python/tasks/unpack_wheels.py
index 632395e0cd3..e66483a584e 100644
--- a/src/python/pants/backend/python/tasks/unpack_wheels.py
+++ b/src/python/pants/backend/python/tasks/unpack_wheels.py
@@ -105,9 +105,9 @@ def _name_and_platform(whl):
@memoized_classproperty
def _current_platform_abbreviation(cls):
- return NativeBackendPlatform.create().resolve_platform_specific({
- 'darwin': lambda: 'macosx',
- 'linux': lambda: 'linux',
+ return NativeBackendPlatform.create().resolve_for_enum_variant({
+ 'darwin': 'macosx',
+ 'linux': 'linux',
})
@classmethod
diff --git a/src/python/pants/bin/daemon_pants_runner.py b/src/python/pants/bin/daemon_pants_runner.py
index 02055340ff7..ff0da1ed964 100644
--- a/src/python/pants/bin/daemon_pants_runner.py
+++ b/src/python/pants/bin/daemon_pants_runner.py
@@ -231,18 +231,16 @@ def nailgunned_stdio(cls, sock, env, handle_stdin=True):
) as finalizer:
yield finalizer
+ # TODO: there's no testing for this method, and this caused a user-visible failure -- see #7008!
def _raise_deferred_exc(self):
"""Raises deferred exceptions from the daemon's synchronous path in the post-fork client."""
if self._deferred_exception:
- exc_type, exc_value, exc_traceback = self._deferred_exception
- if exc_type == GracefulTerminationException:
- self._exiter.exit(exc_value.exit_code)
try:
- # Expect `_deferred_exception` to be a 3-item tuple of the values returned by sys.exc_info().
- # This permits use the 3-arg form of the `raise` statement to preserve the original traceback.
- raise_with_traceback(exc_type(exc_value), exc_traceback)
- except ValueError:
- # If `_deferred_exception` isn't a 3-item tuple, treat it like a bare exception.
+ exc_type, exc_value, exc_traceback = self._deferred_exception
+ raise_with_traceback(exc_value, exc_traceback)
+ except TypeError:
+ # If `_deferred_exception` isn't a 3-item tuple (raising a TypeError on the above
+ # destructuring), treat it like a bare exception.
raise self._deferred_exception
def _maybe_get_client_start_time_from_env(self, env):
@@ -327,6 +325,10 @@ def post_fork_child(self):
runner.run()
except KeyboardInterrupt:
self._exiter.exit_and_fail('Interrupted by user.\n')
+ except GracefulTerminationException as e:
+ ExceptionSink.log_exception(
+ 'Encountered graceful termination exception {}; exiting'.format(e))
+ self._exiter.exit(e.exit_code)
except Exception:
ExceptionSink._log_unhandled_exception_and_exit()
else:
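
The rewritten handler assumes `_deferred_exception` is the 3-tuple returned by `sys.exc_info()`, and only falls back to a bare `raise` if unpacking that tuple throws a TypeError. A small sketch of the capture-and-re-raise pattern using `future.utils.raise_with_traceback` (illustrative, not the daemon's exact wiring):

    import sys
    from future.utils import raise_with_traceback

    def capture():
        try:
            raise ValueError('boom from the synchronous path')
        except ValueError:
            return sys.exc_info()  # (exc_type, exc_value, exc_traceback)

    deferred = capture()
    try:
        exc_type, exc_value, exc_traceback = deferred
        # Re-raise the original exception object with its original traceback attached.
        raise_with_traceback(exc_value, exc_traceback)
    except ValueError as e:
        print('re-raised with original traceback: {}'.format(e))
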
diff --git a/src/python/pants/build_graph/build_graph.py b/src/python/pants/build_graph/build_graph.py
index cca13ecd14f..4a6af6e05d5 100644
--- a/src/python/pants/build_graph/build_graph.py
+++ b/src/python/pants/build_graph/build_graph.py
@@ -448,7 +448,7 @@ def _walk_rec(addr):
_walk_rec(address)
def transitive_dependees_of_addresses(self, addresses, predicate=None, postorder=False):
- """Returns all transitive dependees of `address`.
+ """Returns all transitive dependees of `addresses`.
Note that this uses `walk_transitive_dependee_graph` and the predicate is passed through,
hence it trims graphs rather than just filtering out Targets that do not match the predicate.
diff --git a/src/python/pants/build_graph/target_filter_subsystem.py b/src/python/pants/build_graph/target_filter_subsystem.py
new file mode 100644
index 00000000000..ae53b7cd743
--- /dev/null
+++ b/src/python/pants/build_graph/target_filter_subsystem.py
@@ -0,0 +1,44 @@
+# coding=utf-8
+# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
+# Licensed under the Apache License, Version 2.0 (see LICENSE).
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import logging
+from builtins import object, set
+
+from pants.subsystem.subsystem import Subsystem
+
+
+logger = logging.getLogger(__name__)
+
+
+class TargetFilter(Subsystem):
+ """Filter targets matching configured criteria.
+
+ :API: public
+ """
+
+ options_scope = 'target-filter'
+
+ @classmethod
+ def register_options(cls, register):
+ super(TargetFilter, cls).register_options(register)
+
+ register('--exclude-tags', type=list,
+ default=[], fingerprint=True,
+ help='Skip targets with given tag(s).')
+
+ def apply(self, targets):
+ exclude_tags = set(self.get_options().exclude_tags)
+ return TargetFiltering(exclude_tags).apply_tag_blacklist(targets)
+
+
+class TargetFiltering(object):
+ """Apply filtering logic against targets."""
+
+ def __init__(self, exclude_tags):
+ self.exclude_tags = exclude_tags
+
+ def apply_tag_blacklist(self, targets):
+ return [t for t in targets if not self.exclude_tags.intersection(t.tags)]
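
Since TargetFiltering is plain Python, the blacklist behaviour is easy to exercise in isolation; a small usage sketch (FakeTarget is purely illustrative):

    import collections

    FakeTarget = collections.namedtuple('FakeTarget', ['name', 'tags'])

    targets = [
        FakeTarget('lib', tags={'python'}),
        FakeTarget('slow_it', tags={'integration', 'python'}),
    ]
    # Any target carrying an excluded tag is dropped from the returned list.
    kept = TargetFiltering({'integration'}).apply_tag_blacklist(targets)
    assert [t.name for t in kept] == ['lib']
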
diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/a b/src/python/pants/dummy.c
similarity index 100%
rename from testprojects/src/resources/org/pantsbuild/testproject/ordering/a
rename to src/python/pants/dummy.c
diff --git a/src/python/pants/engine/BUILD b/src/python/pants/engine/BUILD
index 168201a5719..cab6d0cbcee 100644
--- a/src/python/pants/engine/BUILD
+++ b/src/python/pants/engine/BUILD
@@ -50,6 +50,7 @@ python_library(
dependencies=[
'3rdparty/python/twitter/commons:twitter.common.collections',
'3rdparty/python:future',
+ ':objects',
':rules',
':selectors',
'src/python/pants/base:project_tree',
@@ -121,7 +122,10 @@ python_library(
name='objects',
sources=['objects.py'],
dependencies=[
+ '3rdparty/python:future',
'src/python/pants/util:meta',
+ 'src/python/pants/util:memo',
+ 'src/python/pants/util:objects',
]
)
@@ -172,6 +176,7 @@ python_library(
':isolated_process',
':native',
':nodes',
+ ':objects',
':rules',
'src/python/pants/base:exceptions',
'src/python/pants/base:specs',
diff --git a/src/python/pants/engine/addressable.py b/src/python/pants/engine/addressable.py
index 508a9471c61..25f4233c701 100644
--- a/src/python/pants/engine/addressable.py
+++ b/src/python/pants/engine/addressable.py
@@ -11,9 +11,9 @@
from future.utils import string_types
from pants.build_graph.address import Address, BuildFileAddress
-from pants.engine.objects import Resolvable, Serializable
+from pants.engine.objects import Collection, Resolvable, Serializable
from pants.util.collections_abc_backport import MutableMapping, MutableSequence
-from pants.util.objects import Collection, TypeConstraintError
+from pants.util.objects import TypeConstraintError
Addresses = Collection.of(Address)
diff --git a/src/python/pants/engine/build_files.py b/src/python/pants/engine/build_files.py
index ee65bd92119..84430073f35 100644
--- a/src/python/pants/engine/build_files.py
+++ b/src/python/pants/engine/build_files.py
@@ -219,7 +219,7 @@ def addresses_from_address_families(address_mapper, specs):
"""
# Capture a Snapshot covering all paths for these Specs, then group by directory.
snapshot = yield Get(Snapshot, PathGlobs, _spec_to_globs(address_mapper, specs))
- dirnames = {dirname(f.stat.path) for f in snapshot.files}
+ dirnames = {dirname(f) for f in snapshot.files}
address_families = yield [Get(AddressFamily, Dir(d)) for d in dirnames]
address_family_by_directory = {af.namespace: af for af in address_families}
diff --git a/src/python/pants/engine/fs.py b/src/python/pants/engine/fs.py
index c43b41cf858..8d009f01c06 100644
--- a/src/python/pants/engine/fs.py
+++ b/src/python/pants/engine/fs.py
@@ -4,13 +4,16 @@
from __future__ import absolute_import, division, print_function, unicode_literals
+import os
+
from future.utils import binary_type, text_type
-from pants.base.project_tree import Dir, File
+from pants.engine.objects import Collection
from pants.engine.rules import RootRule
from pants.option.custom_types import GlobExpansionConjunction
from pants.option.global_options import GlobMatchErrorBehavior
-from pants.util.objects import Collection, datatype
+from pants.util.dirutil import maybe_read_file, safe_delete, safe_file_dump
+from pants.util.objects import Exactly, datatype
class FileContent(datatype([('path', text_type), ('content', binary_type)])):
@@ -56,12 +59,9 @@ def __new__(cls, include, exclude=(), glob_match_error_behavior=None, conjunctio
cls,
include=tuple(include),
exclude=tuple(exclude),
- glob_match_error_behavior=GlobMatchErrorBehavior.create(glob_match_error_behavior),
- conjunction=GlobExpansionConjunction.create(conjunction))
-
-
-class PathGlobsAndRoot(datatype([('path_globs', PathGlobs), ('root', text_type)])):
- pass
+ glob_match_error_behavior=GlobMatchErrorBehavior.create(glob_match_error_behavior,
+ none_is_default=True),
+ conjunction=GlobExpansionConjunction.create(conjunction, none_is_default=True))
class Digest(datatype([('fingerprint', text_type), ('serialized_bytes_length', int)])):
@@ -82,6 +82,33 @@ class Digest(datatype([('fingerprint', text_type), ('serialized_bytes_length', i
https://github.com/pantsbuild/pants/issues/5802
"""
+ @classmethod
+ def _path(cls, directory):
+ return '{}.digest'.format(directory.rstrip(os.sep))
+
+ @classmethod
+ def clear(cls, directory):
+ """Clear any existing Digest file adjacent to the given directory."""
+ safe_delete(cls._path(directory))
+
+ @classmethod
+ def load(cls, directory):
+ """Load a Digest from a `.digest` file adjacent to the given directory.
+
+ :return: A Digest, or None if the Digest did not exist.
+ """
+ read_file = maybe_read_file(cls._path(directory), binary_mode=False)
+ if read_file:
+ fingerprint, length = read_file.split(':')
+ return Digest(fingerprint, int(length))
+ else:
+ return None
+
+ def dump(self, directory):
+ """Dump this Digest object adjacent to the given directory."""
+ payload = '{}:{}'.format(self.fingerprint, self.serialized_bytes_length)
+ safe_file_dump(self._path(directory), payload=payload, mode='w')
+
def __repr__(self):
return '''Digest(fingerprint={}, serialized_bytes_length={})'''.format(
self.fingerprint,
@@ -92,8 +119,25 @@ def __str__(self):
return repr(self)
-class Snapshot(datatype([('directory_digest', Digest), ('path_stats', tuple)])):
- """A Snapshot is a collection of Files and Dirs fingerprinted by their names/content.
+class PathGlobsAndRoot(datatype([
+ ('path_globs', PathGlobs),
+ ('root', text_type),
+ ('digest_hint', Exactly(Digest, type(None))),
+])):
+ """A set of PathGlobs to capture relative to some root (which may exist outside of the buildroot).
+
+ If the `digest_hint` is set, it must be the Digest that we would expect to get if we were to
+ expand and Digest the globs. The hint is an optimization that allows for bypassing filesystem
+ operations in cases where the expected Digest is known, and the content for the Digest is already
+ stored.
+ """
+
+ def __new__(cls, path_globs, root, digest_hint=None):
+ return super(PathGlobsAndRoot, cls).__new__(cls, path_globs, root, digest_hint)
+
+
+class Snapshot(datatype([('directory_digest', Digest), ('files', tuple), ('dirs', tuple)])):
+ """A Snapshot is a collection of file paths and dir paths fingerprinted by their names/content.
Snapshots are used to make it easier to isolate process execution by fixing the contents
of the files being operated on and easing their movement to and from isolated execution
@@ -104,22 +148,6 @@ class Snapshot(datatype([('directory_digest', Digest), ('path_stats', tuple)])):
def is_empty(self):
return self == EMPTY_SNAPSHOT
- @property
- def dirs(self):
- return [p for p in self.path_stats if type(p.stat) == Dir]
-
- @property
- def dir_stats(self):
- return [p.stat for p in self.dirs]
-
- @property
- def files(self):
- return [p for p in self.path_stats if type(p.stat) == File]
-
- @property
- def file_stats(self):
- return [p.stat for p in self.files]
-
class MergedDirectories(datatype([('directories', tuple)])):
pass
@@ -148,7 +176,8 @@ class UrlToFetch(datatype([('url', text_type), ('digest', Digest)])):
EMPTY_SNAPSHOT = Snapshot(
directory_digest=EMPTY_DIRECTORY_DIGEST,
- path_stats=(),
+ files=(),
+ dirs=()
)
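
The new Digest.dump/Digest.load helpers persist a digest as a single `fingerprint:length` line in a `.digest` file sitting next to the directory it describes. A minimal sketch of that round trip without the Pants dirutil helpers (function names are illustrative):

    import os

    def digest_sidecar_path(directory):
        return '{}.digest'.format(directory.rstrip(os.sep))

    def dump_digest(directory, fingerprint, length):
        with open(digest_sidecar_path(directory), 'w') as f:
            f.write('{}:{}'.format(fingerprint, length))

    def load_digest(directory):
        try:
            with open(digest_sidecar_path(directory), 'r') as f:
                fingerprint, length = f.read().split(':')
            return fingerprint, int(length)
        except IOError:
            return None

    # dump_digest('/tmp/out/gen', 'ab12cd', 42) writes /tmp/out/gen.digest containing 'ab12cd:42'.
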
diff --git a/src/python/pants/engine/legacy/BUILD b/src/python/pants/engine/legacy/BUILD
index cb8086944d8..8db00aa36d4 100644
--- a/src/python/pants/engine/legacy/BUILD
+++ b/src/python/pants/engine/legacy/BUILD
@@ -75,6 +75,7 @@ python_library(
'src/python/pants/build_graph',
'src/python/pants/engine:build_files',
'src/python/pants/engine:mapper',
+ 'src/python/pants/engine:objects',
'src/python/pants/engine:parser',
'src/python/pants/engine:selectors',
'src/python/pants/option',
diff --git a/src/python/pants/engine/legacy/graph.py b/src/python/pants/engine/legacy/graph.py
index 72368f1a6db..7bd0efbeaf8 100644
--- a/src/python/pants/engine/legacy/graph.py
+++ b/src/python/pants/engine/legacy/graph.py
@@ -26,13 +26,14 @@
from pants.engine.legacy.address_mapper import LegacyAddressMapper
from pants.engine.legacy.structs import BundleAdaptor, BundlesField, SourcesField, TargetAdaptor
from pants.engine.mapper import AddressMapper
+from pants.engine.objects import Collection
from pants.engine.parser import SymbolTable, TargetAdaptorContainer
from pants.engine.rules import RootRule, rule
from pants.engine.selectors import Get, Select
from pants.option.global_options import GlobMatchErrorBehavior
from pants.source.filespec import any_matches_filespec
from pants.source.wrapped_globs import EagerFilesetWithSpec, FilesetRelPathWrapper
-from pants.util.objects import Collection, datatype
+from pants.util.objects import datatype
logger = logging.getLogger(__name__)
diff --git a/src/python/pants/engine/native.py b/src/python/pants/engine/native.py
index 5026b055382..960e397621a 100644
--- a/src/python/pants/engine/native.py
+++ b/src/python/pants/engine/native.py
@@ -376,6 +376,12 @@ def extern_store_i64(self, context_handle, i64):
c = self._ffi.from_handle(context_handle)
return c.to_value(i64)
+ @_extern_decl('Handle', ['ExternContext*', 'double'])
+ def extern_store_f64(self, context_handle, f64):
+ """Given a context and double, return a new Handle to represent the double."""
+ c = self._ffi.from_handle(context_handle)
+ return c.to_value(f64)
+
@_extern_decl('Handle', ['ExternContext*', '_Bool'])
def extern_store_bool(self, context_handle, b):
"""Given a context and _Bool, return a new Handle to represent the _Bool."""
@@ -634,6 +640,7 @@ def init_externs():
self.ffi_lib.extern_store_bytes,
self.ffi_lib.extern_store_utf8,
self.ffi_lib.extern_store_i64,
+ self.ffi_lib.extern_store_f64,
self.ffi_lib.extern_store_bool,
self.ffi_lib.extern_project_ignoring_type,
self.ffi_lib.extern_project_multi,
@@ -690,10 +697,6 @@ def new_scheduler(self,
construct_snapshot,
construct_file_content,
construct_files_content,
- construct_path_stat,
- construct_dir,
- construct_file,
- construct_link,
construct_process_result,
constraint_address,
constraint_path_globs,
@@ -722,10 +725,6 @@ def tc(constraint):
func(construct_snapshot),
func(construct_file_content),
func(construct_files_content),
- func(construct_path_stat),
- func(construct_dir),
- func(construct_file),
- func(construct_link),
func(construct_process_result),
# TypeConstraints.
tc(constraint_address),
diff --git a/src/python/pants/engine/objects.py b/src/python/pants/engine/objects.py
index a0b0e784a7c..48b017d688d 100644
--- a/src/python/pants/engine/objects.py
+++ b/src/python/pants/engine/objects.py
@@ -5,10 +5,16 @@
from __future__ import absolute_import, division, print_function, unicode_literals
import inspect
+import sys
from abc import abstractmethod, abstractproperty
+from builtins import object
from collections import namedtuple
+from future.utils import PY2
+
+from pants.util.memo import memoized_classmethod
from pants.util.meta import AbstractClass
+from pants.util.objects import Exactly, TypedCollection, datatype
class SerializationError(Exception):
@@ -146,3 +152,38 @@ def validate(self):
:raises: :class:`ValidationError` if this object is invalid.
"""
+
+
+class Collection(object):
+ """Constructs classes representing collections of objects of a particular type.
+
+ The produced class will expose its values under a field named dependencies - this is a stable API
+ which may be consumed e.g. over FFI from the engine.
+
+ Python consumers of a Collection should prefer to use its standard iteration API.
+
+ Note that elements of a Collection are type-checked upon construction.
+ """
+
+ @memoized_classmethod
+ def of(cls, *element_types):
+ union = '|'.join(element_type.__name__ for element_type in element_types)
+ type_name = '{}.of({})'.format(cls.__name__, union)
+ if PY2:
+ type_name = type_name.encode('utf-8')
+ type_checked_collection_class = datatype([
+ # Create a datatype with a single field 'dependencies' which is type-checked on construction
+ # to be a collection containing elements of only the exact `element_types` specified.
+ ('dependencies', TypedCollection(Exactly(*element_types)))
+ ], superclass_name=cls.__name__)
+ supertypes = (cls, type_checked_collection_class)
+ properties = {'element_types': element_types}
+ collection_of_type = type(type_name, supertypes, properties)
+
+ # Expose the custom class type at the module level to be pickle compatible.
+ setattr(sys.modules[cls.__module__], type_name, collection_of_type)
+
+ return collection_of_type
+
+ def __iter__(self):
+ return iter(self.dependencies)
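
Collection.of builds a type-checked datatype wrapper on the fly and registers it at module scope so it stays picklable; addressable.py uses it as `Addresses = Collection.of(Address)`. A hedged usage sketch (the Thing class is illustrative):

    class Thing(object):
        def __init__(self, name):
            self.name = name

    Things = Collection.of(Thing)

    # Elements are type-checked on construction; a non-Thing entry would be rejected.
    things = Things(dependencies=(Thing('a'), Thing('b')))
    # Python callers should iterate rather than reach into `.dependencies` directly.
    assert [t.name for t in things] == ['a', 'b']
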
diff --git a/src/python/pants/engine/scheduler.py b/src/python/pants/engine/scheduler.py
index 72d04e661f8..c4374c2f713 100644
--- a/src/python/pants/engine/scheduler.py
+++ b/src/python/pants/engine/scheduler.py
@@ -14,17 +14,17 @@
from pants.base.project_tree import Dir, File, Link
from pants.build_graph.address import Address
from pants.engine.fs import (Digest, DirectoryToMaterialize, FileContent, FilesContent,
- MergedDirectories, Path, PathGlobs, PathGlobsAndRoot, Snapshot,
- UrlToFetch)
+ MergedDirectories, PathGlobs, PathGlobsAndRoot, Snapshot, UrlToFetch)
from pants.engine.isolated_process import ExecuteProcessRequest, FallibleExecuteProcessResult
from pants.engine.native import Function, TypeConstraint, TypeId
from pants.engine.nodes import Return, Throw
+from pants.engine.objects import Collection
from pants.engine.rules import RuleIndex, SingletonRule, TaskRule
from pants.engine.selectors import Params, Select, constraint_for
from pants.rules.core.exceptions import GracefulTerminationException
from pants.util.contextutil import temporary_file_path
from pants.util.dirutil import check_no_overlapping_paths
-from pants.util.objects import Collection, datatype
+from pants.util.objects import datatype
from pants.util.strutil import pluralize
@@ -100,10 +100,6 @@ def __init__(
construct_snapshot=Snapshot,
construct_file_content=FileContent,
construct_files_content=FilesContent,
- construct_path_stat=Path,
- construct_dir=Dir,
- construct_file=File,
- construct_link=Link,
construct_process_result=FallibleExecuteProcessResult,
constraint_address=constraint_for(Address),
constraint_path_globs=constraint_for(PathGlobs),
@@ -282,8 +278,7 @@ def visualize_to_dir(self):
return self._visualize_to_dir
def _metrics(self, session):
- metrics_val = self._native.lib.scheduler_metrics(self._scheduler, session)
- return {k: v for k, v in self._from_value(metrics_val)}
+ return self._from_value(self._native.lib.scheduler_metrics(self._scheduler, session))
def with_fork_context(self, func):
"""See the rustdocs for `scheduler_fork_context` for more information."""
diff --git a/src/python/pants/goal/pantsd_stats.py b/src/python/pants/goal/pantsd_stats.py
index 12fc801451d..5eefef92058 100644
--- a/src/python/pants/goal/pantsd_stats.py
+++ b/src/python/pants/goal/pantsd_stats.py
@@ -11,24 +11,18 @@ class PantsDaemonStats(object):
"""Tracks various stats about the daemon."""
def __init__(self):
- self.target_root_size = 0
- self.affected_targets_size = 0
- self.affected_targets_file_count = 0
self.scheduler_metrics = {}
def set_scheduler_metrics(self, scheduler_metrics):
self.scheduler_metrics = scheduler_metrics
def set_target_root_size(self, size):
- self.target_root_size = size
+ self.scheduler_metrics['target_root_size'] = size
def set_affected_targets_size(self, size):
- self.affected_targets_size = size
+ self.scheduler_metrics['affected_targets_size'] = size
def get_all(self):
- res = dict(self.scheduler_metrics)
- res.update({
- 'target_root_size': self.target_root_size,
- 'affected_targets_size': self.affected_targets_size,
- })
- return res
+ for key in ['target_root_size', 'affected_targets_size']:
+ self.scheduler_metrics.setdefault(key, 0)
+ return self.scheduler_metrics
diff --git a/src/python/pants/init/engine_initializer.py b/src/python/pants/init/engine_initializer.py
index 1b35f31692d..41f042c8844 100644
--- a/src/python/pants/init/engine_initializer.py
+++ b/src/python/pants/init/engine_initializer.py
@@ -346,7 +346,8 @@ def setup_legacy_graph_extended(
rules = (
[
RootRule(Console),
- SingletonRule.from_instance(GlobMatchErrorBehavior.create(glob_match_error_behavior)),
+ SingletonRule.from_instance(GlobMatchErrorBehavior.create(glob_match_error_behavior,
+ none_is_default=True)),
SingletonRule.from_instance(build_configuration),
SingletonRule(SymbolTable, symbol_table),
] +
diff --git a/src/python/pants/java/nailgun_executor.py b/src/python/pants/java/nailgun_executor.py
index d4a16123dd0..5f118496531 100644
--- a/src/python/pants/java/nailgun_executor.py
+++ b/src/python/pants/java/nailgun_executor.py
@@ -228,8 +228,8 @@ def ensure_connectable(self, nailgun):
def _spawn_nailgun_server(self, fingerprint, jvm_options, classpath, stdout, stderr, stdin):
"""Synchronously spawn a new nailgun server."""
# Truncate the nailguns stdout & stderr.
- safe_file_dump(self._ng_stdout, b'')
- safe_file_dump(self._ng_stderr, b'')
+ safe_file_dump(self._ng_stdout, b'', mode='wb')
+ safe_file_dump(self._ng_stderr, b'', mode='wb')
jvm_options = jvm_options + [self._PANTS_NG_BUILDROOT_ARG,
self._create_owner_arg(self._workdir),
diff --git a/src/python/pants/notes/1.14.x.rst b/src/python/pants/notes/1.14.x.rst
index 45e5658ae23..8b2d4e2ef9a 100644
--- a/src/python/pants/notes/1.14.x.rst
+++ b/src/python/pants/notes/1.14.x.rst
@@ -3,6 +3,67 @@
This document describes releases leading up to the ``1.14.x`` ``stable`` series.
+1.14.0rc3 (2/21/2019)
+---------------------
+
+API Changes
+~~~~~~~~~~~
+
+* Add flags to process_executor that say where to materialize output and what output is (#7201)
+ `PR #7201 `_
+
+* Resolve all platforms from all python targets (#7156)
+ `PR #7156 `_
+
+* Remove deprecated test classes (#7243)
+ `PR #7243 `_
+
+Bugfixes
+~~~~~~~~
+
+* Revert remote execution from tower to grpcio (#7256)
+ `PR #7256 `_
+
+* Avoid capturing Snapshots for previously digested codegen outputs (#7241)
+ `PR #7241 `_
+
+* Validate and maybe prune interpreter cache run over run (#7225)
+ `PR #7225 `_
+
+1.14.0rc2 (2/15/2019)
+---------------------
+
+API Changes
+~~~~~~~~~~~
+
+* Pin pytest version to avoid induced breakage from more-itertools transitive dep (#7238)
+ `PR #7238 `_
+ `PR #7240 `_
+
+1.14.0rc1 (2/06/2019)
+---------------------
+
+Bugfixes
+~~~~~~~~
+
+* Only lint the direct sources of a linted target. (#7219)
+ `PR #7219 `_
+
+* Do not render the coursier workunit unless it will run. (#7218)
+ `PR #7218 `_
+
+* Revert "make GoTest subclass PartitionedTestRunnerTaskMixin to test transitively" (#7212)
+ `PR #7212 `_
+
+* Only run master-dependent commithooks on master (#7214)
+ `PR #7214 `_
+
+* Absolute-ify GIT_DIR (#7210)
+ `PR #7210 `_
+
+* Fix release script ownership check command name. (#7204)
+ `PR #7204 `_
+
1.14.0rc0 (2/01/2019)
---------------------
diff --git a/src/python/pants/notes/master.rst b/src/python/pants/notes/master.rst
index 5661e9b3489..7c4448c07fb 100644
--- a/src/python/pants/notes/master.rst
+++ b/src/python/pants/notes/master.rst
@@ -4,6 +4,138 @@ Master Pre-Releases
This document describes development releases which occur weekly from master, and which have
not undergone the vetting associated with ``stable`` releases.
+1.15.0.dev1 (2/22/2019)
+-----------------------
+
+New Features
+~~~~~~~~~~~~
+
+* Allow passing floating point numbers from rust to python (#7259)
+ `PR #7259 `_
+
+Bugfixes
+~~~~~~~~
+
+* Fix nightly cron ctypes enum failure (#7249)
+ `PR #7249 `_
+
+* Revert remote execution from tower to grpcio (#7256)
+ `PR #7256 `_
+
+* Pin pytest version to avoid induced breakage from more-itertools transitive dep (#7238)
+ `Issue #4770#issuecomment-462869367, `_
+ `PR #7238 `_
+
+* Validate and maybe prune interpreter cache run over run (#7225)
+ `PR #7225 `_
+
+Documentation
+~~~~~~~~~~~~~
+
+* [jvm-compile] fix typo: s/direcotry/directory/ (#7265)
+ `PR #7265 `_
+
+Refactoring, Improvements, and Tooling
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+* cache python tools in ~/.cache/pants (#7236)
+ `PR #7236 `_
+
+* Prepare 1.14.0rc3 (#7274)
+ `PR #7274 `_
+
+* Node is Display (#7264)
+ `PR #7264 `_
+
+* Scheduler returns metrics as a dictionary instead of a tuple of tuples (#7255)
+ `PR #7255 `_
+
+* Prepare 1.14.0.rc2 instead. (#7251)
+ `PR #7251 `_
+
+* Prepare 1.14.0 (#7246)
+ `PR #7246 `_
+
+* Avoid capturing Snapshots for previously digested codegen outputs (#7241)
+ `PR #7241 `_
+
+* Add checks if values of flags zipkin-trace-id and zipkin-parent-id are valid (#7242)
+ `PR #7242 `_
+
+* Remove deprecated test classes (#7243)
+ `PR #7243 `_
+
+* fix expected pytest output for pytest integration after pinning to 3.0.7 (#7240)
+ `PR #7240 `_
+
+* Canonicalize enum pattern matching for execution strategy, platform, and elsewhere (#7226)
+ `PR #7226 `_
+
+* add a TypedCollection type constraint to reduce boilerplate for datatype tuple fields (#7115)
+ `PR #7115 `_
+
+1.15.0.dev0 (2/8/2019)
+----------------------
+
+API Changes
+~~~~~~~~~~~
+
+* deprecate implicit usage of binary_mode=True and mode='wb' in dirutil methods (#7120)
+ `PR #7120 `_
+
+* Resolve all platforms from all python targets (#7156)
+ `PR #7156 `_
+
+* Only lint the direct sources of a linted target. (#7219)
+ `PR #7219 `_
+
+New Features
+~~~~~~~~~~~~
+
+* Add flag reporting-zipkin-sample-rate (#7211)
+ `PR #7211 `_
+
+* Add flags to process_executor that say where to materialize output and what output is (#7201)
+ `PR #7201 `_
+
+Bugfixes
+~~~~~~~~
+
+* Do not render the coursier workunit unless it will run. (#7218)
+ `PR #7218 `_
+
+* Output non-empty digest for empty directory as output_directories when running ExecuteProcessRequest (#7208)
+ `PR #7208 `_
+
+Documentation
+~~~~~~~~~~~~~
+
+* documentation for grpcio (#7155)
+ `PR #7155 `_
+
+Refactoring, Improvements, and Tooling
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+* Make Resettable lazy again (#7222)
+ `PR #7222 `_
+
+* fix _raise_deferred_exc() (#7008)
+ `PR #7008 `_
+
+* Skip flaky test (#7209)
+ `PR #7209 `_
+
+* Only run master-dependent commithooks on master (#7214)
+ `PR #7214 `_
+
+* Revert "make GoTest subclass PartitionedTestRunnerTaskMixin to test transitively" (#7212)
+ `PR #7212 `_
+
+* Absolute-ify GIT_DIR (#7210)
+ `PR #7210 `_
+
+* Fix release script ownership check command name. (#7204)
+ `PR #7204 `_
1.14.0rc0 (2/01/2019)
---------------------
diff --git a/src/python/pants/option/global_options.py b/src/python/pants/option/global_options.py
index 31160095c0e..c346f85c4fd 100644
--- a/src/python/pants/option/global_options.py
+++ b/src/python/pants/option/global_options.py
@@ -16,7 +16,7 @@
from pants.option.optionable import Optionable
from pants.option.scope import ScopeInfo
from pants.subsystem.subsystem_client_mixin import SubsystemClientMixin
-from pants.util.objects import datatype, enum
+from pants.util.objects import datatype, enum, register_enum_option
class GlobMatchErrorBehavior(enum('failure_behavior', ['ignore', 'warn', 'error'])):
@@ -26,8 +26,6 @@ class GlobMatchErrorBehavior(enum('failure_behavior', ['ignore', 'warn', 'error'
be aware of any changes to this object's definition.
"""
- default_option_value = 'warn'
-
class ExecutionOptions(datatype([
'remote_store_server',
@@ -197,12 +195,12 @@ def register_bootstrap_options(cls, register):
help='Paths to ignore for all filesystem operations performed by pants '
'(e.g. BUILD file scanning, glob matching, etc). '
'Patterns use the gitignore syntax (https://git-scm.com/docs/gitignore).')
- register('--glob-expansion-failure', type=str,
- choices=GlobMatchErrorBehavior.allowed_values,
- default=GlobMatchErrorBehavior.default_option_value,
- advanced=True,
- help="Raise an exception if any targets declaring source files "
- "fail to match any glob provided in the 'sources' argument.")
+ register_enum_option(
+ # TODO: allow using the attribute `GlobMatchErrorBehavior.warn` for more safety!
+ register, GlobMatchErrorBehavior, '--glob-expansion-failure', default='warn',
+ advanced=True,
+ help="Raise an exception if any targets declaring source files "
+ "fail to match any glob provided in the 'sources' argument.")
register('--exclude-target-regexp', advanced=True, type=list, default=[], daemon=False,
metavar='', help='Exclude target roots that match these regexes.')
diff --git a/src/python/pants/option/options_bootstrapper.py b/src/python/pants/option/options_bootstrapper.py
index 0d727329532..09ff274e610 100644
--- a/src/python/pants/option/options_bootstrapper.py
+++ b/src/python/pants/option/options_bootstrapper.py
@@ -103,7 +103,7 @@ def create(cls, env=None, args=None):
short_flags = set()
def filecontent_for(path):
- return FileContent(ensure_text(path), read_file(path))
+ return FileContent(ensure_text(path), read_file(path, binary_mode=True))
def capture_the_flags(*args, **kwargs):
for arg in args:
diff --git a/src/python/pants/pantsd/process_manager.py b/src/python/pants/pantsd/process_manager.py
index e42cbfa6ecc..4c14f418bae 100644
--- a/src/python/pants/pantsd/process_manager.py
+++ b/src/python/pants/pantsd/process_manager.py
@@ -191,7 +191,7 @@ def write_metadata_by_name(self, name, metadata_key, metadata_value):
"""
self._maybe_init_metadata_dir_by_name(name)
file_path = self._metadata_file_path(name, metadata_key)
- safe_file_dump(file_path, metadata_value, binary_mode=False)
+ safe_file_dump(file_path, metadata_value, mode='w')
def await_metadata_by_name(self, name, metadata_key, timeout, caster=None):
"""Block up to a timeout for process metadata to arrive on disk.
diff --git a/src/python/pants/pantsd/watchman.py b/src/python/pants/pantsd/watchman.py
index 09be21e948d..409c7f0f725 100644
--- a/src/python/pants/pantsd/watchman.py
+++ b/src/python/pants/pantsd/watchman.py
@@ -82,7 +82,7 @@ def _normalize_watchman_path(self, watchman_path):
def _maybe_init_metadata(self):
safe_mkdir(self._watchman_work_dir)
# Initialize watchman with an empty, but valid statefile so it doesn't complain on startup.
- safe_file_dump(self._state_file, b'{}')
+ safe_file_dump(self._state_file, b'{}', mode='wb')
def _construct_cmd(self, cmd_parts, state_file, sock_file, pid_file, log_file, log_level):
return [part for part in cmd_parts] + ['--no-save-state',
diff --git a/src/python/pants/releases/reversion.py b/src/python/pants/releases/reversion.py
index 541fcdb6524..8ed8a5638e2 100644
--- a/src/python/pants/releases/reversion.py
+++ b/src/python/pants/releases/reversion.py
@@ -30,7 +30,7 @@ def replace_in_file(workspace, src_file_path, from_str, to_str):
return None
dst_file_path = src_file_path.replace(from_str, to_str)
- safe_file_dump(os.path.join(workspace, dst_file_path), data.replace(from_bytes, to_bytes))
+ safe_file_dump(os.path.join(workspace, dst_file_path), data.replace(from_bytes, to_bytes), mode='wb')
if src_file_path != dst_file_path:
os.unlink(os.path.join(workspace, src_file_path))
return dst_file_path
@@ -88,7 +88,7 @@ def rewrite_record_file(workspace, src_record_file, mutated_file_tuples):
output_line = line
output_records.append(output_line)
- safe_file_dump(file_name, '\r\n'.join(output_records) + '\r\n', binary_mode=False)
+ safe_file_dump(file_name, '\r\n'.join(output_records) + '\r\n', mode='w')
# The wheel METADATA file will contain a line like: `Version: 1.11.0.dev3+7951ec01`.
diff --git a/src/python/pants/reporting/reporting.py b/src/python/pants/reporting/reporting.py
index 25d7c82c16e..245f4eef4cc 100644
--- a/src/python/pants/reporting/reporting.py
+++ b/src/python/pants/reporting/reporting.py
@@ -51,15 +51,17 @@ def register_options(cls, register):
help='The full HTTP URL of a zipkin server to which traces should be posted. '
'No traces will be made if this is not set.')
register('--zipkin-trace-id', advanced=True, default=None,
- help='The overall 64 or 128-bit ID of the trace. '
- 'Set if Pants trace should be a part of larger trace '
- 'for systems that invoke Pants. If zipkin-trace-id '
- 'and zipkin-parent-id are not set, a trace_id value is randomly generated for a '
- 'Zipkin trace')
+ help='The overall 64 or 128-bit ID of the trace (the format is 16-character or '
+ '32-character hex string). Set if the Pants trace should be a part of a larger '
+ 'trace for systems that invoke Pants. If flags zipkin-trace-id and '
+ 'zipkin-parent-id are not set, a trace_id value is randomly generated '
+ 'for a Zipkin trace.')
register('--zipkin-parent-id', advanced=True, default=None,
- help='The 64-bit ID for a parent span that invokes Pants. '
- 'zipkin-trace-id and zipkin-parent-id must both either be set or not set '
- 'when run Pants command')
+ help='The 64-bit ID for a parent span that invokes Pants (the format is 16-character '
+ 'hex string). Flags zipkin-trace-id and zipkin-parent-id must both either be set '
+ 'or not set when running a Pants command.')
+ register('--zipkin-sample-rate', advanced=True, default=100.0,
+ help='Rate at which to sample Zipkin traces. Value 0.0 - 100.0.')
def initialize(self, run_tracker, all_options, start_time=None):
"""Initialize with the given RunTracker.
@@ -100,6 +102,7 @@ def initialize(self, run_tracker, all_options, start_time=None):
zipkin_endpoint = self.get_options().zipkin_endpoint
trace_id = self.get_options().zipkin_trace_id
parent_id = self.get_options().zipkin_parent_id
+ sample_rate = self.get_options().zipkin_sample_rate
if zipkin_endpoint is None and trace_id is not None and parent_id is not None:
raise ValueError(
@@ -109,11 +112,21 @@ def initialize(self, run_tracker, all_options, start_time=None):
raise ValueError(
"Flags zipkin-trace-id and zipkin-parent-id must both either be set or not set."
)
+ if trace_id and (len(trace_id) != 16 and len(trace_id) != 32 or not is_hex_string(trace_id)):
+ raise ValueError(
+ "Value of the flag zipkin-trace-id must be a 16-character or 32-character hex string. "
+ + "Got {}.".format(trace_id)
+ )
+ if parent_id and (len(parent_id) != 16 or not is_hex_string(parent_id)):
+ raise ValueError(
+ "Value of the flag zipkin-parent-id must be a 16-character hex string. "
+ + "Got {}.".format(parent_id)
+ )
if zipkin_endpoint is not None:
zipkin_reporter_settings = ZipkinReporter.Settings(log_level=Report.INFO)
zipkin_reporter = ZipkinReporter(
- run_tracker, zipkin_reporter_settings, zipkin_endpoint, trace_id, parent_id
+ run_tracker, zipkin_reporter_settings, zipkin_endpoint, trace_id, parent_id, sample_rate
)
report.add_reporter('zipkin', zipkin_reporter)
@@ -192,3 +205,12 @@ def update_reporting(self, global_options, is_quiet, run_tracker):
invalidation_report.set_filename(outfile)
return invalidation_report
+
+
+def is_hex_string(id_value):
+ return all(is_hex_ch(ch) for ch in id_value)
+
+
+def is_hex_ch(ch):
+ num = ord(ch)
+ return ord('0') <= num <= ord('9') or ord('a') <= num <= ord('f') or ord('A') <= num <= ord('F')
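
Taken together, the new checks accept only a 16- or 32-character hex string for --zipkin-trace-id and a 16-character hex string for --zipkin-parent-id. For example:

    # Accepted: 16 hex characters (64-bit id) or 32 hex characters (128-bit trace id).
    assert is_hex_string('aaaabbbbccccdddd')
    assert is_hex_string('aaaabbbbccccdddd0123456789abcdef')
    # is_hex_string rejects non-hex characters; the length checks above reject everything else.
    assert not is_hex_string('not-a-hex-id0000')
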
diff --git a/src/python/pants/reporting/zipkin_reporter.py b/src/python/pants/reporting/zipkin_reporter.py
index 48deac3c892..68dd252df43 100644
--- a/src/python/pants/reporting/zipkin_reporter.py
+++ b/src/python/pants/reporting/zipkin_reporter.py
@@ -10,7 +10,7 @@
from py_zipkin import Encoding
from py_zipkin.transport import BaseTransportHandler
from py_zipkin.util import generate_random_64bit_string
-from py_zipkin.zipkin import ZipkinAttrs, zipkin_span
+from py_zipkin.zipkin import ZipkinAttrs, create_attrs_for_span, zipkin_span
from pants.base.workunit import WorkUnitLabel
from pants.reporting.reporter import Reporter
@@ -42,7 +42,7 @@ class ZipkinReporter(Reporter):
Reporter that implements Zipkin tracing.
"""
- def __init__(self, run_tracker, settings, endpoint, trace_id, parent_id):
+ def __init__(self, run_tracker, settings, endpoint, trace_id, parent_id, sample_rate):
"""
When trace_id and parent_id are set a Zipkin trace will be created with given trace_id
and parent_id. If trace_id and parent_id are set to None, a trace_id will be randomly
@@ -53,6 +53,7 @@ def __init__(self, run_tracker, settings, endpoint, trace_id, parent_id):
:param string endpoint: The full HTTP URL of a zipkin server to which traces should be posted.
:param string trace_id: The overall 64 or 128-bit ID of the trace. May be None.
:param string parent_id: The 64-bit ID for a parent span that invokes Pants. May be None.
+ :param float sample_rate: Rate at which to sample Zipkin traces. Value 0.0 - 100.0.
"""
super(ZipkinReporter, self).__init__(run_tracker, settings)
# We keep track of connection between workunits and spans
@@ -61,6 +62,7 @@ def __init__(self, run_tracker, settings, endpoint, trace_id, parent_id):
self.handler = HTTPTransportHandler(endpoint)
self.trace_id = trace_id
self.parent_id = parent_id
+ self.sample_rate = float(sample_rate)
def start_workunit(self, workunit):
"""Implementation of Reporter callback."""
@@ -84,13 +86,14 @@ def start_workunit(self, workunit):
is_sampled=True,
)
else:
- zipkin_attrs = None
+ zipkin_attrs = create_attrs_for_span(
+ sample_rate=self.sample_rate, # Value between 0.0 and 100.0
+ )
span = zipkin_span(
service_name=service_name,
span_name=workunit.name,
transport_handler=self.handler,
- sample_rate=100.0, # Value between 0.0 and 100.0
encoding=Encoding.V1_THRIFT,
zipkin_attrs=zipkin_attrs
)
@@ -104,7 +107,7 @@ def start_workunit(self, workunit):
# Goals and tasks save their start time at the beginning of their run.
# This start time is passed to workunit, because the workunit may be created much later.
span.start_timestamp = workunit.start_time
- if first_span:
+ if first_span and span.zipkin_attrs.is_sampled:
span.logging_context.start_timestamp = workunit.start_time
def end_workunit(self, workunit):
diff --git a/src/python/pants/source/filespec.py b/src/python/pants/source/filespec.py
index c36d37176a4..77e967043f5 100644
--- a/src/python/pants/source/filespec.py
+++ b/src/python/pants/source/filespec.py
@@ -9,6 +9,9 @@
def glob_to_regex(pattern):
"""Given a glob pattern, return an equivalent regex expression.
+
+ TODO: Replace with implementation in `fs.rs`. See https://github.com/pantsbuild/pants/issues/6795.
+
:param string glob: The glob pattern. "**" matches 0 or more dirs recursively.
"*" only matches patterns in a single dir.
:returns: A regex string that matches same paths as the input glob does.
diff --git a/src/python/pants/source/wrapped_globs.py b/src/python/pants/source/wrapped_globs.py
index 2e68472e748..1deab4c9dda 100644
--- a/src/python/pants/source/wrapped_globs.py
+++ b/src/python/pants/source/wrapped_globs.py
@@ -99,8 +99,10 @@ def files(self):
@memoized_property
def files_relative_to_buildroot(self):
- fds = self._snapshot.path_stats if self._include_dirs else self._snapshot.files
- return tuple(fd.path for fd in fds)
+ res = self._snapshot.files
+ if self._include_dirs:
+ res += self._snapshot.dirs
+ return res
@property
def files_hash(self):
diff --git a/src/python/pants/task/fmt_task_mixin.py b/src/python/pants/task/fmt_task_mixin.py
index 25c1d7a6aa3..d4279d36089 100644
--- a/src/python/pants/task/fmt_task_mixin.py
+++ b/src/python/pants/task/fmt_task_mixin.py
@@ -11,3 +11,4 @@
class FmtTaskMixin(HasSkipAndTransitiveGoalOptionsMixin):
"""A mixin to combine with code formatting tasks."""
goal_options_registrar_cls = SkipAndTransitiveGoalOptionsRegistrar
+ target_filtering_enabled = True
diff --git a/src/python/pants/task/lint_task_mixin.py b/src/python/pants/task/lint_task_mixin.py
index 549a5978be0..d64100b0dda 100644
--- a/src/python/pants/task/lint_task_mixin.py
+++ b/src/python/pants/task/lint_task_mixin.py
@@ -11,3 +11,4 @@
class LintTaskMixin(HasSkipAndTransitiveGoalOptionsMixin):
"""A mixin to combine with lint tasks."""
goal_options_registrar_cls = SkipAndTransitiveGoalOptionsRegistrar
+ target_filtering_enabled = True
diff --git a/src/python/pants/task/simple_codegen_task.py b/src/python/pants/task/simple_codegen_task.py
index 276c50fe370..162b8dd4d2e 100644
--- a/src/python/pants/task/simple_codegen_task.py
+++ b/src/python/pants/task/simple_codegen_task.py
@@ -18,11 +18,11 @@
from pants.base.workunit import WorkUnitLabel
from pants.build_graph.address import Address
from pants.build_graph.address_lookup_error import AddressLookupError
-from pants.engine.fs import PathGlobs, PathGlobsAndRoot
+from pants.engine.fs import Digest, PathGlobs, PathGlobsAndRoot
from pants.source.wrapped_globs import EagerFilesetWithSpec, FilesetRelPathWrapper
from pants.task.task import Task
from pants.util.collections_abc_backport import OrderedDict
-from pants.util.dirutil import safe_delete
+from pants.util.dirutil import fast_relpath, safe_delete
logger = logging.getLogger(__name__)
@@ -113,6 +113,10 @@ def synthetic_target_extra_dependencies(self, target, target_workdir):
"""
return []
+ @classmethod
+ def implementation_version(cls):
+ return super(SimpleCodegenTask, cls).implementation_version() + [('SimpleCodegenTask', 2)]
+
def synthetic_target_extra_exports(self, target, target_workdir):
"""Gets any extra exports generated synthetic targets should have.
@@ -206,7 +210,7 @@ def _do_validate_sources_present(self, target):
def _get_synthetic_address(self, target, target_workdir):
synthetic_name = target.id
- sources_rel_path = os.path.relpath(target_workdir, get_buildroot())
+ sources_rel_path = fast_relpath(target_workdir, get_buildroot())
synthetic_address = Address(sources_rel_path, synthetic_name)
return synthetic_address
@@ -230,32 +234,26 @@ def execute(self):
with self.context.new_workunit(name='execute', labels=[WorkUnitLabel.MULTITOOL]):
vts_to_sources = OrderedDict()
for vt in invalidation_check.all_vts:
- synthetic_target_dir = self.synthetic_target_dir(vt.target, vt.results_dir)
- key = (vt, synthetic_target_dir)
- vts_to_sources[key] = None
+ vts_to_sources[vt] = None
# Build the target and handle duplicate sources.
if not vt.valid:
if self._do_validate_sources_present(vt.target):
- self.execute_codegen(vt.target, vt.results_dir)
- sources = self._capture_sources((key,))[0]
+ self.execute_codegen(vt.target, vt.current_results_dir)
+ sources = self._capture_sources((vt,))[0]
# _handle_duplicate_sources may delete files from the filesystem, so we need to
# re-capture the sources.
- if not self._handle_duplicate_sources(vt.target, vt.results_dir, sources):
- vts_to_sources[key] = sources
+ if not self._handle_duplicate_sources(vt, sources):
+ vts_to_sources[vt] = sources
vt.update()
vts_to_capture = tuple(key for key, sources in vts_to_sources.items() if sources is None)
filesets = self._capture_sources(vts_to_capture)
for key, fileset in zip(vts_to_capture, filesets):
vts_to_sources[key] = fileset
- for (vt, synthetic_target_dir), fileset in vts_to_sources.items():
- self._inject_synthetic_target(
- vt.target,
- synthetic_target_dir,
- fileset,
- )
+ for vt, fileset in vts_to_sources.items():
+ self._inject_synthetic_target(vt, fileset)
self._mark_transitive_invalidation_hashes_dirty(
vt.target.address for vt in invalidation_check.all_vts
)
@@ -280,17 +278,23 @@ def synthetic_target_dir(self, target, target_workdir):
"""
return target_workdir
- # Accepts tuple of tuples of (target, synthetic_target_dir)
+ # Accepts tuple of VersionedTarget instances.
# Returns tuple of EagerFilesetWithSpecs in matching order.
- def _capture_sources(self, targets_and_dirs):
+ def _capture_sources(self, vts):
to_capture = []
results_dirs = []
filespecs = []
- for target, synthetic_target_dir in targets_and_dirs:
+ for vt in vts:
+ target = vt.target
+ # Compute the (optional) subdirectory of the results_dir to generate code to. This
+ # path will end up in the generated FilesetWithSpec and target, and thus needs to be
+ # located below the stable/symlinked `vt.results_dir`.
+ synthetic_target_dir = self.synthetic_target_dir(target, vt.results_dir)
+
files = self.sources_globs
- results_dir_relpath = os.path.relpath(synthetic_target_dir, get_buildroot())
+ results_dir_relpath = fast_relpath(synthetic_target_dir, get_buildroot())
buildroot_relative_globs = tuple(os.path.join(results_dir_relpath, file) for file in files)
buildroot_relative_excludes = tuple(
os.path.join(results_dir_relpath, file)
@@ -300,6 +304,8 @@ def _capture_sources(self, targets_and_dirs):
PathGlobsAndRoot(
PathGlobs(buildroot_relative_globs, buildroot_relative_excludes),
text_type(get_buildroot()),
+ # The digest is stored adjacent to the hash-versioned `vt.current_results_dir`.
+ Digest.load(vt.current_results_dir),
)
)
results_dirs.append(results_dir_relpath)
@@ -307,33 +313,35 @@ def _capture_sources(self, targets_and_dirs):
snapshots = self.context._scheduler.capture_snapshots(tuple(to_capture))
+ for snapshot, vt in zip(snapshots, vts):
+ snapshot.directory_digest.dump(vt.current_results_dir)
+
return tuple(EagerFilesetWithSpec(
results_dir_relpath,
filespec,
snapshot,
) for (results_dir_relpath, filespec, snapshot) in zip(results_dirs, filespecs, snapshots))
- def _inject_synthetic_target(
- self,
- target,
- target_workdir,
- sources,
- ):
+ def _inject_synthetic_target(self, vt, sources):
"""Create, inject, and return a synthetic target for the given target and workdir.
- :param target: The target to inject a synthetic target for.
- :param target_workdir: The work directory containing the generated code for the target.
+ :param vt: A codegen input VersionedTarget to inject a synthetic target for.
+ :param sources: A FilesetWithSpec to inject for the target.
"""
+ target = vt.target
+ # NB: For stability, the injected target exposes the stable-symlinked `vt.results_dir`,
+ # rather than the hash-named `vt.current_results_dir`.
+ synthetic_target_dir = self.synthetic_target_dir(target, vt.results_dir)
synthetic_target_type = self.synthetic_target_type(target)
- synthetic_extra_dependencies = self.synthetic_target_extra_dependencies(target, target_workdir)
+ synthetic_extra_dependencies = self.synthetic_target_extra_dependencies(target, synthetic_target_dir)
copied_attributes = {}
for attribute in self._copy_target_attributes:
copied_attributes[attribute] = getattr(target, attribute)
if self._supports_exports(synthetic_target_type):
- extra_exports = self.synthetic_target_extra_exports(target, target_workdir)
+ extra_exports = self.synthetic_target_extra_exports(target, synthetic_target_dir)
extra_exports_not_in_extra_dependencies = set(extra_exports).difference(
set(synthetic_extra_dependencies))
@@ -349,7 +357,7 @@ def _inject_synthetic_target(
copied_attributes['exports'] = sorted(union)
synthetic_target = self.context.add_new_target(
- address=self._get_synthetic_address(target, target_workdir),
+ address=self._get_synthetic_address(target, synthetic_target_dir),
target_type=synthetic_target_type,
dependencies=synthetic_extra_dependencies,
sources=sources,
@@ -405,7 +413,7 @@ def execute_codegen(self, target, target_workdir):
:param target_workdir: A clean directory into which to generate code
"""
- def _handle_duplicate_sources(self, target, target_workdir, sources):
+ def _handle_duplicate_sources(self, vt, sources):
"""Handles duplicate sources generated by the given gen target by either failure or deletion.
This method should be called after all dependencies have been injected into the graph, but
@@ -420,6 +428,8 @@ def _handle_duplicate_sources(self, target, target_workdir, sources):
default, this behavior is disabled, and duplication in generated sources will raise a
TaskError. This is controlled by the --allow-dups flag.
"""
+ target = vt.target
+ target_workdir = vt.results_dir
# Walk dependency gentargets and record any sources owned by those targets that are also
# owned by this target.
@@ -457,6 +467,8 @@ def record_duplicates(dep):
for duped_source in duped_sources:
safe_delete(os.path.join(target_workdir, duped_source))
did_modify = True
+ if did_modify:
+ Digest.clear(vt.current_results_dir)
return did_modify
class DuplicateSourceError(TaskError):
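
The digest_hint plumbing above lets codegen skip re-snapshotting outputs it has already captured: after each capture the snapshot's digest is dumped next to the hash-versioned results dir, and later runs load it back and pass it as a hint. A rough sketch of that flow using the APIs shown above (the `scheduler` and `current_results_dir` variables are illustrative):

    # First run: capture freshly generated sources, then persist the digest alongside them.
    request = PathGlobsAndRoot(PathGlobs(('gen/out/**/*.py',)), text_type(get_buildroot()))
    snapshot, = scheduler.capture_snapshots((request,))
    snapshot.directory_digest.dump(current_results_dir)

    # Later run: pass the stored digest as a hint so the engine can skip walking the
    # filesystem when the content is already present in the store.
    hinted = PathGlobsAndRoot(
        PathGlobs(('gen/out/**/*.py',)),
        text_type(get_buildroot()),
        digest_hint=Digest.load(current_results_dir),
    )
    snapshot, = scheduler.capture_snapshots((hinted,))
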
diff --git a/src/python/pants/task/task.py b/src/python/pants/task/task.py
index d7b5e28dc90..1c1ff115fa0 100644
--- a/src/python/pants/task/task.py
+++ b/src/python/pants/task/task.py
@@ -7,7 +7,7 @@
import os
import sys
from abc import abstractmethod
-from builtins import filter, map, object, str, zip
+from builtins import filter, map, object, set, str, zip
from contextlib import contextmanager
from hashlib import sha1
from itertools import repeat
@@ -16,6 +16,7 @@
from pants.base.exceptions import TaskError
from pants.base.worker_pool import Work
+from pants.build_graph.target_filter_subsystem import TargetFilter
from pants.cache.artifact_cache import UnreadableArtifact, call_insert, call_use_cached_files
from pants.cache.cache_setup import CacheSetup
from pants.invalidation.build_invalidator import (BuildInvalidator, CacheKeyGenerator,
@@ -29,7 +30,7 @@
from pants.subsystem.subsystem_client_mixin import SubsystemClientMixin
from pants.util.dirutil import safe_mkdir, safe_rm_oldest_items_in_dir
from pants.util.memo import memoized_method, memoized_property
-from pants.util.meta import AbstractClass
+from pants.util.meta import AbstractClass, classproperty
class TaskBase(SubsystemClientMixin, Optionable, AbstractClass):
@@ -96,7 +97,8 @@ def _compute_stable_name(cls):
@classmethod
def subsystem_dependencies(cls):
return (super(TaskBase, cls).subsystem_dependencies() +
- (CacheSetup.scoped(cls), BuildInvalidator.Factory, SourceRootConfig))
+ (CacheSetup.scoped(cls), BuildInvalidator.Factory, SourceRootConfig) +
+ ((TargetFilter.scoped(cls),) if cls.target_filtering_enabled else tuple()))
@classmethod
def product_types(cls):
@@ -222,6 +224,17 @@ def act_transitively(self):
"""
return True
+ @classproperty
+ def target_filtering_enabled(cls):
+ """Whether this task should apply configured filters against targets.
+
+ Tasks can override to enable target filtering (e.g. based on tags) and must
+ access targets via get_targets().
+
+ :API: public
+ """
+ return False
+
def get_targets(self, predicate=None):
"""Returns the candidate targets this task should act on.
@@ -237,8 +250,24 @@ def get_targets(self, predicate=None):
:API: public
"""
- return (self.context.targets(predicate) if self.act_transitively
- else list(filter(predicate, self.context.target_roots)))
+ initial_targets = (self.context.targets(predicate) if self.act_transitively
+ else list(filter(predicate, self.context.target_roots)))
+
+ if not self.target_filtering_enabled:
+ return initial_targets
+ else:
+ return self._filter_targets(initial_targets)
+
+ def _filter_targets(self, targets):
+ included_targets = TargetFilter.scoped_instance(self).apply(targets)
+ excluded_targets = set(targets).difference(included_targets)
+
+ if excluded_targets:
+ self.context.log.info("{} target(s) excluded".format(len(excluded_targets)))
+ for target in excluded_targets:
+ self.context.log.debug("{} excluded".format(target.address.spec))
+
+ return included_targets
@memoized_property
def workdir(self):
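
Because the classproperty defaults to False, tag-based filtering stays opt-in: a task sets `target_filtering_enabled = True` (as the lint and fmt mixins above do) and reads its targets through `get_targets()`, which applies the TargetFilter subsystem's --exclude-tags blacklist first. A hedged sketch of a task opting in (the task body is illustrative):

    class MyLintTask(Task):
        """Illustrative task that opts in to tag-based target filtering."""

        target_filtering_enabled = True

        def execute(self):
            # get_targets() filters out targets carrying any excluded tag before
            # this task ever sees them.
            for target in self.get_targets():
                self.context.log.debug('would lint {}'.format(target.address.spec))
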
diff --git a/src/python/pants/util/BUILD b/src/python/pants/util/BUILD
index bf1b1f55b64..5e2bd8f4a00 100644
--- a/src/python/pants/util/BUILD
+++ b/src/python/pants/util/BUILD
@@ -56,6 +56,7 @@ python_library(
dependencies = [
':strutil',
'3rdparty/python:future',
+ 'src/python/pants/base:deprecated',
],
)
diff --git a/src/python/pants/util/dirutil.py b/src/python/pants/util/dirutil.py
index 7dd542b9dcb..7391f072b15 100644
--- a/src/python/pants/util/dirutil.py
+++ b/src/python/pants/util/dirutil.py
@@ -16,6 +16,7 @@
from collections import defaultdict
from contextlib import contextmanager
+from pants.base.deprecated import deprecated_conditional
from pants.util.strutil import ensure_text
@@ -100,15 +101,17 @@ def safe_mkdir_for_all(paths):
created_dirs.add(dir_to_make)
-def safe_file_dump(filename, payload, binary_mode=None, mode=None):
+# TODO(#6742): payload should be Union[str, bytes] in type hint syntax, but from
+# https://pythonhosted.org/an_example_pypi_project/sphinx.html#full-code-example it doesn't appear
+# that is possible to represent in docstring type syntax.
+def safe_file_dump(filename, payload='', binary_mode=None, mode=None):
"""Write a string to a file.
This method is "safe" to the extent that `safe_open` is "safe". See the explanation on the method
doc there.
- TODO: The `binary_mode` flag should be deprecated and removed from existing callsites. Once
- `binary_mode` is removed, mode can directly default to `wb`.
- see https://github.com/pantsbuild/pants/issues/6543
+ When `payload` is an empty string (the default), this method can be used as a concise way to
+ create an empty file along with its containing directory (or truncate it if it already exists).
:param string filename: The filename of the file to write to.
:param string payload: The string to write to the file.
@@ -116,9 +119,19 @@ def safe_file_dump(filename, payload, binary_mode=None, mode=None):
:param string mode: A mode argument for the python `open` builtin. Mutually exclusive with
binary_mode.
"""
+ deprecated_conditional(
+ lambda: binary_mode is not None,
+ removal_version='1.16.0.dev2',
+ entity_description='The binary_mode argument in safe_file_dump()',
+ hint_message='Use the mode argument instead!')
if binary_mode is not None and mode is not None:
raise AssertionError('Only one of `binary_mode` and `mode` may be specified.')
+ deprecated_conditional(
+ lambda: mode is None,
+ removal_version='1.16.0.dev2',
+ entity_description='Not specifying mode explicitly in safe_file_dump()',
+ hint_message="Function will default to unicode ('w') when pants migrates to python 3!")
if mode is None:
if binary_mode is False:
mode = 'w'
@@ -129,7 +142,7 @@ def safe_file_dump(filename, payload, binary_mode=None, mode=None):
f.write(payload)
-def maybe_read_file(filename, binary_mode=True):
+def maybe_read_file(filename, binary_mode=None):
"""Read and return the contents of a file in a single file.read().
:param string filename: The filename of the file to read.
@@ -137,13 +150,22 @@ def maybe_read_file(filename, binary_mode=True):
:returns: The contents of the file, or None if opening the file fails for any reason
:rtype: string
"""
+ # TODO(#7121): Default binary_mode=False after the python 3 switchover!
+ deprecated_conditional(
+ lambda: binary_mode is None,
+ removal_version='1.16.0.dev2',
+ entity_description='Not specifying binary_mode explicitly in maybe_read_file()',
+ hint_message='Function will default to unicode when pants migrates to python 3!')
+ if binary_mode is None:
+ binary_mode = True
+
try:
return read_file(filename, binary_mode=binary_mode)
except IOError:
return None
-def read_file(filename, binary_mode=True):
+def read_file(filename, binary_mode=None):
"""Read and return the contents of a file in a single file.read().
:param string filename: The filename of the file to read.
@@ -151,6 +173,15 @@ def read_file(filename, binary_mode=True):
:returns: The contents of the file.
:rtype: string
"""
+ # TODO(#7121): Default binary_mode=False after the python 3 switchover!
+ deprecated_conditional(
+ lambda: binary_mode is None,
+ removal_version='1.16.0.dev2',
+ entity_description='Not specifying binary_mode explicitly in read_file()',
+ hint_message='Function will default to unicode when pants migrates to python 3!')
+ if binary_mode is None:
+ binary_mode = True
+
mode = 'rb' if binary_mode else 'r'
with open(filename, mode) as f:
return f.read()
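Taken together, these deprecations mean call sites should state the mode explicitly until the
Python 3 switchover changes the defaults. A small sketch of the intended usage (the paths are
made up for illustration):

    from pants.util.dirutil import maybe_read_file, read_file, safe_file_dump

    # Writers: pass mode explicitly to avoid the new deprecation warning.
    safe_file_dump('/tmp/example/out.txt', 'some text\n', mode='w')   # unicode
    safe_file_dump('/tmp/example/out.bin', b'\x00\x01', mode='wb')    # bytes

    # With the new payload default, this just creates (or truncates) an empty file,
    # creating /tmp/example's directories as needed.
    safe_file_dump('/tmp/example/marker', mode='w')

    # Readers: pass binary_mode explicitly for the same reason.
    raw_bytes = read_file('/tmp/example/out.bin', binary_mode=True)
    text = maybe_read_file('/tmp/example/out.txt', binary_mode=False)  # None if unreadable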
diff --git a/src/python/pants/util/objects.py b/src/python/pants/util/objects.py
index ddc51f388fb..f66ebc5d717 100644
--- a/src/python/pants/util/objects.py
+++ b/src/python/pants/util/objects.py
@@ -4,19 +4,33 @@
from __future__ import absolute_import, division, print_function, unicode_literals
-import sys
from abc import abstractmethod
-from builtins import object, zip
+from builtins import zip
from collections import namedtuple
-from future.utils import PY2
from twitter.common.collections import OrderedSet
-from pants.util.collections_abc_backport import OrderedDict
-from pants.util.memo import memoized, memoized_classproperty
-from pants.util.meta import AbstractClass
+from pants.util.collections_abc_backport import Iterable, OrderedDict
+from pants.util.memo import memoized_classproperty
+from pants.util.meta import AbstractClass, classproperty
+class TypeCheckError(TypeError):
+
+ # TODO: make some wrapper exception class to make this kind of
+ # prefixing easy (maybe using a class field format string?).
+ def __init__(self, type_name, msg, *args, **kwargs):
+ formatted_msg = "type check error in class {}: {}".format(type_name, msg)
+ super(TypeCheckError, self).__init__(formatted_msg, *args, **kwargs)
+
+
+class TypedDatatypeInstanceConstructionError(TypeCheckError):
+ """Raised when a datatype()'s fields fail a type check upon construction."""
+
+
+# TODO: create a mixin which declares/implements the methods we define on the generated class in
+# datatype() and enum() to decouple the class's logic from the way it's created. This may also make
+# migration to python 3 dataclasses as per #7074 easier.
def datatype(field_decls, superclass_name=None, **kwargs):
"""A wrapper for `namedtuple` that accounts for the type of the object in equality.
@@ -58,9 +72,20 @@ def datatype(field_decls, superclass_name=None, **kwargs):
namedtuple_cls = namedtuple(superclass_name, field_names, **kwargs)
class DataType(namedtuple_cls):
+ @classproperty
+ def type_check_error_type(cls):
+ """The exception type to use in make_type_error()."""
+ return TypedDatatypeInstanceConstructionError
+
@classmethod
def make_type_error(cls, msg, *args, **kwargs):
- return TypeCheckError(cls.__name__, msg, *args, **kwargs)
+ """A helper method to generate an exception type for type checking errors.
+
+ This method uses `cls.type_check_error_type` to ensure that type checking errors can be caught
+ with a reliable exception type. The type returned by `cls.type_check_error_type` should ensure
+ that the exception messages are prefixed with enough context to be useful and *not* confusing.
+ """
+ return cls.type_check_error_type(cls.__name__, msg, *args, **kwargs)
def __new__(cls, *args, **kwargs):
# TODO: Ideally we could execute this exactly once per `cls` but it should be a
@@ -71,7 +96,8 @@ def __new__(cls, *args, **kwargs):
try:
this_object = super(DataType, cls).__new__(cls, *args, **kwargs)
except TypeError as e:
- raise cls.make_type_error(e)
+ raise cls.make_type_error(
+ "error in namedtuple() base constructor: {}".format(e))
# TODO: Make this kind of exception pattern (filter for errors then display them all at once)
# more ergonomic.
@@ -84,7 +110,9 @@ def __new__(cls, *args, **kwargs):
type_failure_msgs.append(
"field '{}' was invalid: {}".format(field_name, e))
if type_failure_msgs:
- raise cls.make_type_error('\n'.join(type_failure_msgs))
+ raise cls.make_type_error(
+ 'errors type checking constructor arguments:\n{}'
+ .format('\n'.join(type_failure_msgs)))
return this_object
@@ -104,13 +132,16 @@ def __eq__(self, other):
def __ne__(self, other):
return not (self == other)
+ # NB: in Python 3, whenever __eq__ is overridden, __hash__() must also be
+ # explicitly implemented, otherwise Python will raise "unhashable type". See
+ # https://docs.python.org/3/reference/datamodel.html#object.__hash__.
def __hash__(self):
return super(DataType, self).__hash__()
# NB: As datatype is not iterable, we need to override both __iter__ and all of the
# namedtuple methods that expect self to be iterable.
def __iter__(self):
- raise TypeError("'{}' object is not iterable".format(type(self).__name__))
+ raise self.make_type_error("datatype object is not iterable")
def _super_iter(self):
return super(DataType, self).__iter__()
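To make the error-handling changes concrete: a failed field check now raises the datatype's
type_check_error_type (TypedDatatypeInstanceConstructionError by default, itself a TypeCheckError),
with the class name prefixed onto the message. A minimal sketch using a hypothetical Point class:

    from pants.util.objects import TypeCheckError, datatype


    class Point(datatype([('x', int), ('y', int)])):  # hypothetical example
      pass


    Point(1, 2)        # ok
    try:
      Point(1, 'two')  # the 'y' field fails its type check
    except TypeCheckError as e:
      # e.g. "type check error in class Point: errors type checking constructor arguments: ..."
      print(e)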
@@ -170,17 +201,37 @@ def __str__(self):
return type(superclass_name.encode('utf-8'), (DataType,), {})
-def enum(field_name, all_values):
+class EnumVariantSelectionError(TypeCheckError):
+ """Raised when an invalid variant for an enum() is constructed or matched against."""
+
+
+def enum(*args):
"""A datatype which can take on a finite set of values. This method is experimental and unstable.
Any enum subclass can be constructed with its create() classmethod. This method will use the first
- element of `all_values` as the enum value if none is specified.
-
- :param field_name: A string used as the field for the datatype. Note that enum does not yet
- support type checking as with datatype.
- :param all_values: An iterable of objects representing all possible values for the enum.
- NB: `all_values` must be a finite, non-empty iterable with unique values!
+ element of `all_values` as the default value, but enum classes can override this behavior by
+ setting `default_value` in the class body.
+
+ NB: Relying on the `field_name` directly is discouraged in favor of using
+ resolve_for_enum_variant() in Python code. The `field_name` argument is exposed to make enum
+ instances more readable when printed, and to allow code in another language using an FFI to
+ reliably extract the value from an enum instance.
+
+ :param string field_name: A string used as the field for the datatype. This positional argument is
+ optional, and defaults to 'value'. Note that `enum()` does not yet
+ support type checking as with `datatype()`.
+ :param Iterable all_values: A nonempty iterable of objects representing all possible values for
+ the enum. This argument must be a finite, non-empty iterable with
+ unique values.
+ :raises: :class:`ValueError`
"""
+ if len(args) == 1:
+ field_name = 'value'
+ all_values, = args
+ elif len(args) == 2:
+ field_name, all_values = args
+ else:
+ raise ValueError("enum() accepts only 1 or 2 args! args = {!r}".format(args))
# This call to list() will eagerly evaluate any `all_values` which would otherwise be lazy, such
# as a generator.
@@ -188,84 +239,141 @@ def enum(field_name, all_values):
# `OrderedSet` maintains the order of the input iterable, but is faster to check membership.
allowed_values_set = OrderedSet(all_values_realized)
- if len(allowed_values_set) < len(all_values_realized):
+ if len(allowed_values_set) == 0:
+ raise ValueError("all_values must be a non-empty iterable!")
+ elif len(allowed_values_set) < len(all_values_realized):
raise ValueError("When converting all_values ({}) to a set, at least one duplicate "
"was detected. The unique elements of all_values were: {}."
- .format(all_values_realized, allowed_values_set))
+ .format(all_values_realized, list(allowed_values_set)))
class ChoiceDatatype(datatype([field_name])):
- allowed_values = allowed_values_set
- default_value = next(iter(allowed_values))
+ default_value = next(iter(allowed_values_set))
+
+ # Overridden from datatype() so providing an invalid variant is catchable as a TypeCheckError,
+ # but more specific.
+ type_check_error_type = EnumVariantSelectionError
@memoized_classproperty
def _singletons(cls):
- """Generate memoized instances of this enum wrapping each of this enum's allowed values."""
- return { value: cls(value) for value in cls.allowed_values }
+ """Generate memoized instances of this enum wrapping each of this enum's allowed values.
+
+ NB: The implementation of enum() should use this property as the source of truth for allowed
+ values and enum instances from those values.
+ """
+ return OrderedDict((value, cls._make_singleton(value)) for value in allowed_values_set)
@classmethod
- def _check_value(cls, value):
- if value not in cls.allowed_values:
- raise cls.make_type_error(
- "Value {!r} for '{}' must be one of: {!r}."
- .format(value, field_name, cls.allowed_values))
+ def _make_singleton(cls, value):
+ """
+ We convert uses of the constructor to call create(), so we then need to go around __new__ to
+ bootstrap singleton creation from datatype()'s __new__.
+ """
+ return super(ChoiceDatatype, cls).__new__(cls, value)
+
+ @classproperty
+ def _allowed_values(cls):
+ """The values provided to the enum() type constructor, for use in error messages."""
+ return list(cls._singletons.keys())
+
+ def __new__(cls, value):
+ """Forward `value` to the .create() factory method.
+
+ The .create() factory method is preferred, but forwarding the constructor like this allows us
+ to use the generated enum type both as a type to check against with isinstance() as well as a
+ function to create instances with. This makes it easy to use as a pants option type.
+ """
+ return cls.create(value)
+
+ # TODO: figure out if this will always trigger on primitives like strings, and what situations
+ # won't call this __eq__ (and therefore won't raise like we want).
+ def __eq__(self, other):
+ """Redefine equality to raise to nudge people to use static pattern matching."""
+ raise self.make_type_error(
+ "enum equality is defined to be an error -- use .resolve_for_enum_variant() instead!")
+ # Redefine the canary so datatype __new__ doesn't raise.
+ __eq__._eq_override_canary = None
+
+ # NB: as noted in datatype(), __hash__ must be explicitly implemented whenever __eq__ is
+ # overridden. See https://docs.python.org/3/reference/datamodel.html#object.__hash__.
+ def __hash__(self):
+ return super(ChoiceDatatype, self).__hash__()
@classmethod
- def create(cls, value=None):
+ def create(cls, *args, **kwargs):
+ """Create an instance of this enum, using the default value if specified.
+
+ :param value: Use this as the enum value. If `value` is an instance of this class, return it,
+ otherwise it is checked against the enum's allowed values. This positional
+ argument is optional, and if not specified, `cls.default_value` is used.
+ :param bool none_is_default: If this is True, a None `value` is converted into
+ `cls.default_value` before being checked against the enum's
+ allowed values.
+ """
+ none_is_default = kwargs.pop('none_is_default', False)
+ if kwargs:
+ raise ValueError('unrecognized keyword arguments for {}.create(): {!r}'
+ .format(cls.__name__, kwargs))
+
+ if len(args) == 0:
+ value = cls.default_value
+ elif len(args) == 1:
+ value = args[0]
+ if none_is_default and value is None:
+ value = cls.default_value
+ else:
+ raise ValueError('{}.create() accepts 0 or 1 positional args! *args = {!r}'
+ .format(cls.__name__, args))
+
# If we get an instance of this enum class, just return it. This means you can call .create()
- # on None, an allowed value for the enum, or an existing instance of the enum.
+ # on an allowed value for the enum, or an existing instance of the enum.
if isinstance(value, cls):
return value
- # Providing an explicit value that is not None will *not* use the default value!
- if value is None:
- value = cls.default_value
-
- # We actually circumvent the constructor in this method due to the cls._singletons
- # memoized_classproperty, but we want to raise the same error, so we move checking into a
- # common method.
- cls._check_value(value)
-
+ if value not in cls._singletons:
+ raise cls.make_type_error(
+ "Value {!r} for '{}' must be one of: {!r}."
+ .format(value, field_name, cls._allowed_values))
return cls._singletons[value]
- def __new__(cls, *args, **kwargs):
- this_object = super(ChoiceDatatype, cls).__new__(cls, *args, **kwargs)
-
- field_value = getattr(this_object, field_name)
-
- cls._check_value(field_value)
+ def resolve_for_enum_variant(self, mapping):
+ """Return the object in `mapping` with the key corresponding to the enum value.
- return this_object
-
- return ChoiceDatatype
-
-
-class TypedDatatypeClassConstructionError(Exception):
-
- # TODO: make some wrapper exception class to make this kind of
- # prefixing easy (maybe using a class field format string?).
- def __init__(self, type_name, msg, *args, **kwargs):
- full_msg = "error: while trying to generate typed datatype {}: {}".format(
- type_name, msg)
- super(TypedDatatypeClassConstructionError, self).__init__(
- full_msg, *args, **kwargs)
+ `mapping` is a dict mapping enum variant value -> arbitrary object. All variant values must be
+ provided.
+ NB: The objects in `mapping` should be made into lambdas if lazy execution is desired, as this
+ will "evaluate" all of the values in `mapping`.
+ """
+ keys = frozenset(mapping.keys())
+ if keys != frozenset(self._allowed_values):
+ raise self.make_type_error(
+ "pattern matching must have exactly the keys {} (was: {})"
+ .format(self._allowed_values, list(keys)))
+ match_for_variant = mapping[getattr(self, field_name)]
+ return match_for_variant
-class TypedDatatypeInstanceConstructionError(TypeError):
+ @classmethod
+ def iterate_enum_variants(cls):
+ """Iterate over all instances of this enum, in the declared order.
- def __init__(self, type_name, msg, *args, **kwargs):
- full_msg = "error: in constructor of type {}: {}".format(type_name, msg)
- super(TypedDatatypeInstanceConstructionError, self).__init__(
- full_msg, *args, **kwargs)
+ NB: This method is exposed for testing enum variants easily. resolve_for_enum_variant() should
+ be used for performing conditional logic based on an enum instance's value.
+ """
+ # TODO(#7232): use this method to register attributes on the generated type object for each of
+ # the singletons!
+ return cls._singletons.values()
+ return ChoiceDatatype
-class TypeCheckError(TypedDatatypeInstanceConstructionError):
- def __init__(self, type_name, msg, *args, **kwargs):
- formatted_msg = "type check error:\n{}".format(msg)
- super(TypeCheckError, self).__init__(
- type_name, formatted_msg, *args, **kwargs)
+# TODO(#7233): allow usage of the normal register() by using an enum class as the `type` argument!
+def register_enum_option(register, enum_cls, *args, **kwargs):
+ """A helper method for declaring a pants option from an `enum()`."""
+ default_value = kwargs.pop('default', enum_cls.default_value)
+ register(*args, choices=enum_cls._allowed_values, default=default_value, **kwargs)
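Pulling the enum() changes together, a minimal sketch of how the reworked API is meant to be used
(the Severity enum and the option name are hypothetical):

    from pants.util.objects import enum, register_enum_option

    # One positional argument: field_name defaults to 'value'.
    Severity = enum(['info', 'warn', 'error'])

    Severity('warn')                             # the constructor now forwards to create()
    Severity.create()                            # no argument -> the default, Severity('info')
    Severity.create(None, none_is_default=True)  # None maps to the default only when asked

    # Conditional logic uses exhaustive matching; == on enum instances now raises.
    label = Severity('error').resolve_for_enum_variant({
      'info': 'I',
      'warn': 'W',
      'error': 'E',
    })

    # In a subsystem's register_options(), for example:
    #   register_enum_option(register, Severity, '--min-severity')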
+# TODO: make these members of the `TypeConstraint` class!
class TypeConstraintError(TypeError):
"""Indicates a :class:`TypeConstraint` violation."""
@@ -273,43 +381,99 @@ class TypeConstraintError(TypeError):
class TypeConstraint(AbstractClass):
"""Represents a type constraint.
- Not intended for direct use; instead, use one of :class:`SuperclassesOf`, :class:`Exact` or
+ Not intended for direct use; instead, use one of :class:`SuperclassesOf`, :class:`Exactly` or
:class:`SubclassesOf`.
"""
- def __init__(self, *types, **kwargs):
+ def __init__(self, description):
"""Creates a type constraint centered around the given types.
The type constraint is satisfied as a whole if satisfied for at least one of the given types.
- :param type *types: The focus of this type constraint.
- :param str description: A description for this constraint if the list of types is too long.
+ :param str description: A concise, readable description of what the type constraint represents.
+ Used directly as the __str__ implementation.
+ """
+ self._description = description
+
+ @abstractmethod
+ def satisfied_by(self, obj):
+ """Return `True` if the given object satisfies this type constraint.
+
+ :rtype: bool
+ """
+
+ def make_type_constraint_error(self, obj, constraint):
+ return TypeConstraintError(
+ "value {!r} (with type {!r}) must satisfy this type constraint: {}."
+ .format(obj, type(obj).__name__, constraint))
+
+ # TODO: disallow overriding this method with some form of mixin/decorator along with datatype
+ # __eq__!
+ def validate_satisfied_by(self, obj):
+ """Return `obj` if the object satisfies this type constraint, or raise.
+
+ :raises: `TypeConstraintError` if `obj` does not satisfy the constraint.
+ """
+
+ if self.satisfied_by(obj):
+ return obj
+
+ raise self.make_type_constraint_error(obj, self)
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __str__(self):
+ return self._description
+
+
+class TypeOnlyConstraint(TypeConstraint):
+ """A `TypeConstraint` predicated only on the object's type.
+
+ `TypeConstraint` subclasses may override `.satisfied_by()` to perform arbitrary validation on the
+ object itself -- however, this class implements `.satisfied_by()` with a guarantee that it will
+ only act on the object's `type` via `.satisfied_by_type()`. This kind of type checking is faster
+ and easier to understand than the more complex validation allowed by `.satisfied_by()`.
+ """
+
+ # TODO: make an @abstract_classproperty decorator to do this boilerplate!
+ @classproperty
+ def _variance_symbol(cls):
+ """This is propagated to the the `TypeConstraint` constructor."""
+ raise NotImplementedError('{} must implement the _variance_symbol classproperty!'
+ .format(cls.__name__))
+
+ def __init__(self, *types):
+ """Creates a type constraint based on some logic to match the given types.
+
+ NB: A `TypeOnlyConstraint` implementation should ensure that the type constraint is satisfied as
+ a whole if satisfied for at least one of the given `types`.
+
+ :param type *types: The types this constraint will match in some way.
"""
+
if not types:
raise ValueError('Must supply at least one type')
if any(not isinstance(t, type) for t in types):
raise TypeError('Supplied types must be types. {!r}'.format(types))
- # NB: `types` is converted to tuple here because self.types's docstring says
- # it returns a tuple. Does it matter what type this field is?
+ if len(types) == 1:
+ type_list = types[0].__name__
+ else:
+ type_list = ' or '.join(t.__name__ for t in types)
+ description = '{}({})'.format(type(self).__name__, type_list)
+
+ super(TypeOnlyConstraint, self).__init__(description=description)
+
+ # NB: This is made into a tuple so that we can use self._types in issubclass() and others!
self._types = tuple(types)
- self._desc = kwargs.get('description', None)
+ # TODO(#7114): remove this after the engine is converted to use `TypeId` instead of
+ # `TypeConstraint`!
@property
def types(self):
- """Return the subject types of this type constraint.
-
- :type: tuple of type
- """
return self._types
- def satisfied_by(self, obj):
- """Return `True` if the given object satisfies this type constraint.
-
- :rtype: bool
- """
- return self.satisfied_by_type(type(obj))
-
@abstractmethod
def satisfied_by_type(self, obj_type):
"""Return `True` if the given object satisfies this type constraint.
@@ -317,18 +481,8 @@ def satisfied_by_type(self, obj_type):
:rtype: bool
"""
- def validate_satisfied_by(self, obj):
- """Return `obj` if the object satisfies this type constraint, or raise.
-
- :raises: `TypeConstraintError` if `obj` does not satisfy the constraint.
- """
-
- if self.satisfied_by(obj):
- return obj
-
- raise TypeConstraintError(
- "value {!r} (with type {!r}) must satisfy this type constraint: {!r}."
- .format(obj, type(obj).__name__, self))
+ def satisfied_by(self, obj):
+ return self.satisfied_by_type(type(obj))
def __hash__(self):
return hash((type(self), self._types))
@@ -336,44 +490,23 @@ def __hash__(self):
def __eq__(self, other):
return type(self) == type(other) and self._types == other._types
- def __ne__(self, other):
- return not (self == other)
-
- def __str__(self):
- if self._desc:
- constrained_type = '({})'.format(self._desc)
- else:
- if len(self._types) == 1:
- constrained_type = self._types[0].__name__
- else:
- constrained_type = '({})'.format(', '.join(t.__name__ for t in self._types))
- return '{variance_symbol}{constrained_type}'.format(variance_symbol=self._variance_symbol,
- constrained_type=constrained_type)
-
def __repr__(self):
- if self._desc:
- constrained_type = self._desc
- else:
- constrained_type = ', '.join(t.__name__ for t in self._types)
+ constrained_type = ', '.join(t.__name__ for t in self._types)
return ('{type_constraint_type}({constrained_type})'
.format(type_constraint_type=type(self).__name__,
- constrained_type=constrained_type))
+ constrained_type=constrained_type))
-class SuperclassesOf(TypeConstraint):
+class SuperclassesOf(TypeOnlyConstraint):
"""Objects of the exact type as well as any super-types are allowed."""
- _variance_symbol = '-'
-
def satisfied_by_type(self, obj_type):
return any(issubclass(t, obj_type) for t in self._types)
-class Exactly(TypeConstraint):
+class Exactly(TypeOnlyConstraint):
"""Only objects of the exact type are allowed."""
- _variance_symbol = '='
-
def satisfied_by_type(self, obj_type):
return obj_type in self._types
@@ -384,41 +517,66 @@ def graph_str(self):
return repr(self)
-class SubclassesOf(TypeConstraint):
+class SubclassesOf(TypeOnlyConstraint):
"""Objects of the exact type as well as any sub-types are allowed."""
- _variance_symbol = '+'
-
def satisfied_by_type(self, obj_type):
return issubclass(obj_type, self._types)
-class Collection(object):
- """Constructs classes representing collections of objects of a particular type.
+class TypedCollection(TypeConstraint):
+ """A `TypeConstraint` which accepts a TypeOnlyConstraint and validates a collection."""
- The produced class will expose its values under a field named dependencies - this is a stable API
- which may be consumed e.g. over FFI from the engine.
+ _iterable_constraint = SubclassesOf(Iterable)
- Python consumers of a Collection should prefer to use its standard iteration API.
- """
- # TODO: could we check that the input is iterable in the ctor?
-
- @classmethod
- @memoized
- def of(cls, *element_types):
- union = '|'.join(element_type.__name__ for element_type in element_types)
- type_name = '{}.of({})'.format(cls.__name__, union)
- if PY2:
- type_name = type_name.encode('utf-8')
- # TODO: could we allow type checking in the datatype() invocation here?
- supertypes = (cls, datatype(['dependencies'], superclass_name='Collection'))
- properties = {'element_types': element_types}
- collection_of_type = type(type_name, supertypes, properties)
-
- # Expose the custom class type at the module level to be pickle compatible.
- setattr(sys.modules[cls.__module__], type_name, collection_of_type)
-
- return collection_of_type
-
- def __iter__(self):
- return iter(self.dependencies)
+ def __init__(self, constraint):
+ """Create a `TypeConstraint` which validates each member of a collection with `constraint`.
+
+ :param TypeOnlyConstraint constraint: the `TypeConstraint` to apply to each element. This is
+ currently required to be a `TypeOnlyConstraint` to avoid
+ complex prototypal type relationships.
+ """
+
+ if not isinstance(constraint, TypeOnlyConstraint):
+ raise TypeError("constraint for collection must be a {}! was: {}"
+ .format(TypeOnlyConstraint.__name__, constraint))
+
+ description = '{}({})'.format(type(self).__name__, constraint)
+
+ self._constraint = constraint
+
+ super(TypedCollection, self).__init__(description=description)
+
+ # TODO: consider making this a private method of TypeConstraint, as it now duplicates the logic in
+ # self.validate_satisfied_by()!
+ def satisfied_by(self, obj):
+ if self._iterable_constraint.satisfied_by(obj):
+ return all(self._constraint.satisfied_by(el) for el in obj)
+ return False
+
+ def make_collection_type_constraint_error(self, base_obj, el):
+ base_error = self.make_type_constraint_error(el, self._constraint)
+ return TypeConstraintError("in wrapped constraint {} matching iterable object {}: {}"
+ .format(self, base_obj, base_error))
+
+ def validate_satisfied_by(self, obj):
+ if self._iterable_constraint.satisfied_by(obj):
+ for el in obj:
+ if not self._constraint.satisfied_by(el):
+ raise self.make_collection_type_constraint_error(obj, el)
+ return obj
+
+ base_iterable_error = self.make_type_constraint_error(obj, self._iterable_constraint)
+ raise TypeConstraintError(
+ "in wrapped constraint {}: {}".format(self, base_iterable_error))
+
+ def __hash__(self):
+ return hash((type(self), self._constraint))
+
+ def __eq__(self, other):
+ return type(self) == type(other) and self._constraint == other._constraint
+
+ def __repr__(self):
+ return ('{type_constraint_type}({constraint!r})'
+ .format(type_constraint_type=type(self).__name__,
+ constraint=self._constraint))
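To illustrate the reshaped hierarchy: the type-only checks (SuperclassesOf, Exactly, SubclassesOf)
now sit under TypeOnlyConstraint, and TypedCollection wraps exactly one of them to validate every
element of an iterable. A small sketch:

    from pants.util.objects import Exactly, TypeConstraintError, TypedCollection

    int_constraint = Exactly(int)
    int_constraint.satisfied_by(3)           # True
    int_constraint.validate_satisfied_by(3)  # returns 3

    ints = TypedCollection(Exactly(int))
    ints.validate_satisfied_by([1, 2, 3])    # returns the list unchanged
    try:
      ints.validate_satisfied_by([1, 'two'])
    except TypeConstraintError as e:
      # The message names the wrapped constraint and the offending element.
      print(e)

    # TypedCollection requires a TypeOnlyConstraint, so e.g. wrapping another
    # TypedCollection raises TypeError at construction time.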
diff --git a/src/rust/engine/Cargo.lock b/src/rust/engine/Cargo.lock
index 4c13841e6d2..2648f2b9e20 100644
--- a/src/rust/engine/Cargo.lock
+++ b/src/rust/engine/Cargo.lock
@@ -98,7 +98,6 @@ dependencies = [
"grpcio 0.3.0 (git+https://github.com/pantsbuild/grpc-rs.git?rev=4dfafe9355dc996d7d0702e7386a6fedcd9734c0)",
"grpcio-compiler 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"hashing 0.0.1",
- "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"prost 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"prost-derive 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"prost-types 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1280,28 +1279,18 @@ dependencies = [
"fs 0.0.1",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"futures-timer 0.1.1 (git+https://github.com/pantsbuild/futures-timer?rev=0b747e565309a58537807ab43c674d8951f9e5a0)",
- "h2 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "grpcio 0.3.0 (git+https://github.com/pantsbuild/grpc-rs.git?rev=4dfafe9355dc996d7d0702e7386a6fedcd9734c0)",
"hashing 0.0.1",
- "http 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mock 0.0.1",
- "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "prost 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "prost-types 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"protobuf 2.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"resettable 0.0.1",
"sha2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"testutil 0.0.1",
"time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-codec 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "tokio-connect 0.1.0 (git+https://github.com/pantsbuild/tokio-connect?rev=f7ad1ca437973d6e24037ac6f7d5ef1013833c0b)",
"tokio-process 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "tower-grpc 0.1.0 (git+https://github.com/pantsbuild/tower-grpc.git?rev=ef19f2e1715f415ecb699e8f17f5845ad2b45daf)",
- "tower-h2 0.1.0 (git+https://github.com/pantsbuild/tower-h2?rev=44b0efb4983b769283efd5b2a3bc3decbf7c33de)",
- "tower-http 0.1.0 (git+https://github.com/pantsbuild/tower-http?rev=56049ee7f31d4f6c549f5d1d5fbbfd7937df3d00)",
- "tower-util 0.1.0 (git+https://github.com/pantsbuild/tower?rev=7b61c1fc1992c1df684fd3f179644ef0ca9bfa4c)",
]
[[package]]
@@ -1316,7 +1305,6 @@ dependencies = [
"hashing 0.0.1",
"process_execution 0.0.1",
"resettable 0.0.1",
- "tokio 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
diff --git a/src/rust/engine/fs/src/snapshot.rs b/src/rust/engine/fs/src/snapshot.rs
index 1badce3abc7..3701fd2278e 100644
--- a/src/rust/engine/fs/src/snapshot.rs
+++ b/src/rust/engine/fs/src/snapshot.rs
@@ -3,7 +3,7 @@
use crate::glob_matching::GlobMatching;
use crate::pool::ResettablePool;
-use crate::{File, PathGlobs, PathStat, PosixFS, Store};
+use crate::{Dir, File, PathGlobs, PathStat, PosixFS, Store};
use bazel_protos;
use boxfuture::{try_future, BoxFuture, Boxable};
use futures::future::{self, join_all};
@@ -44,27 +44,57 @@ impl Snapshot {
>(
store: Store,
file_digester: &S,
- path_stats: Vec<PathStat>,
+ mut path_stats: Vec<PathStat>,
) -> BoxFuture<Snapshot, String> {
- let mut sorted_path_stats = path_stats.clone();
- sorted_path_stats.sort_by(|a, b| a.path().cmp(b.path()));
+ path_stats.sort_by(|a, b| a.path().cmp(b.path()));
// The helper assumes that if a Path has multiple children, it must be a directory.
// Proactively error if we run into identically named files, because otherwise we will treat
// them like empty directories.
- sorted_path_stats.dedup_by(|a, b| a.path() == b.path());
- if sorted_path_stats.len() != path_stats.len() {
+ let pre_dedupe_len = path_stats.len();
+ path_stats.dedup_by(|a, b| a.path() == b.path());
+ if path_stats.len() != pre_dedupe_len {
return future::err(format!(
"Snapshots must be constructed from unique path stats; got duplicates in {:?}",
path_stats
))
.to_boxed();
}
- Snapshot::ingest_directory_from_sorted_path_stats(store, file_digester, &sorted_path_stats)
+ Snapshot::ingest_directory_from_sorted_path_stats(store, file_digester, &path_stats)
.map(|digest| Snapshot { digest, path_stats })
.to_boxed()
}
+ pub fn from_digest(store: Store, digest: Digest) -> BoxFuture<Snapshot, String> {
+ store
+ .walk(digest, |_, path_so_far, _, directory| {
+ let mut path_stats = Vec::new();
+ path_stats.extend(directory.get_directories().iter().map(move |dir_node| {
+ let path = path_so_far.join(dir_node.get_name());
+ PathStat::dir(path.clone(), Dir(path))
+ }));
+ path_stats.extend(directory.get_files().iter().map(move |file_node| {
+ let path = path_so_far.join(file_node.get_name());
+ PathStat::file(
+ path.clone(),
+ File {
+ path,
+ is_executable: file_node.is_executable,
+ },
+ )
+ }));
+ future::ok(path_stats).to_boxed()
+ })
+ .map(move |path_stats_per_directory| {
+ let mut path_stats =
+ Iterator::flatten(path_stats_per_directory.into_iter().map(|v| v.into_iter()))
+ .collect::<Vec<_>>();
+ path_stats.sort_by(|l, r| l.path().cmp(&r.path()));
+ Snapshot { digest, path_stats }
+ })
+ .to_boxed()
+ }
+
pub fn digest_from_path_stats<
S: StoreFileByDigest<Error> + Sized + Clone,
Error: fmt::Debug + 'static + Send,
@@ -312,29 +342,44 @@ impl Snapshot {
.to_boxed()
}
- pub fn capture_snapshot_from_arbitrary_root<P: AsRef<Path>>(
+ ///
+ /// Capture a Snapshot of a presumed-immutable piece of the filesystem.
+ ///
+ /// Note that we don't use a Graph here, and don't cache any intermediate steps, we just place
+ /// the resultant Snapshot into the store and return it. This is important, because we're reading
+ /// things from arbitrary filepaths which we don't want to cache in the graph, as we don't watch
+ /// them for changes.
+ ///
+ /// If the `digest_hint` is given, first attempt to load the Snapshot using that Digest, and only
+ /// fall back to actually walking the filesystem if we don't have it (either due to garbage
+ /// collection or Digest-oblivious legacy caching).
+ ///
+ pub fn capture_snapshot_from_arbitrary_root<P: AsRef<Path> + Send + 'static>(
store: Store,
fs_pool: Arc<ResettablePool>,
root_path: P,
path_globs: PathGlobs,
+ digest_hint: Option<Digest>,
) -> BoxFuture<Snapshot, String> {
- // Note that we don't use a Graph here, and don't cache any intermediate steps, we just place
- // the resultant Snapshot into the store and return it. This is important, because we're reading
- // things from arbitrary filepaths which we don't want to cache in the graph, as we don't watch
- // them for changes.
- // We assume that this Snapshot is of an immutable piece of the filesystem.
-
- let posix_fs = Arc::new(try_future!(PosixFS::new(root_path, fs_pool, &[])));
-
- posix_fs
- .expand(path_globs)
- .map_err(|err| format!("Error expanding globs: {:?}", err))
- .and_then(|path_stats| {
- Snapshot::from_path_stats(
- store.clone(),
- &OneOffStoreFileByDigest::new(store, posix_fs),
- path_stats,
- )
+ // Attempt to use the digest hint to load a Snapshot without expanding the globs; otherwise,
+ // expand the globs to capture a Snapshot.
+ let store2 = store.clone();
+ future::result(digest_hint.ok_or_else(|| "No digest hint provided.".to_string()))
+ .and_then(move |digest| Snapshot::from_digest(store, digest))
+ .or_else(|_| {
+ let posix_fs = Arc::new(try_future!(PosixFS::new(root_path, fs_pool, &[])));
+
+ posix_fs
+ .expand(path_globs)
+ .map_err(|err| format!("Error expanding globs: {:?}", err))
+ .and_then(|path_stats| {
+ Snapshot::from_path_stats(
+ store2.clone(),
+ &OneOffStoreFileByDigest::new(store2, posix_fs),
+ path_stats,
+ )
+ })
+ .to_boxed()
})
.to_boxed()
}
@@ -507,6 +552,27 @@ mod tests {
);
}
+ #[test]
+ fn snapshot_from_digest() {
+ let (store, dir, posix_fs, digester) = setup();
+
+ let cats = PathBuf::from("cats");
+ let roland = cats.join("roland");
+ std::fs::create_dir_all(&dir.path().join(cats)).unwrap();
+ make_file(&dir.path().join(&roland), STR.as_bytes(), 0o600);
+
+ let path_stats = expand_all_sorted(posix_fs);
+ let expected_snapshot = Snapshot::from_path_stats(store.clone(), &digester, path_stats.clone())
+ .wait()
+ .unwrap();
+ assert_eq!(
+ expected_snapshot,
+ Snapshot::from_digest(store, expected_snapshot.digest)
+ .wait()
+ .unwrap(),
+ );
+ }
+
#[test]
fn snapshot_recursive_directories_including_empty() {
let (store, dir, posix_fs, digester) = setup();
@@ -535,7 +601,7 @@ mod tests {
.unwrap(),
232,
),
- path_stats: unsorted_path_stats,
+ path_stats: sorted_path_stats,
}
);
}
diff --git a/src/rust/engine/fs/src/store.rs b/src/rust/engine/fs/src/store.rs
index 1e1f5a2fbcf..412be087044 100644
--- a/src/rust/engine/fs/src/store.rs
+++ b/src/rust/engine/fs/src/store.rs
@@ -461,51 +461,22 @@ impl Store {
}
pub fn expand_directory(&self, digest: Digest) -> BoxFuture<HashMap<Digest, EntryType>, String> {
- let accumulator = Arc::new(Mutex::new(HashMap::new()));
-
- self
- .expand_directory_helper(digest, accumulator.clone())
- .map(|()| {
- Arc::try_unwrap(accumulator)
- .expect("Arc should have been unwrappable")
- .into_inner()
- })
- .to_boxed()
- }
-
- fn expand_directory_helper(
- &self,
- digest: Digest,
- accumulator: Arc<Mutex<HashMap<Digest, EntryType>>>,
- ) -> BoxFuture<(), String> {
- let store = self.clone();
self
- .load_directory(digest)
- .and_then(move |maybe_directory| match maybe_directory {
- Some(directory) => {
- {
- let mut accumulator = accumulator.lock();
- accumulator.insert(digest, EntryType::Directory);
- for file in directory.get_files() {
- accumulator.insert(try_future!(file.get_digest().into()), EntryType::File);
- }
- }
- future::join_all(
- directory
- .get_directories()
- .iter()
- .map(move |subdir| {
- store.clone().expand_directory_helper(
- try_future!(subdir.get_digest().into()),
- accumulator.clone(),
- )
- })
- .collect::<Vec<_>>(),
- )
- .map(|_| ())
- .to_boxed()
+ .walk(digest, |_, _, digest, directory| {
+ let mut digest_types = Vec::new();
+ digest_types.push((digest, EntryType::Directory));
+ for file in directory.get_files() {
+ digest_types.push((try_future!(file.get_digest().into()), EntryType::File));
}
- None => future::err(format!("Could not expand unknown directory: {:?}", digest)).to_boxed(),
+ future::ok(digest_types).to_boxed()
+ })
+ .map(|digest_pairs_per_directory| {
+ Iterator::flatten(
+ digest_pairs_per_directory
+ .into_iter()
+ .map(|v| v.into_iter()),
+ )
+ .collect()
})
.to_boxed()
}
@@ -579,78 +550,124 @@ impl Store {
}
// Returns files sorted by their path.
- pub fn contents_for_directory(
- &self,
- directory: &bazel_protos::remote_execution::Directory,
- ) -> BoxFuture<Vec<FileContent>, String> {
- let accumulator = Arc::new(Mutex::new(HashMap::new()));
+ pub fn contents_for_directory(&self, digest: Digest) -> BoxFuture<Vec<FileContent>, String> {
self
- .contents_for_directory_helper(directory, PathBuf::new(), accumulator.clone())
- .map(|()| {
- let map = Arc::try_unwrap(accumulator).unwrap().into_inner();
- let mut vec: Vec<FileContent> = map
- .into_iter()
- .map(|(path, content)| FileContent { path, content })
- .collect();
+ .walk(digest, |store, path_so_far, _, directory| {
+ future::join_all(
+ directory
+ .get_files()
+ .iter()
+ .map(move |file_node| {
+ let path = path_so_far.join(file_node.get_name());
+ store
+ .load_file_bytes_with(try_future!(file_node.get_digest().into()), |b| b)
+ .and_then(move |maybe_bytes| {
+ maybe_bytes
+ .ok_or_else(|| format!("Couldn't find file contents for {:?}", path))
+ .map(|content| FileContent { path, content })
+ })
+ .to_boxed()
+ })
+ .collect::<Vec<_>>(),
+ )
+ .to_boxed()
+ })
+ .map(|file_contents_per_directory| {
+ let mut vec = Iterator::flatten(
+ file_contents_per_directory
+ .into_iter()
+ .map(|v| v.into_iter()),
+ )
+ .collect::<Vec<_>>();
vec.sort_by(|l, r| l.path.cmp(&r.path));
vec
})
.to_boxed()
}
- // Assumes that all fingerprints it encounters are valid.
- fn contents_for_directory_helper(
+ ///
+ /// Given the Digest for a Directory, recursively walk the Directory, calling the given function
+ /// with the path so far, and the new Directory.
+ ///
+ /// The recursive walk will proceed concurrently, so if order matters, a caller should sort the
+ /// output after the call.
+ ///
+ pub fn walk<
+ T: Send + 'static,
+ F: Fn(
+ &Store,
+ &PathBuf,
+ Digest,
+ &bazel_protos::remote_execution::Directory,
+ ) -> BoxFuture<T, String>
+ + Send
+ + Sync
+ + 'static,
+ >(
&self,
- directory: &bazel_protos::remote_execution::Directory,
+ digest: Digest,
+ f: F,
+ ) -> BoxFuture<Vec<T>, String> {
+ let f = Arc::new(f);
+ let accumulator = Arc::new(Mutex::new(Vec::new()));
+ self
+ .walk_helper(digest, PathBuf::new(), f, accumulator.clone())
+ .map(|()| {
+ Arc::try_unwrap(accumulator)
+ .unwrap_or_else(|_| panic!("walk_helper violated its contract."))
+ .into_inner()
+ })
+ .to_boxed()
+ }
+
+ fn walk_helper<
+ T: Send + 'static,
+ F: Fn(
+ &Store,
+ &PathBuf,
+ Digest,
+ &bazel_protos::remote_execution::Directory,
+ ) -> BoxFuture<T, String>
+ + Send
+ + Sync
+ + 'static,
+ >(
+ &self,
+ digest: Digest,
path_so_far: PathBuf,
- contents_wrapped: Arc<Mutex<HashMap<PathBuf, Bytes>>>,
+ f: Arc<F>,
+ accumulator: Arc<Mutex<Vec<T>>>,
) -> BoxFuture<(), String> {
- let contents_wrapped_copy = contents_wrapped.clone();
- let path_so_far_copy = path_so_far.clone();
- let store_copy = self.clone();
- let file_futures = future::join_all(
- directory
- .get_files()
- .iter()
- .map(move |file_node| {
- let path = path_so_far_copy.join(file_node.get_name());
- let contents_wrapped_copy = contents_wrapped_copy.clone();
- store_copy
- .load_file_bytes_with(try_future!(file_node.get_digest().into()), |b| b)
- .and_then(move |maybe_bytes| {
- maybe_bytes
- .ok_or_else(|| format!("Couldn't find file contents for {:?}", path))
- .map(move |bytes| {
- let mut contents = contents_wrapped_copy.lock();
- contents.insert(path, bytes);
- })
- })
- .to_boxed()
- })
- .collect::<Vec<_>>(),
- );
let store = self.clone();
- let dir_futures = future::join_all(
- directory
- .get_directories()
- .iter()
- .map(move |dir_node| {
- let digest = try_future!(dir_node.get_digest().into());
- let path = path_so_far.join(dir_node.get_name());
- let store = store.clone();
- let contents_wrapped = contents_wrapped.clone();
- store
- .load_directory(digest)
- .and_then(move |maybe_dir| {
- maybe_dir
- .ok_or_else(|| format!("Could not find sub-directory with digest {:?}", digest))
+ self
+ .load_directory(digest)
+ .and_then(move |maybe_directory| match maybe_directory {
+ Some(directory) => {
+ let result_for_directory = f(&store, &path_so_far, digest, &directory);
+ result_for_directory
+ .and_then(move |r| {
+ {
+ let mut accumulator = accumulator.lock();
+ accumulator.push(r);
+ }
+ future::join_all(
+ directory
+ .get_directories()
+ .iter()
+ .map(move |dir_node| {
+ let subdir_digest = try_future!(dir_node.get_digest().into());
+ let path = path_so_far.join(dir_node.get_name());
+ store.walk_helper(subdir_digest, path, f.clone(), accumulator.clone())
+ })
+ .collect::<Vec<_>>(),
+ )
+ .map(|_| ())
})
- .and_then(move |dir| store.contents_for_directory_helper(&dir, path, contents_wrapped))
.to_boxed()
- })
- .collect::<Vec<_>>(),
- );
- file_futures.join(dir_futures).map(|(_, _)| ()).to_boxed()
+ }
+ None => future::err(format!("Could not walk unknown directory: {:?}", digest)).to_boxed(),
+ })
+ .to_boxed()
}
}
@@ -3501,7 +3518,7 @@ mod tests {
let store = new_local_store(store_dir.path());
let file_contents = store
- .contents_for_directory(&TestDirectory::empty().directory())
+ .contents_for_directory(TestDirectory::empty().digest())
.wait()
.expect("Getting FileContents");
@@ -3535,7 +3552,7 @@ mod tests {
.expect("Error saving catnip file bytes");
let file_contents = store
- .contents_for_directory(&recursive_testdir.directory())
+ .contents_for_directory(recursive_testdir.digest())
.wait()
.expect("Getting FileContents");
diff --git a/src/rust/engine/graph/src/entry.rs b/src/rust/engine/graph/src/entry.rs
index 09bf6ebc056..1abe49f3dfa 100644
--- a/src/rust/engine/graph/src/entry.rs
+++ b/src/rust/engine/graph/src/entry.rs
@@ -574,6 +574,6 @@ impl Entry {
Some(Err(ref x)) => format!("{:?}", x),
None => "".to_string(),
};
- format!("{} == {}", self.node.content().format(), state).replace("\"", "\\\"")
+ format!("{} == {}", self.node.content(), state).replace("\"", "\\\"")
}
}
diff --git a/src/rust/engine/graph/src/lib.rs b/src/rust/engine/graph/src/lib.rs
index f0c554ff2b1..497c1c13db8 100644
--- a/src/rust/engine/graph/src/lib.rs
+++ b/src/rust/engine/graph/src/lib.rs
@@ -370,7 +370,7 @@ impl InnerGraph {
let format = |eid: EntryId, depth: usize, is_last: bool| -> String {
let entry = self.unsafe_entry_for_id(eid);
let indent = " ".repeat(depth);
- let output = format!("{}Computing {}", indent, entry.node().format());
+ let output = format!("{}Computing {}", indent, entry.node());
if is_last {
format!(
"{}\n{} {}",
@@ -430,7 +430,7 @@ impl InnerGraph {
if deps.peek().is_none() {
// If the entry has no running deps, it is a leaf. Emit it.
- res.insert(self.unsafe_entry_for_id(id).node().format(), duration);
+ res.insert(format!("{}", self.unsafe_entry_for_id(id).node()), duration);
if res.len() >= k {
break;
}
@@ -1055,10 +1055,6 @@ mod tests {
}
}
- fn format(&self) -> String {
- format!("{:?}", self)
- }
-
fn digest(_result: Self::Item) -> Option<Digest> {
None
}
@@ -1068,6 +1064,12 @@ mod tests {
}
}
+ impl std::fmt::Display for TNode {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
+ write!(f, "{:?}", self)
+ }
+ }
+
impl TNode {
///
/// Validates the given TNode output. Both node ids and context ids should increase left to
diff --git a/src/rust/engine/graph/src/node.rs b/src/rust/engine/graph/src/node.rs
index b7a34812ecd..19f9f91124b 100644
--- a/src/rust/engine/graph/src/node.rs
+++ b/src/rust/engine/graph/src/node.rs
@@ -1,7 +1,7 @@
// Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
-use std::fmt::Debug;
+use std::fmt::{Debug, Display};
use std::hash::Hash;
use boxfuture::BoxFuture;
@@ -21,7 +21,7 @@ pub type EntryId = stable_graph::NodeIndex;
///
/// Note that it is assumed that Nodes are very cheap to clone.
///
-pub trait Node: Clone + Debug + Eq + Hash + Send + 'static {
+pub trait Node: Clone + Debug + Display + Eq + Hash + Send + 'static {
type Context: NodeContext<Node = Self>;
type Item: Clone + Debug + Eq + Send + 'static;
@@ -29,9 +29,6 @@ pub trait Node: Clone + Debug + Eq + Hash + Send + 'static {
fn run(self, context: Self::Context) -> BoxFuture<Self::Item, Self::Error>;
- // TODO: Use a `Display` bound instead.
- fn format(&self) -> String;
-
///
/// If the given Node output represents an FS operation, returns its Digest.
///
diff --git a/src/rust/engine/process_execution/Cargo.toml b/src/rust/engine/process_execution/Cargo.toml
index affa75eeae1..2bbef270ffb 100644
--- a/src/rust/engine/process_execution/Cargo.toml
+++ b/src/rust/engine/process_execution/Cargo.toml
@@ -13,28 +13,18 @@ bytes = "0.4.5"
digest = "0.8"
fs = { path = "../fs" }
futures = "^0.1.16"
-# TODO: Switch to a release once https://github.com/alexcrichton/futures-timer/pull/11 and https://github.com/alexcrichton/futures-timer/pull/12 merge
-futures-timer = { git = "https://github.com/pantsbuild/futures-timer", rev = "0b747e565309a58537807ab43c674d8951f9e5a0" }
-h2 = "0.1.13"
+grpcio = { git = "https://github.com/pantsbuild/grpc-rs.git", rev = "4dfafe9355dc996d7d0702e7386a6fedcd9734c0", default_features = false, features = ["protobuf-codec"] }
hashing = { path = "../hashing" }
-http = "0.1"
log = "0.4"
-parking_lot = "0.6"
-prost = "0.4"
-prost-types = "0.4"
protobuf = { version = "2.0.4", features = ["with-bytes"] }
resettable = { path = "../resettable" }
sha2 = "0.8"
tempfile = "3"
+# TODO: Switch to a release once https://github.com/alexcrichton/futures-timer/pull/11 and https://github.com/alexcrichton/futures-timer/pull/12 merge
+futures-timer = { git = "https://github.com/pantsbuild/futures-timer", rev = "0b747e565309a58537807ab43c674d8951f9e5a0" }
time = "0.1.40"
-tokio = "0.1.14"
tokio-codec = "0.1"
-tokio-connect = { git = "https://github.com/pantsbuild/tokio-connect.git", rev = "f7ad1ca437973d6e24037ac6f7d5ef1013833c0b" }
tokio-process = "0.2.1"
-tower-grpc = { git = "https://github.com/pantsbuild/tower-grpc.git", rev = "ef19f2e1715f415ecb699e8f17f5845ad2b45daf" }
-tower-h2 = { git = "https://github.com/pantsbuild/tower-h2.git", rev = "44b0efb4983b769283efd5b2a3bc3decbf7c33de" }
-tower-http = { git = "https://github.com/pantsbuild/tower-http.git", rev = "56049ee7f31d4f6c549f5d1d5fbbfd7937df3d00" }
-tower-util = { git = "https://github.com/pantsbuild/tower.git", rev = "7b61c1fc1992c1df684fd3f179644ef0ca9bfa4c" }
[dev-dependencies]
mock = { path = "../testutil/mock" }
diff --git a/src/rust/engine/process_execution/bazel_protos/Cargo.toml b/src/rust/engine/process_execution/bazel_protos/Cargo.toml
index 6890cef290f..7cd044c7471 100644
--- a/src/rust/engine/process_execution/bazel_protos/Cargo.toml
+++ b/src/rust/engine/process_execution/bazel_protos/Cargo.toml
@@ -10,7 +10,6 @@ bytes = "0.4.5"
futures = "^0.1.16"
grpcio = { git = "https://github.com/pantsbuild/grpc-rs.git", rev = "4dfafe9355dc996d7d0702e7386a6fedcd9734c0", default_features = false, features = ["protobuf-codec"] }
hashing = { path = "../../hashing" }
-log = "0.4"
prost = "0.4"
prost-derive = "0.4"
prost-types = "0.4"
diff --git a/src/rust/engine/process_execution/bazel_protos/build.rs b/src/rust/engine/process_execution/bazel_protos/build.rs
index 23c441961f3..0c9e136ad9a 100644
--- a/src/rust/engine/process_execution/bazel_protos/build.rs
+++ b/src/rust/engine/process_execution/bazel_protos/build.rs
@@ -176,11 +176,9 @@ fn generate_for_tower(thirdpartyprotobuf: &Path, out_dir: PathBuf) {
.enable_server(true)
.enable_client(true)
.build(
- &[
- PathBuf::from("build/bazel/remote/execution/v2/remote_execution.proto"),
- PathBuf::from("google/rpc/code.proto"),
- PathBuf::from("google/rpc/error_details.proto"),
- ],
+ &[PathBuf::from(
+ "build/bazel/remote/execution/v2/remote_execution.proto",
+ )],
&std::fs::read_dir(&thirdpartyprotobuf)
.unwrap()
.into_iter()
diff --git a/src/rust/engine/process_execution/bazel_protos/src/conversions.rs b/src/rust/engine/process_execution/bazel_protos/src/conversions.rs
index e46767fb1e6..f017612f321 100644
--- a/src/rust/engine/process_execution/bazel_protos/src/conversions.rs
+++ b/src/rust/engine/process_execution/bazel_protos/src/conversions.rs
@@ -1,7 +1,4 @@
-use bytes::BytesMut;
use hashing;
-use log::error;
-use prost::Message;
impl<'a> From<&'a hashing::Digest> for crate::remote_execution::Digest {
fn from(d: &hashing::Digest) -> Self {
@@ -21,31 +18,19 @@ impl<'a> From<&'a hashing::Digest> for crate::build::bazel::remote::execution::v
}
}
-impl<'a> From<&'a crate::remote_execution::Digest> for Result<hashing::Digest, String> {
- fn from(d: &crate::remote_execution::Digest) -> Self {
+impl<'a> From<&'a super::remote_execution::Digest> for Result<hashing::Digest, String> {
+ fn from(d: &super::remote_execution::Digest) -> Self {
hashing::Fingerprint::from_hex_string(d.get_hash())
.map_err(|err| format!("Bad fingerprint in Digest {:?}: {:?}", d.get_hash(), err))
.map(|fingerprint| hashing::Digest(fingerprint, d.get_size_bytes() as usize))
}
}
-impl<'a> From<&'a crate::build::bazel::remote::execution::v2::Digest>
- for Result<hashing::Digest, String>
-{
- fn from(d: &crate::build::bazel::remote::execution::v2::Digest) -> Self {
- hashing::Fingerprint::from_hex_string(&d.hash)
- .map_err(|err| format!("Bad fingerprint in Digest {:?}: {:?}", d.hash, err))
- .map(|fingerprint| hashing::Digest(fingerprint, d.size_bytes as usize))
- }
-}
-
impl From<crate::google::longrunning::Operation> for crate::operations::Operation {
fn from(op: crate::google::longrunning::Operation) -> Self {
let mut dst = Self::new();
dst.set_name(op.name);
- if let Some(metadata) = op.metadata {
- dst.set_metadata(prost_any_to_gcprio_any(metadata));
- }
+ dst.set_metadata(prost_any_to_gcprio_any(op.metadata.unwrap()));
dst.set_done(op.done);
match op.result {
Some(crate::google::longrunning::operation::Result::Response(response)) => {
@@ -60,87 +45,6 @@ impl From for crate::operations::Operatio
}
}
-// This should only be used in test contexts. It should be deleted when the mock systems use tower.
-impl From<crate::remote_execution::ExecuteRequest>
- for crate::build::bazel::remote::execution::v2::ExecuteRequest
-{
- fn from(req: crate::remote_execution::ExecuteRequest) -> Self {
- if req.has_execution_policy() || req.has_results_cache_policy() {
- panic!("Can't convert ExecuteRequest protos with execution policy or results cache policy");
- }
- let digest: Result<hashing::Digest, String> = req.get_action_digest().into();
- Self {
- action_digest: Some((&digest.expect("Bad digest converting ExecuteRequest proto")).into()),
- instance_name: req.instance_name,
- execution_policy: None,
- results_cache_policy: None,
- skip_cache_lookup: req.skip_cache_lookup,
- }
- }
-}
-
-// This should only be used in test contexts. It should be deleted when the mock systems use tower.
-impl From<crate::build::bazel::remote::execution::v2::ExecuteRequest>
- for crate::remote_execution::ExecuteRequest
-{
- fn from(req: crate::build::bazel::remote::execution::v2::ExecuteRequest) -> Self {
- if req.execution_policy.is_some() || req.results_cache_policy.is_some() {
- panic!("Can't convert ExecuteRequest protos with execution policy or results cache policy");
- }
- let digest: Result<hashing::Digest, String> = (&req
- .action_digest
- .expect("Missing digest converting ExecuteRequest proto"))
- .into();
-
- let mut ret = Self::new();
- ret.set_action_digest((&digest.expect("Bad digest converting ExecuteRequest proto")).into());
- ret.set_instance_name(req.instance_name);
- ret.set_skip_cache_lookup(req.skip_cache_lookup);
- ret
- }
-}
-
-// This should only be used in test contexts. It should be deleted when the mock systems use tower.
-impl Into<grpcio::RpcStatus> for crate::google::rpc::Status {
- fn into(self) -> grpcio::RpcStatus {
- let mut buf = BytesMut::with_capacity(self.encoded_len());
- self.encode(&mut buf).unwrap();
- grpcio::RpcStatus {
- status: self.code.into(),
- details: None,
- status_proto_bytes: Some(buf.to_vec()),
- }
- }
-}
-
-// TODO: Use num_enum or similar here when TryInto is stable.
-pub fn code_from_i32(i: i32) -> crate::google::rpc::Code {
- use crate::google::rpc::Code::*;
- match i {
- 0 => Ok,
- 1 => Cancelled,
- 2 => Unknown,
- 3 => InvalidArgument,
- 4 => DeadlineExceeded,
- 5 => NotFound,
- 6 => AlreadyExists,
- 7 => PermissionDenied,
- 8 => ResourceExhausted,
- 9 => FailedPrecondition,
- 10 => Aborted,
- 11 => OutOfRange,
- 12 => Unimplemented,
- 13 => Internal,
- 14 => Unavailable,
- 15 => DataLoss,
- 16 => Unauthenticated,
- _ => {
- error!("Unknown grpc error code: {}, default to Unknown", i);
- Unknown
- }
- }
-}
-
pub fn prost_any_to_gcprio_any(any: prost_types::Any) -> protobuf::well_known_types::Any {
let prost_types::Any { type_url, value } = any;
let mut dst = protobuf::well_known_types::Any::new();
diff --git a/src/rust/engine/process_execution/bazel_protos/src/lib.rs b/src/rust/engine/process_execution/bazel_protos/src/lib.rs
index df86e9d656a..0bfd0d1bcae 100644
--- a/src/rust/engine/process_execution/bazel_protos/src/lib.rs
+++ b/src/rust/engine/process_execution/bazel_protos/src/lib.rs
@@ -13,6 +13,5 @@ mod gen_for_tower;
pub use crate::gen_for_tower::*;
mod conversions;
-pub use crate::conversions::code_from_i32;
mod verification;
pub use crate::verification::verify_directory_canonical;
diff --git a/src/rust/engine/process_execution/src/remote.rs b/src/rust/engine/process_execution/src/remote.rs
index b0272cc5828..bebf1ee61f0 100644
--- a/src/rust/engine/process_execution/src/remote.rs
+++ b/src/rust/engine/process_execution/src/remote.rs
@@ -1,5 +1,7 @@
use std::collections::HashMap;
+use std::mem::drop;
use std::path::PathBuf;
+use std::sync::Arc;
use std::time::{Duration, Instant};
use bazel_protos;
@@ -9,11 +11,10 @@ use digest::{Digest as DigestTrait, FixedOutput};
use fs::{self, File, PathStat, Store};
use futures::{future, Future, Stream};
use futures_timer::Delay;
+use grpcio;
use hashing::{Digest, Fingerprint};
use log::{debug, trace, warn};
-use parking_lot::Mutex;
-use prost::Message;
-use protobuf::{self, Message as GrpcioMessage, ProtobufEnum};
+use protobuf::{self, Message, ProtobufEnum};
use sha2::Sha256;
use time;
@@ -21,36 +22,26 @@ use super::{ExecuteProcessRequest, ExecutionStats, FallibleExecuteProcessResult}
use std;
use std::cmp::min;
-use std::net::SocketAddr;
-use std::net::ToSocketAddrs;
-use tokio::executor::DefaultExecutor;
-use tokio::net::tcp::{ConnectFuture, TcpStream};
-use tower_grpc::Request;
-use tower_h2::client;
-use tower_util::MakeService;
-
// Environment variable which is exclusively used for cache key invalidation.
// This may not be specified in an ExecuteProcessRequest, and may be populated only by the
// CommandRunner.
const CACHE_KEY_GEN_VERSION_ENV_VAR_NAME: &str = "PANTS_CACHE_KEY_GEN_VERSION";
-type Connection = tower_http::add_origin::AddOrigin<
- tower_h2::client::Connection,
->;
-
-struct Clients {
- execution_client:
- Mutex>,
- operations_client: Mutex>,
+#[derive(Debug)]
+enum OperationOrStatus {
+ Operation(bazel_protos::operations::Operation),
+ Status(bazel_protos::status::Status),
}
#[derive(Clone)]
-#[allow(clippy::type_complexity)]
pub struct CommandRunner {
cache_key_gen_version: Option<String>,
instance_name: Option<String>,
authorization_header: Option<String>,
- clients: futures::future::Shared>,
+ channel: grpcio::Channel,
+ env: Arc<grpcio::Environment>,
+ execution_client: Arc<bazel_protos::remote_execution_grpc::ExecutionClient>,
+ operations_client: Arc<bazel_protos::operations_grpc::OperationsClient>,
store: Store,
futures_timer_thread: resettable::Resettable<futures_timer::HelperThread>,
}
@@ -81,36 +72,35 @@ impl CommandRunner {
// behavior.
fn oneshot_execute(
&self,
- execute_request: bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest,
- ) -> impl Future<Item = bazel_protos::google::longrunning::Operation, Error = String> {
- let command_runner = self.clone();
- self
- .clients
- .clone()
- .map_err(|err| format!("Error getting execution_client: {}", err))
- .and_then(move |clients| {
- clients
- .execution_client
- .lock()
- .execute(command_runner.make_request(execute_request))
- .map_err(towergrpcerror_to_string)
- .and_then(|response_stream| {
- response_stream
- .into_inner()
- .take(1)
- .into_future()
- .map_err(|err| {
- format!(
- "Error getting response from remote process execution {:?}",
- err
- )
- })
- .and_then(|(resp, stream)| {
- std::mem::drop(stream);
- resp.ok_or_else(|| "Didn't get response from remote process execution".to_owned())
- })
- })
+ execute_request: &Arc<bazel_protos::remote_execution::ExecuteRequest>,
+ ) -> BoxFuture<OperationOrStatus, String> {
+ let stream = try_future!(self
+ .execution_client
+ .execute_opt(&execute_request, self.call_option())
+ .map_err(rpcerror_to_string));
+ stream
+ .take(1)
+ .into_future()
+ // If there was a response, drop the stream to disconnect so that the server doesn't keep
+ // the connection alive and continue sending on it.
+ .map(|(maybe_operation, stream)| {
+ drop(stream);
+ maybe_operation
+ })
+ // If there was an error, drop the stream to disconnect so that the server doesn't keep the
+ // connection alive and continue sending on it.
+ .map_err(|(error, stream)| {
+ drop(stream);
+ error
+ })
+ .then(|maybe_operation_result| match maybe_operation_result {
+ Ok(Some(operation)) => Ok(OperationOrStatus::Operation(operation)),
+ Ok(None) => {
+ Err("Didn't get proper stream response from server during remote execution".to_owned())
+ }
+ Err(err) => rpcerror_to_status_or_string(err).map(OperationOrStatus::Status),
})
+ .to_boxed()
}
}
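
oneshot_execute now takes the first Operation off the server's streaming reply and immediately drops the stream so the call is torn down rather than left open. A minimal standalone sketch of that take(1)/into_future/drop pattern, assuming futures 0.1, with an in-memory stream and a hypothetical first_item helper standing in for the gRPC call:

extern crate futures; // futures 0.1

use futures::{stream, Future, Stream};

fn first_item<S: Stream>(s: S) -> impl Future<Item = Option<S::Item>, Error = S::Error> {
  s.take(1)
    .into_future()
    // On success, into_future resolves to (first item, rest of stream).
    .map(|(head, rest)| {
      drop(rest); // disconnect: never poll the remainder of the stream
      head
    })
    // On failure it resolves to (error, rest of stream); drop the rest there too.
    .map_err(|(err, rest)| {
      drop(rest);
      err
    })
}

fn main() {
  let fut = first_item(stream::iter_ok::<_, ()>(vec![1, 2, 3]));
  assert_eq!(fut.wait().unwrap(), Some(1));
}
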
@@ -135,7 +125,7 @@ impl super::CommandRunner for CommandRunner {
/// TODO: Request jdk_home be created if set.
///
fn run(&self, req: ExecuteProcessRequest) -> BoxFuture<FallibleExecuteProcessResult, String> {
- let clients = self.clients.clone();
+ let operations_client = self.operations_client.clone();
let store = self.store.clone();
let execute_request_result =
@@ -154,6 +144,8 @@ impl super::CommandRunner for CommandRunner {
Ok((action, command, execute_request)) => {
let command_runner = self.clone();
let command_runner2 = self.clone();
+ let command_runner3 = self.clone();
+ let execute_request = Arc::new(execute_request);
let execute_request2 = execute_request.clone();
let futures_timer_thread = self.futures_timer_thread.clone();
@@ -174,7 +166,7 @@ impl super::CommandRunner for CommandRunner {
command
);
command_runner
- .oneshot_execute(execute_request)
+ .oneshot_execute(&execute_request)
.join(future::ok(history))
})
.and_then(move |(operation, history)| {
@@ -187,9 +179,9 @@ impl super::CommandRunner for CommandRunner {
let execute_request2 = execute_request2.clone();
let store = store.clone();
- let clients = clients.clone();
+ let operations_client = operations_client.clone();
let command_runner2 = command_runner2.clone();
- let command_runner3 = command_runner2.clone();
+ let command_runner3 = command_runner3.clone();
let futures_timer_thread = futures_timer_thread.clone();
let f = command_runner2.extract_execute_response(operation, &mut history);
f.map(future::Loop::Break).or_else(move |value| {
@@ -220,7 +212,7 @@ impl super::CommandRunner for CommandRunner {
let mut history = history;
history.current_attempt += summary;
command_runner2
- .oneshot_execute(execute_request)
+ .oneshot_execute(&execute_request)
.join(future::ok(history))
})
// Reset `iter_num` on `MissingDigests`
@@ -228,11 +220,9 @@ impl super::CommandRunner for CommandRunner {
.to_boxed()
}
ExecutionError::NotFinished(operation_name) => {
- let operation_name2 = operation_name.clone();
- let operation_request =
- bazel_protos::google::longrunning::GetOperationRequest {
- name: operation_name.clone(),
- };
+ let mut operation_request =
+ bazel_protos::operations::GetOperationRequest::new();
+ operation_request.set_name(operation_name.clone());
let backoff_period = min(
CommandRunner::BACKOFF_MAX_WAIT_MILLIS,
@@ -261,23 +251,19 @@ impl super::CommandRunner for CommandRunner {
)
})
.and_then(move |_| {
- clients
- .map_err(|err| format!("{}", err))
- .and_then(move |clients| {
- clients
- .operations_client
- .lock()
- .get_operation(command_runner3.make_request(operation_request))
- .map(|r| r.into_inner())
- .or_else(move |err| {
- rpcerror_recover_cancelled(operation_name2, err)
- })
- .map_err(towergrpcerror_to_string)
- })
- .map(move |operation| {
- future::Loop::Continue((history, operation, iter_num + 1))
- })
- .to_boxed()
+ future::done(
+ operations_client
+ .get_operation_opt(&operation_request, command_runner3.call_option())
+ .or_else(move |err| {
+ rpcerror_recover_cancelled(operation_request.take_name(), err)
+ })
+ .map(OperationOrStatus::Operation)
+ .map_err(rpcerror_to_string),
+ )
+ .map(move |operation| {
+ future::Loop::Continue((history, operation, iter_num + 1))
+ })
+ .to_boxed()
})
.to_boxed()
}
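
The NotFinished arm above runs inside a future::loop_fn body: after the backoff Delay it polls get_operation again and either breaks with a final response or continues with iter_num + 1. A minimal sketch of that control flow, assuming futures 0.1, with plain integers standing in for operations and no timer:

extern crate futures; // futures 0.1

use futures::{future, Future};

fn main() {
  // Keep continuing until the counter reaches 3, then break out with the final value.
  let result = future::loop_fn(0u32, |iter_num| {
    future::ok::<_, ()>(if iter_num < 3 {
      future::Loop::Continue(iter_num + 1)
    } else {
      future::Loop::Break(iter_num)
    })
  })
  .wait()
  .unwrap();
  assert_eq!(result, 3);
}
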
@@ -315,84 +301,57 @@ impl CommandRunner {
address: &str,
cache_key_gen_version: Option<String>,
instance_name: Option<String>,
+ root_ca_certs: Option<Vec<u8>>,
oauth_bearer_token: Option<String>,
+ thread_count: usize,
store: Store,
futures_timer_thread: resettable::Resettable<futures_timer::HelperThread>,
- ) -> Result<CommandRunner, String> {
- struct Dst(SocketAddr);
-
- impl tokio_connect::Connect for Dst {
- type Connected = TcpStream;
- type Error = ::std::io::Error;
- type Future = ConnectFuture;
-
- fn connect(&self) -> Self::Future {
- TcpStream::connect(&self.0)
+ ) -> CommandRunner {
+ let env = Arc::new(grpcio::Environment::new(thread_count));
+ let channel = {
+ let builder = grpcio::ChannelBuilder::new(env.clone());
+ if let Some(_root_ca_certs) = root_ca_certs {
+ panic!("Sorry, we dropped secure grpc support until we can either make openssl link properly, or switch to tower");
+ /*
+ let creds = grpcio::ChannelCredentialsBuilder::new()
+ .root_cert(root_ca_certs)
+ .build();
+ builder.secure_connect(address, creds)
+ */
+ } else {
+ builder.connect(address)
}
- }
-
- // TODO: Support https
- let uri: http::Uri = format!("http://{}", address)
- .parse()
- .map_err(|err| format!("Failed to parse remote server address URL: {}", err))?;
- let socket_addr = address
- .to_socket_addrs()
- .map_err(|err| format!("Failed to resolve remote socket address URL: {}", err))?
- .next()
- .ok_or_else(|| "Remote server address resolved to no addresses".to_owned())?;
- let conn = client::Connect::new(
- Dst(socket_addr),
- h2::client::Builder::default(),
- DefaultExecutor::current(),
- )
- .make_service(())
- .map_err(|err| format!("Error connecting to remote execution server: {}", err))
- .and_then(move |conn| {
- tower_http::add_origin::Builder::new()
- .uri(uri)
- .build(conn)
- .map_err(|err| {
- format!(
- "Failed to add origin for remote execution server: {:?}",
- err
- )
- })
- .map(Mutex::new)
- });
- let clients = conn
- .map(|conn| {
- let conn = conn.lock();
- let execution_client = Mutex::new(
- bazel_protos::build::bazel::remote::execution::v2::client::Execution::new(conn.clone()),
- );
- let operations_client = Mutex::new(
- bazel_protos::google::longrunning::client::Operations::new(conn.clone()),
- );
- Clients {
- execution_client,
- operations_client,
- }
- })
- .to_boxed()
- .shared();
- Ok(CommandRunner {
+ };
+ let execution_client = Arc::new(bazel_protos::remote_execution_grpc::ExecutionClient::new(
+ channel.clone(),
+ ));
+ let operations_client = Arc::new(bazel_protos::operations_grpc::OperationsClient::new(
+ channel.clone(),
+ ));
+
+ CommandRunner {
cache_key_gen_version,
instance_name,
authorization_header: oauth_bearer_token.map(|t| format!("Bearer {}", t)),
- clients,
+ channel,
+ env,
+ execution_client,
+ operations_client,
store,
futures_timer_thread,
- })
+ }
}
- fn make_request<T>(&self, message: T) -> Request<T> {
- let mut request = Request::new(message);
+ fn call_option(&self) -> grpcio::CallOption {
+ let mut call_option = grpcio::CallOption::default();
if let Some(ref authorization_header) = self.authorization_header {
- request
- .metadata_mut()
- .insert("authorization", authorization_header.parse().unwrap());
+ let mut builder = grpcio::MetadataBuilder::with_capacity(1);
+ builder
+ .add_str("authorization", &authorization_header)
+ .unwrap();
+ call_option = call_option.headers(builder.build());
}
- request
+ call_option
}
fn store_proto_locally(
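
CommandRunner::new now accepts root_ca_certs but panics if they are supplied, with the intended TLS wiring left in the comment. For reference, a sketch of what channel construction would look like with that path re-enabled, using only the grpcio calls already named in the commented-out block (make_channel is a hypothetical helper, not part of this change):

use std::sync::Arc;

fn make_channel(
  env: Arc<grpcio::Environment>,
  address: &str,
  root_ca_certs: Option<Vec<u8>>,
) -> grpcio::Channel {
  let builder = grpcio::ChannelBuilder::new(env);
  match root_ca_certs {
    Some(certs) => {
      // TLS: trust only the provided root CA certificates.
      let creds = grpcio::ChannelCredentialsBuilder::new()
        .root_cert(certs)
        .build();
      builder.secure_connect(address, creds)
    }
    // Plaintext, which is what the code above always does for now.
    None => builder.connect(address),
  }
}
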
@@ -411,113 +370,102 @@ impl CommandRunner {
fn extract_execute_response(
&self,
- operation: bazel_protos::google::longrunning::Operation,
+ operation_or_status: OperationOrStatus,
attempts: &mut ExecutionHistory,
) -> BoxFuture<FallibleExecuteProcessResult, ExecutionError> {
- trace!("Got operation response: {:?}", operation);
-
- if !operation.done {
- return future::err(ExecutionError::NotFinished(operation.name)).to_boxed();
- }
- let execute_response = if let Some(result) = operation.result {
- match result {
- bazel_protos::google::longrunning::operation::Result::Error(ref status) => {
- return future::err(ExecutionError::Fatal(format_error(status))).to_boxed();
- }
- bazel_protos::google::longrunning::operation::Result::Response(ref any) => try_future!(
- bazel_protos::build::bazel::remote::execution::v2::ExecuteResponse::decode(&any.value)
- .map_err(|e| ExecutionError::Fatal(format!("Invalid ExecuteResponse: {:?}", e)))
- ),
- }
- } else {
- return future::err(ExecutionError::Fatal(
- "Operation finished but no response supplied".to_string(),
- ))
- .to_boxed();
- };
+ trace!("Got operation response: {:?}", operation_or_status);
- trace!("Got (nested) execute response: {:?}", execute_response);
-
- if let Some(ref result) = execute_response.result {
- if let Some(ref metadata) = result.execution_metadata {
- let enqueued = timespec_from(&metadata.queued_timestamp);
- let worker_start = timespec_from(&metadata.worker_start_timestamp);
- let input_fetch_start = timespec_from(&metadata.input_fetch_start_timestamp);
- let input_fetch_completed = timespec_from(&metadata.input_fetch_completed_timestamp);
- let execution_start = timespec_from(&metadata.execution_start_timestamp);
- let execution_completed = timespec_from(&metadata.execution_completed_timestamp);
- let output_upload_start = timespec_from(&metadata.output_upload_start_timestamp);
- let output_upload_completed = timespec_from(&metadata.output_upload_completed_timestamp);
-
- match (worker_start - enqueued).to_std() {
- Ok(duration) => attempts.current_attempt.remote_queue = Some(duration),
- Err(err) => warn!("Got negative remote queue time: {}", err),
- }
- match (input_fetch_completed - input_fetch_start).to_std() {
- Ok(duration) => attempts.current_attempt.remote_input_fetch = Some(duration),
- Err(err) => warn!("Got negative remote input fetch time: {}", err),
+ let status = match operation_or_status {
+ OperationOrStatus::Operation(mut operation) => {
+ if !operation.get_done() {
+ return future::err(ExecutionError::NotFinished(operation.take_name())).to_boxed();
}
- match (execution_completed - execution_start).to_std() {
- Ok(duration) => attempts.current_attempt.remote_execution = Some(duration),
- Err(err) => warn!("Got negative remote execution time: {}", err),
+ if operation.has_error() {
+ return future::err(ExecutionError::Fatal(format_error(&operation.get_error())))
+ .to_boxed();
}
- match (output_upload_completed - output_upload_start).to_std() {
- Ok(duration) => attempts.current_attempt.remote_output_store = Some(duration),
- Err(err) => warn!("Got negative remote output store time: {}", err),
+ if !operation.has_response() {
+ return future::err(ExecutionError::Fatal(
+ "Operation finished but no response supplied".to_string(),
+ ))
+ .to_boxed();
}
- attempts.current_attempt.was_cache_hit = execute_response.cached_result;
- }
- }
- let mut execution_attempts = std::mem::replace(&mut attempts.attempts, vec![]);
- execution_attempts.push(attempts.current_attempt);
-
- let maybe_result = execute_response.result;
+ let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new();
+ try_future!(execute_response
+ .merge_from_bytes(operation.get_response().get_value())
+ .map_err(|e| ExecutionError::Fatal(format!("Invalid ExecuteResponse: {:?}", e))));
+ trace!("Got (nested) execute response: {:?}", execute_response);
+
+ if execute_response.get_result().has_execution_metadata() {
+ let metadata = execute_response.get_result().get_execution_metadata();
+ let enqueued = timespec_from(metadata.get_queued_timestamp());
+ let worker_start = timespec_from(metadata.get_worker_start_timestamp());
+ let input_fetch_start = timespec_from(metadata.get_input_fetch_start_timestamp());
+ let input_fetch_completed = timespec_from(metadata.get_input_fetch_completed_timestamp());
+ let execution_start = timespec_from(metadata.get_execution_start_timestamp());
+ let execution_completed = timespec_from(metadata.get_execution_completed_timestamp());
+ let output_upload_start = timespec_from(metadata.get_output_upload_start_timestamp());
+ let output_upload_completed =
+ timespec_from(metadata.get_output_upload_completed_timestamp());
+
+ match (worker_start - enqueued).to_std() {
+ Ok(duration) => attempts.current_attempt.remote_queue = Some(duration),
+ Err(err) => warn!("Got negative remote queue time: {}", err),
+ }
+ match (input_fetch_completed - input_fetch_start).to_std() {
+ Ok(duration) => attempts.current_attempt.remote_input_fetch = Some(duration),
+ Err(err) => warn!("Got negative remote input fetch time: {}", err),
+ }
+ match (execution_completed - execution_start).to_std() {
+ Ok(duration) => attempts.current_attempt.remote_execution = Some(duration),
+ Err(err) => warn!("Got negative remote execution time: {}", err),
+ }
+ match (output_upload_completed - output_upload_start).to_std() {
+ Ok(duration) => attempts.current_attempt.remote_output_store = Some(duration),
+ Err(err) => warn!("Got negative remote output store time: {}", err),
+ }
+ attempts.current_attempt.was_cache_hit = execute_response.cached_result;
+ }
- let status = execute_response
- .status
- .unwrap_or_else(|| bazel_protos::google::rpc::Status {
- code: bazel_protos::google::rpc::Code::Ok.into(),
- message: String::new(),
- details: vec![],
- });
- if status.code == bazel_protos::google::rpc::Code::Ok.into() {
- if let Some(result) = maybe_result {
- return self
- .extract_stdout(&result)
- .join(self.extract_stderr(&result))
- .join(self.extract_output_files(&result))
- .and_then(move |((stdout, stderr), output_directory)| {
- Ok(FallibleExecuteProcessResult {
- stdout: stdout,
- stderr: stderr,
- exit_code: result.exit_code,
- output_directory: output_directory,
- execution_attempts: execution_attempts,
+ let mut execution_attempts = std::mem::replace(&mut attempts.attempts, vec![]);
+ execution_attempts.push(attempts.current_attempt);
+
+ let status = execute_response.take_status();
+ if grpcio::RpcStatusCode::from(status.get_code()) == grpcio::RpcStatusCode::Ok {
+ return self
+ .extract_stdout(&execute_response)
+ .join(self.extract_stderr(&execute_response))
+ .join(self.extract_output_files(&execute_response))
+ .and_then(move |((stdout, stderr), output_directory)| {
+ Ok(FallibleExecuteProcessResult {
+ stdout: stdout,
+ stderr: stderr,
+ exit_code: execute_response.get_result().get_exit_code(),
+ output_directory: output_directory,
+ execution_attempts: execution_attempts,
+ })
})
- })
- .to_boxed();
- } else {
- return futures::future::err(ExecutionError::Fatal(
- "No result found on ExecuteResponse".to_owned(),
- ))
- .to_boxed();
+ .to_boxed();
+ }
+ status
}
- }
+ OperationOrStatus::Status(status) => status,
+ };
- match bazel_protos::code_from_i32(status.code) {
- bazel_protos::google::rpc::Code::Ok => unreachable!(),
- bazel_protos::google::rpc::Code::FailedPrecondition => {
- if status.details.len() != 1 {
+ match grpcio::RpcStatusCode::from(status.get_code()) {
+ grpcio::RpcStatusCode::Ok => unreachable!(),
+ grpcio::RpcStatusCode::FailedPrecondition => {
+ if status.get_details().len() != 1 {
return future::err(ExecutionError::Fatal(format!(
"Received multiple details in FailedPrecondition ExecuteResponse's status field: {:?}",
- status.details
+ status.get_details()
)))
.to_boxed();
}
- let details = &status.details[0];
+ let details = status.get_details().get(0).unwrap();
let mut precondition_failure = bazel_protos::error_details::PreconditionFailure::new();
- if details.type_url
+ if details.get_type_url()
!= format!(
"type.googleapis.com/{}",
precondition_failure.descriptor().full_name()
@@ -526,12 +474,13 @@ impl CommandRunner {
return future::err(ExecutionError::Fatal(format!(
"Received FailedPrecondition, but didn't know how to resolve it: {},\
protobuf type {}",
- status.message, details.type_url
+ status.get_message(),
+ details.get_type_url()
)))
.to_boxed();
}
try_future!(precondition_failure
- .merge_from_bytes(&details.value)
+ .merge_from_bytes(details.get_value())
.map_err(|e| ExecutionError::Fatal(format!(
"Error deserializing FailedPrecondition proto: {:?}",
e
@@ -579,7 +528,8 @@ impl CommandRunner {
}
code => future::err(ExecutionError::Fatal(format!(
"Error from remote execution: {:?}: {:?}",
- code, status.message
+ code,
+ status.get_message()
)))
.to_boxed(),
}
@@ -588,10 +538,11 @@ impl CommandRunner {
fn extract_stdout(
&self,
- result: &bazel_protos::build::bazel::remote::execution::v2::ActionResult,
+ execute_response: &bazel_protos::remote_execution::ExecuteResponse,
) -> BoxFuture<Bytes, ExecutionError> {
- if let Some(ref stdout_digest) = result.stdout_digest {
- let stdout_digest_result: Result<Digest, String> = stdout_digest.into();
+ if execute_response.get_result().has_stdout_digest() {
+ let stdout_digest_result: Result<Digest, String> =
+ execute_response.get_result().get_stdout_digest().into();
let stdout_digest = try_future!(stdout_digest_result
.map_err(|err| ExecutionError::Fatal(format!("Error extracting stdout: {}", err))));
self
@@ -613,7 +564,7 @@ impl CommandRunner {
})
.to_boxed()
} else {
- let stdout_raw = Bytes::from(result.stdout_raw.as_slice());
+ let stdout_raw = Bytes::from(execute_response.get_result().get_stdout_raw());
let stdout_copy = stdout_raw.clone();
self
.store
@@ -628,10 +579,11 @@ impl CommandRunner {
fn extract_stderr(
&self,
- result: &bazel_protos::build::bazel::remote::execution::v2::ActionResult,
+ execute_response: &bazel_protos::remote_execution::ExecuteResponse,
) -> BoxFuture<Bytes, ExecutionError> {
- if let Some(ref stderr_digest) = result.stderr_digest {
- let stderr_digest_result: Result<Digest, String> = stderr_digest.into();
+ if execute_response.get_result().has_stderr_digest() {
+ let stderr_digest_result: Result<Digest, String> =
+ execute_response.get_result().get_stderr_digest().into();
let stderr_digest = try_future!(stderr_digest_result
.map_err(|err| ExecutionError::Fatal(format!("Error extracting stderr: {}", err))));
self
@@ -653,7 +605,7 @@ impl CommandRunner {
})
.to_boxed()
} else {
- let stderr_raw = Bytes::from(result.stderr_raw.as_slice());
+ let stderr_raw = Bytes::from(execute_response.get_result().get_stderr_raw());
let stderr_copy = stderr_raw.clone();
self
.store
@@ -668,16 +620,21 @@ impl CommandRunner {
fn extract_output_files(
&self,
- result: &bazel_protos::build::bazel::remote::execution::v2::ActionResult,
+ execute_response: &bazel_protos::remote_execution::ExecuteResponse,
) -> BoxFuture<Digest, ExecutionError> {
// Get Digests of output Directories.
// Then we'll make a Directory for the output files, and merge them.
- let output_directories = result.output_directories.clone();
- let mut directory_digests = Vec::with_capacity(output_directories.len() + 1);
+ let mut directory_digests =
+ Vec::with_capacity(execute_response.get_result().get_output_directories().len() + 1);
+ // TODO: Maybe take rather than clone
+ let output_directories = execute_response
+ .get_result()
+ .get_output_directories()
+ .to_owned();
for dir in output_directories {
- let digest_result: Result<Digest, String> = (&dir.tree_digest.unwrap()).into();
+ let digest_result: Result<Digest, String> = dir.get_tree_digest().into();
let mut digest = future::done(digest_result).to_boxed();
- for component in dir.path.rsplit('/') {
+ for component in dir.get_path().rsplit('/') {
let component = component.to_owned();
let store = self.store.clone();
digest = digest
@@ -700,21 +657,19 @@ impl CommandRunner {
// Make a directory for the files
let mut path_map = HashMap::new();
- let output_files = result.output_files.clone();
- let path_stats_result: Result<Vec<PathStat>, String> = output_files
- .into_iter()
+ let path_stats_result: Result<Vec<PathStat>, String> = execute_response
+ .get_result()
+ .get_output_files()
+ .iter()
.map(|output_file| {
- let output_file_path_buf = PathBuf::from(output_file.path);
- let digest = output_file
- .digest
- .ok_or_else(|| "No digest on remote execution output file".to_string())?;
- let digest: Result<Digest, String> = (&digest).into();
+ let output_file_path_buf = PathBuf::from(output_file.get_path());
+ let digest: Result<Digest, String> = output_file.get_digest().into();
path_map.insert(output_file_path_buf.clone(), digest?);
Ok(PathStat::file(
output_file_path_buf.clone(),
File {
path: output_file_path_buf,
- is_executable: output_file.is_executable,
+ is_executable: output_file.get_is_executable(),
},
))
})
@@ -782,7 +737,7 @@ fn make_execute_request(
(
bazel_protos::remote_execution::Action,
bazel_protos::remote_execution::Command,
- bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest,
+ bazel_protos::remote_execution::ExecuteRequest,
),
String,
> {
@@ -851,43 +806,38 @@ fn make_execute_request(
action.set_command_digest((&digest(&command)?).into());
action.set_input_root_digest((&req.input_files).into());
- let execute_request = bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest {
- action_digest: Some((&digest(&action)?).into()),
- skip_cache_lookup: false,
- instance_name: instance_name.clone().unwrap_or_default(),
- execution_policy: None,
- results_cache_policy: None,
- };
+ let mut execute_request = bazel_protos::remote_execution::ExecuteRequest::new();
+ if let Some(instance_name) = instance_name {
+ execute_request.set_instance_name(instance_name.clone());
+ }
+ execute_request.set_action_digest((&digest(&action)?).into());
Ok((action, command, execute_request))
}
-fn format_error(error: &bazel_protos::google::rpc::Status) -> String {
- let error_code_enum = bazel_protos::code::Code::from_i32(error.code);
+fn format_error(error: &bazel_protos::status::Status) -> String {
+ let error_code_enum = bazel_protos::code::Code::from_i32(error.get_code());
let error_code = match error_code_enum {
Some(x) => format!("{:?}", x),
- None => format!("{:?}", error.code),
+ None => format!("{:?}", error.get_code()),
};
- format!("{}: {}", error_code, error.message)
+ format!("{}: {}", error_code, error.get_message())
}
///
/// If the given operation represents a cancelled request, recover it into
/// ExecutionError::NotFinished.
///
-fn rpcerror_recover_cancelled(
+fn rpcerror_recover_cancelled(
operation_name: String,
- err: tower_grpc::Error,
-) -> Result> {
+ err: grpcio::Error,
+) -> Result<bazel_protos::operations::Operation, grpcio::Error> {
// If the error represented cancellation, return an Operation for the given Operation name.
match &err {
- &tower_grpc::Error::Grpc(ref status) if status.code() == tower_grpc::Code::Cancelled => {
- return Ok(bazel_protos::google::longrunning::Operation {
- name: operation_name,
- done: false,
- metadata: None,
- result: None,
- });
+ &grpcio::Error::RpcFailure(ref rs) if rs.status == grpcio::RpcStatusCode::Cancelled => {
+ let mut next_operation = bazel_protos::operations::Operation::new();
+ next_operation.set_name(operation_name);
+ return Ok(next_operation);
}
_ => {}
}
@@ -895,21 +845,41 @@ fn rpcerror_recover_cancelled(
Err(err)
}
-fn towergrpcerror_to_string(error: tower_grpc::Error) -> String {
+fn rpcerror_to_status_or_string(
+ error: grpcio::Error,
+) -> Result<bazel_protos::status::Status, String> {
match error {
- tower_grpc::Error::Grpc(status) => {
- let error_message = if status.error_message() == "" {
- "[no message]"
- } else {
- &status.error_message()
- };
- format!("{:?}: {}", status.code(), error_message)
+ grpcio::Error::RpcFailure(grpcio::RpcStatus {
+ status_proto_bytes: Some(status_proto_bytes),
+ ..
+ }) => {
+ let mut status_proto = bazel_protos::status::Status::new();
+ status_proto.merge_from_bytes(&status_proto_bytes).unwrap();
+ Ok(status_proto)
}
- tower_grpc::Error::Inner(v) => format!("{:?}", v),
+ grpcio::Error::RpcFailure(grpcio::RpcStatus {
+ status, details, ..
+ }) => Err(format!(
+ "{:?}: {:?}",
+ status,
+ details.unwrap_or_else(|| "[no message]".to_string())
+ )),
+ err => Err(format!("{:?}", err)),
+ }
+}
+
+fn rpcerror_to_string(error: grpcio::Error) -> String {
+ match error {
+ grpcio::Error::RpcFailure(status) => format!(
+ "{:?}: {:?}",
+ status.status,
+ status.details.unwrap_or_else(|| "[no message]".to_string())
+ ),
+ err => format!("{:?}", err),
}
}
-fn digest(message: &dyn GrpcioMessage) -> Result<Digest, String> {
+fn digest(message: &dyn Message) -> Result<Digest, String> {
let bytes = message.write_to_bytes().map_err(|e| format!("{:?}", e))?;
let mut hasher = Sha256::default();
@@ -921,25 +891,20 @@ fn digest(message: &dyn GrpcioMessage) -> Result {
))
}
-fn timespec_from(timestamp: &Option<prost_types::Timestamp>) -> time::Timespec {
- if let Some(timestamp) = timestamp {
- time::Timespec::new(timestamp.seconds, timestamp.nanos)
- } else {
- time::Timespec::new(0, 0)
- }
+fn timespec_from(timestamp: &protobuf::well_known_types::Timestamp) -> time::Timespec {
+ time::Timespec::new(timestamp.seconds, timestamp.nanos)
}
#[cfg(test)]
mod tests {
use bazel_protos;
- use bytes::{Bytes, BytesMut};
+ use bytes::Bytes;
use fs;
use futures::Future;
+ use grpcio;
use hashing::{Digest, Fingerprint};
use mock;
- use prost::Message;
- use prost_types;
- use protobuf::{self, ProtobufEnum};
+ use protobuf::{self, Message, ProtobufEnum};
use tempfile::TempDir;
use testutil::data::{TestData, TestDirectory};
use testutil::{as_bytes, owned_string_vec};
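
extract_execute_response turns each pair of execution-metadata Timestamps into time::Timespec values and subtracts them; to_std() fails for negative intervals, which is why each subtraction is matched and logged rather than unwrapped. A small worked example of that arithmetic (a sketch using the same rust-protobuf and time 0.1 APIs):

use protobuf::well_known_types::Timestamp;

fn timespec_from(timestamp: &Timestamp) -> time::Timespec {
  time::Timespec::new(timestamp.seconds, timestamp.nanos)
}

fn main() {
  let mut enqueued = Timestamp::new();
  enqueued.set_seconds(10);

  let mut worker_start = Timestamp::new();
  worker_start.set_seconds(12);
  worker_start.set_nanos(500_000_000);

  // Timespec subtraction yields a time::Duration; to_std() errors if it is negative.
  let queued = (timespec_from(&worker_start) - timespec_from(&enqueued))
    .to_std()
    .expect("worker_start should not precede enqueued");
  assert_eq!(queued, std::time::Duration::from_millis(2_500));
}
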
@@ -1024,19 +989,17 @@ mod tests {
);
want_action.set_input_root_digest((&input_directory.digest()).into());
- let want_execute_request = bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest {
- action_digest: Some(
- (&Digest(
- Fingerprint::from_hex_string(
- "844c929423444f3392e0dcc89ebf1febbfdf3a2e2fcab7567cc474705a5385e4",
- )
- .unwrap(),
- 140,
- ))
- .into(),
- ),
- ..Default::default()
- };
+ let mut want_execute_request = bazel_protos::remote_execution::ExecuteRequest::new();
+ want_execute_request.set_action_digest(
+ (&Digest(
+ Fingerprint::from_hex_string(
+ "844c929423444f3392e0dcc89ebf1febbfdf3a2e2fcab7567cc474705a5385e4",
+ )
+ .unwrap(),
+ 140,
+ ))
+ .into(),
+ );
assert_eq!(
super::make_execute_request(&req, &None, &None),
@@ -1112,21 +1075,6 @@ mod tests {
.into(),
);
- let want_execute_request = bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest {
- action_digest: Some(
- (&Digest(
- Fingerprint::from_hex_string(
- "844c929423444f3392e0dcc89ebf1febbfdf3a2e2fcab7567cc474705a5385e4",
- )
- .unwrap(),
- 140,
- ))
- .into(),
- ),
- instance_name: "dark-tower".to_owned(),
- ..Default::default()
- };
-
assert_eq!(
super::make_execute_request(&req, &Some("dark-tower".to_owned()), &None),
Ok((want_action, want_command, want_execute_request))
@@ -1194,19 +1142,17 @@ mod tests {
);
want_action.set_input_root_digest((&input_directory.digest()).into());
- let want_execute_request = bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest {
- action_digest: Some(
- (&Digest(
- Fingerprint::from_hex_string(
- "0ee5d4c8ac12513a87c8d949c6883ac533a264d30215126af71a9028c4ab6edf",
- )
- .unwrap(),
- 140,
- ))
- .into(),
- ),
- ..Default::default()
- };
+ let mut want_execute_request = bazel_protos::remote_execution::ExecuteRequest::new();
+ want_execute_request.set_action_digest(
+ (&Digest(
+ Fingerprint::from_hex_string(
+ "0ee5d4c8ac12513a87c8d949c6883ac533a264d30215126af71a9028c4ab6edf",
+ )
+ .unwrap(),
+ 140,
+ ))
+ .into(),
+ );
assert_eq!(
super::make_execute_request(&req, &None, &Some("meep".to_owned())),
@@ -1251,19 +1197,17 @@ mod tests {
);
want_action.set_input_root_digest((&input_directory.digest()).into());
- let want_execute_request = bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest {
- action_digest: Some(
- (&Digest(
- Fingerprint::from_hex_string(
- "b1fb7179ce496995a4e3636544ec000dca1b951f1f6216493f6c7608dc4dd910",
- )
- .unwrap(),
- 140,
- ))
- .into(),
- ),
- ..Default::default()
- };
+ let mut want_execute_request = bazel_protos::remote_execution::ExecuteRequest::new();
+ want_execute_request.set_action_digest(
+ (&Digest(
+ Fingerprint::from_hex_string(
+ "b1fb7179ce496995a4e3636544ec000dca1b951f1f6216493f6c7608dc4dd910",
+ )
+ .unwrap(),
+ 140,
+ ))
+ .into(),
+ );
assert_eq!(
super::make_execute_request(&req, &None, &None),
@@ -1301,7 +1245,7 @@ mod tests {
let error = run_command_remote(mock_server.address(), execute_request).expect_err("Want Err");
assert_eq!(
error,
- "InvalidArgument: Did not expect this request".to_string()
+ "InvalidArgument: \"Did not expect this request\"".to_string()
);
}
@@ -1444,19 +1388,17 @@ mod tests {
)
.expect("Failed to make store");
- let mut rt = tokio::runtime::Runtime::new().unwrap();
-
let cmd_runner = CommandRunner::new(
&mock_server.address(),
None,
None,
None,
+ None,
+ 1,
store,
timer_thread,
- )
- .unwrap();
- let result = rt.block_on(cmd_runner.run(echo_roland_request())).unwrap();
- rt.shutdown_now().wait().unwrap();
+ );
+ let result = cmd_runner.run(echo_roland_request()).wait().unwrap();
assert_eq!(
result.without_execution_attempts(),
FallibleExecuteProcessResult {
@@ -1621,17 +1563,21 @@ mod tests {
vec![
make_incomplete_operation(&op_name),
MockOperation::new({
- bazel_protos::google::longrunning::Operation {
- name: op_name.clone(),
- done: true,
- result: Some(
- bazel_protos::google::longrunning::operation::Result::Response(prost_types::Any {
- type_url: "build.bazel.remote.execution.v2.ExecuteResponse".to_string(),
- value: vec![0x00, 0x00, 0x00],
- }),
- ),
- ..Default::default()
- }
+ let mut op = bazel_protos::operations::Operation::new();
+ op.set_name(op_name.clone());
+ op.set_done(true);
+ op.set_response({
+ let mut response_wrapper = protobuf::well_known_types::Any::new();
+ response_wrapper.set_type_url(format!(
+ "type.googleapis.com/{}",
+ bazel_protos::remote_execution::ExecuteResponse::new()
+ .descriptor()
+ .full_name()
+ ));
+ response_wrapper.set_value(vec![0x00, 0x00, 0x00]);
+ response_wrapper
+ });
+ op
}),
],
))
@@ -1652,20 +1598,18 @@ mod tests {
super::make_execute_request(&execute_request, &None, &None)
.unwrap()
.2,
- vec![MockOperation::new(
- bazel_protos::google::longrunning::Operation {
- name: op_name.clone(),
- done: true,
- result: Some(bazel_protos::google::longrunning::operation::Result::Error(
- bazel_protos::google::rpc::Status {
- code: bazel_protos::code::Code::INTERNAL.value(),
- message: "Something went wrong".to_string(),
- details: vec![],
- },
- )),
- ..Default::default()
- },
- )],
+ vec![MockOperation::new({
+ let mut op = bazel_protos::operations::Operation::new();
+ op.set_name(op_name.to_string());
+ op.set_done(true);
+ op.set_error({
+ let mut error = bazel_protos::status::Status::new();
+ error.set_code(bazel_protos::code::Code::INTERNAL.value());
+ error.set_message("Something went wrong".to_string());
+ error
+ });
+ op
+ })],
))
};
@@ -1688,17 +1632,17 @@ mod tests {
.2,
vec![
make_incomplete_operation(&op_name),
- MockOperation::new(bazel_protos::google::longrunning::Operation {
- name: op_name.clone(),
- done: true,
- result: Some(bazel_protos::google::longrunning::operation::Result::Error(
- bazel_protos::google::rpc::Status {
- code: bazel_protos::code::Code::INTERNAL.value(),
- message: "Something went wrong".to_string(),
- details: vec![],
- },
- )),
- ..Default::default()
+ MockOperation::new({
+ let mut op = bazel_protos::operations::Operation::new();
+ op.set_name(op_name.to_string());
+ op.set_done(true);
+ op.set_error({
+ let mut error = bazel_protos::status::Status::new();
+ error.set_code(bazel_protos::code::Code::INTERNAL.value());
+ error.set_message("Something went wrong".to_string());
+ error
+ });
+ op
}),
],
))
@@ -1721,14 +1665,12 @@ mod tests {
super::make_execute_request(&execute_request, &None, &None)
.unwrap()
.2,
- vec![MockOperation::new(
- bazel_protos::google::longrunning::Operation {
- name: op_name.clone(),
- done: true,
- result: None,
- ..Default::default()
- },
- )],
+ vec![MockOperation::new({
+ let mut op = bazel_protos::operations::Operation::new();
+ op.set_name(op_name.to_string());
+ op.set_done(true);
+ op
+ })],
))
};
@@ -1751,11 +1693,11 @@ mod tests {
.2,
vec![
make_incomplete_operation(&op_name),
- MockOperation::new(bazel_protos::google::longrunning::Operation {
- name: op_name.clone(),
- done: true,
- result: None,
- ..Default::default()
+ MockOperation::new({
+ let mut op = bazel_protos::operations::Operation::new();
+ op.set_name(op_name.to_string());
+ op.set_done(true);
+ op
}),
],
))
@@ -1822,23 +1764,21 @@ mod tests {
.wait()
.expect("Saving directory bytes to store");
- let mut rt = tokio::runtime::Runtime::new().unwrap();
-
- let result = rt.block_on(
- CommandRunner::new(
- &mock_server.address(),
- None,
- None,
- None,
- store,
- timer_thread,
- )
- .unwrap()
- .run(cat_roland_request()),
- );
- rt.shutdown_now().wait().unwrap();
+ let result = CommandRunner::new(
+ &mock_server.address(),
+ None,
+ None,
+ None,
+ None,
+ 1,
+ store,
+ timer_thread,
+ )
+ .run(cat_roland_request())
+ .wait()
+ .unwrap();
assert_eq!(
- result.unwrap().without_execution_attempts(),
+ result.without_execution_attempts(),
FallibleExecuteProcessResult {
stdout: roland.bytes(),
stderr: Bytes::from(""),
@@ -1862,9 +1802,17 @@ mod tests {
let mock_server = {
let op_name = "cat".to_owned();
- let status = make_precondition_failure_status(vec![missing_preconditionfailure_violation(
- &roland.digest(),
- )]);
+ let status = grpcio::RpcStatus {
+ status: grpcio::RpcStatusCode::FailedPrecondition,
+ details: None,
+ status_proto_bytes: Some(
+ make_precondition_failure_status(vec![missing_preconditionfailure_violation(
+ &roland.digest(),
+ )])
+ .write_to_bytes()
+ .unwrap(),
+ ),
+ };
mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new(
op_name.clone(),
@@ -1912,19 +1860,18 @@ mod tests {
.wait()
.expect("Saving file bytes to store");
- let mut rt = tokio::runtime::Runtime::new().unwrap();
- let result = rt.block_on(
- CommandRunner::new(
- &mock_server.address(),
- None,
- None,
- None,
- store,
- timer_thread,
- )
- .unwrap()
- .run(cat_roland_request()),
- );
+ let result = CommandRunner::new(
+ &mock_server.address(),
+ None,
+ None,
+ None,
+ None,
+ 1,
+ store,
+ timer_thread,
+ )
+ .run(cat_roland_request())
+ .wait();
assert_eq!(
result,
Ok(FallibleExecuteProcessResult {
@@ -1981,31 +1928,27 @@ mod tests {
)
.expect("Failed to make store");
- let mut rt = tokio::runtime::Runtime::new().unwrap();
- let result = rt.block_on(
- CommandRunner::new(
- &mock_server.address(),
- None,
- None,
- None,
- store,
- timer_thread,
- )
- .unwrap()
- .run(cat_roland_request()),
- );
- rt.shutdown_now().wait().unwrap();
- let error = result.expect_err("Want error");
+ let error = CommandRunner::new(
+ &mock_server.address(),
+ None,
+ None,
+ None,
+ None,
+ 1,
+ store,
+ timer_thread,
+ )
+ .run(cat_roland_request())
+ .wait()
+ .expect_err("Want error");
assert_contains(&error, &format!("{}", missing_digest.0));
}
#[test]
fn format_error_complete() {
- let error = bazel_protos::google::rpc::Status {
- code: bazel_protos::code::Code::CANCELLED.value(),
- message: "Oops, oh well!".to_string(),
- details: vec![],
- };
+ let mut error = bazel_protos::status::Status::new();
+ error.set_code(bazel_protos::code::Code::CANCELLED.value());
+ error.set_message("Oops, oh well!".to_string());
assert_eq!(
super::format_error(&error),
"CANCELLED: Oops, oh well!".to_string()
@@ -2014,11 +1957,9 @@ mod tests {
#[test]
fn extract_execute_response_unknown_code() {
- let error = bazel_protos::google::rpc::Status {
- code: 555,
- message: "Oops, oh well!".to_string(),
- details: vec![],
- };
+ let mut error = bazel_protos::status::Status::new();
+ error.set_code(555);
+ error.set_message("Oops, oh well!".to_string());
assert_eq!(
super::format_error(&error),
"555: Oops, oh well!".to_string()
@@ -2035,35 +1976,28 @@ mod tests {
execution_attempts: vec![],
};
- let response = bazel_protos::build::bazel::remote::execution::v2::ExecuteResponse {
- result: Some(
- bazel_protos::build::bazel::remote::execution::v2::ActionResult {
- exit_code: want_result.exit_code,
- stdout_raw: want_result.stdout.to_vec(),
- stderr_raw: want_result.stderr.to_vec(),
- output_files: vec![
- bazel_protos::build::bazel::remote::execution::v2::OutputFile {
- path: "cats/roland".to_string(),
- digest: Some((&TestData::roland().digest()).into()),
- is_executable: false,
- },
- ],
- ..Default::default()
- },
- ),
- ..Default::default()
- };
-
- let operation = bazel_protos::google::longrunning::Operation {
- name: "cat".to_owned(),
- done: true,
- result: Some(
- bazel_protos::google::longrunning::operation::Result::Response(
- make_any_prost_executeresponse(&response),
- ),
- ),
- ..Default::default()
- };
+ let mut output_file = bazel_protos::remote_execution::OutputFile::new();
+ output_file.set_path("cats/roland".into());
+ output_file.set_digest((&TestData::roland().digest()).into());
+ output_file.set_is_executable(false);
+ let mut output_files = protobuf::RepeatedField::new();
+ output_files.push(output_file);
+
+ let mut operation = bazel_protos::operations::Operation::new();
+ operation.set_name("cat".to_owned());
+ operation.set_done(true);
+ operation.set_response(make_any_proto(&{
+ let mut response = bazel_protos::remote_execution::ExecuteResponse::new();
+ response.set_result({
+ let mut result = bazel_protos::remote_execution::ActionResult::new();
+ result.set_exit_code(want_result.exit_code);
+ result.set_stdout_raw(Bytes::from(want_result.stdout.clone()));
+ result.set_stderr_raw(Bytes::from(want_result.stderr.clone()));
+ result.set_output_files(output_files);
+ result
+ });
+ response
+ }));
assert_eq!(
extract_execute_response(operation)
@@ -2076,11 +2010,9 @@ mod tests {
#[test]
fn extract_execute_response_pending() {
let operation_name = "cat".to_owned();
- let operation = bazel_protos::google::longrunning::Operation {
- name: operation_name.clone(),
- done: false,
- ..Default::default()
- };
+ let mut operation = bazel_protos::operations::Operation::new();
+ operation.set_name(operation_name.clone());
+ operation.set_done(false);
assert_eq!(
extract_execute_response(operation),
@@ -2115,10 +2047,11 @@ mod tests {
fn extract_execute_response_missing_other_things() {
let missing = vec![
missing_preconditionfailure_violation(&TestData::roland().digest()),
- bazel_protos::google::rpc::precondition_failure::Violation {
- type_: "MISSING".to_string(),
- subject: "monkeys".to_string(),
- description: "".to_string(),
+ {
+ let mut violation = bazel_protos::error_details::PreconditionFailure_Violation::new();
+ violation.set_field_type("MISSING".to_owned());
+ violation.set_subject("monkeys".to_owned());
+ violation
},
];
@@ -2135,9 +2068,10 @@ mod tests {
#[test]
fn extract_execute_response_other_failed_precondition() {
- let missing = vec![bazel_protos::google::rpc::precondition_failure::Violation {
- type_: "OUT_OF_CAPACITY".to_string(),
- ..Default::default()
+ let missing = vec![{
+ let mut violation = bazel_protos::error_details::PreconditionFailure_Violation::new();
+ violation.set_field_type("OUT_OF_CAPACITY".to_owned());
+ violation
}];
let operation = make_precondition_failure_operation(missing)
@@ -2168,24 +2102,18 @@ mod tests {
#[test]
fn extract_execute_response_other_status() {
- let operation = bazel_protos::google::longrunning::Operation {
- name: "cat".to_owned(),
- done: true,
- result: Some(
- bazel_protos::google::longrunning::operation::Result::Response(
- make_any_prost_executeresponse(
- &bazel_protos::build::bazel::remote::execution::v2::ExecuteResponse {
- status: Some(bazel_protos::google::rpc::Status {
- code: bazel_protos::google::rpc::Code::PermissionDenied.into(),
- ..Default::default()
- }),
- ..Default::default()
- },
- ),
- ),
- ),
- ..Default::default()
- };
+ let mut operation = bazel_protos::operations::Operation::new();
+ operation.set_name("cat".to_owned());
+ operation.set_done(true);
+ operation.set_response(make_any_proto(&{
+ let mut response = bazel_protos::remote_execution::ExecuteResponse::new();
+ response.set_status({
+ let mut status = bazel_protos::status::Status::new();
+ status.set_code(grpcio::RpcStatusCode::PermissionDenied as i32);
+ status
+ });
+ response
+ }));
match extract_execute_response(operation) {
Err(ExecutionError::Fatal(err)) => assert_contains(&err, "PermissionDenied"),
@@ -2314,90 +2242,103 @@ mod tests {
#[test]
fn extract_output_files_from_response_one_file() {
- let result = bazel_protos::build::bazel::remote::execution::v2::ActionResult {
- exit_code: 0,
- output_files: vec![
- bazel_protos::build::bazel::remote::execution::v2::OutputFile {
- path: "roland".to_string(),
- digest: Some((&TestData::roland().digest()).into()),
- is_executable: false,
- },
- ],
- ..Default::default()
- };
+ let mut output_file = bazel_protos::remote_execution::OutputFile::new();
+ output_file.set_path("roland".into());
+ output_file.set_digest((&TestData::roland().digest()).into());
+ output_file.set_is_executable(false);
+ let mut output_files = protobuf::RepeatedField::new();
+ output_files.push(output_file);
+
+ let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new();
+ execute_response.set_result({
+ let mut result = bazel_protos::remote_execution::ActionResult::new();
+ result.set_exit_code(0);
+ result.set_output_files(output_files);
+ result
+ });
+
assert_eq!(
- extract_output_files_from_response(&result),
+ extract_output_files_from_response(&execute_response),
Ok(TestDirectory::containing_roland().digest())
)
}
#[test]
fn extract_output_files_from_response_two_files_not_nested() {
- let output_files = vec![
- bazel_protos::build::bazel::remote::execution::v2::OutputFile {
- path: "roland".to_string(),
- digest: Some((&TestData::roland().digest()).into()),
- is_executable: false,
- },
- bazel_protos::build::bazel::remote::execution::v2::OutputFile {
- path: "treats".to_string(),
- digest: Some((&TestData::catnip().digest()).into()),
- is_executable: false,
- },
- ];
-
- let result = bazel_protos::build::bazel::remote::execution::v2::ActionResult {
- output_files,
- ..Default::default()
- };
+ let mut output_file_1 = bazel_protos::remote_execution::OutputFile::new();
+ output_file_1.set_path("roland".into());
+ output_file_1.set_digest((&TestData::roland().digest()).into());
+ output_file_1.set_is_executable(false);
+
+ let mut output_file_2 = bazel_protos::remote_execution::OutputFile::new();
+ output_file_2.set_path("treats".into());
+ output_file_2.set_digest((&TestData::catnip().digest()).into());
+ output_file_2.set_is_executable(false);
+ let mut output_files = protobuf::RepeatedField::new();
+ output_files.push(output_file_1);
+ output_files.push(output_file_2);
+
+ let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new();
+ execute_response.set_result({
+ let mut result = bazel_protos::remote_execution::ActionResult::new();
+ result.set_exit_code(0);
+ result.set_output_files(output_files);
+ result
+ });
assert_eq!(
- extract_output_files_from_response(&result),
+ extract_output_files_from_response(&execute_response),
Ok(TestDirectory::containing_roland_and_treats().digest())
)
}
#[test]
fn extract_output_files_from_response_two_files_nested() {
- let output_files = vec![
- bazel_protos::build::bazel::remote::execution::v2::OutputFile {
- path: "cats/roland".to_string(),
- digest: Some((&TestData::roland().digest()).into()),
- is_executable: false,
- },
- bazel_protos::build::bazel::remote::execution::v2::OutputFile {
- path: "treats".to_string(),
- digest: Some((&TestData::catnip().digest()).into()),
- is_executable: false,
- },
- ];
-
- let result = bazel_protos::build::bazel::remote::execution::v2::ActionResult {
- output_files,
- ..Default::default()
- };
+ let mut output_file_1 = bazel_protos::remote_execution::OutputFile::new();
+ output_file_1.set_path("cats/roland".into());
+ output_file_1.set_digest((&TestData::roland().digest()).into());
+ output_file_1.set_is_executable(false);
+
+ let mut output_file_2 = bazel_protos::remote_execution::OutputFile::new();
+ output_file_2.set_path("treats".into());
+ output_file_2.set_digest((&TestData::catnip().digest()).into());
+ output_file_2.set_is_executable(false);
+ let mut output_files = protobuf::RepeatedField::new();
+ output_files.push(output_file_1);
+ output_files.push(output_file_2);
+
+ let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new();
+ execute_response.set_result({
+ let mut result = bazel_protos::remote_execution::ActionResult::new();
+ result.set_exit_code(0);
+ result.set_output_files(output_files);
+ result
+ });
assert_eq!(
- extract_output_files_from_response(&result),
+ extract_output_files_from_response(&execute_response),
Ok(TestDirectory::recursive().digest())
)
}
#[test]
fn extract_output_files_from_response_just_directory() {
- let result = bazel_protos::build::bazel::remote::execution::v2::ActionResult {
- exit_code: 0,
- output_directories: vec![
- bazel_protos::build::bazel::remote::execution::v2::OutputDirectory {
- path: "cats".to_owned(),
- tree_digest: Some((&TestDirectory::containing_roland().digest()).into()),
- },
- ],
- ..Default::default()
- };
+ let mut output_directory = bazel_protos::remote_execution::OutputDirectory::new();
+ output_directory.set_path("cats".into());
+ output_directory.set_tree_digest((&TestDirectory::containing_roland().digest()).into());
+ let mut output_directories = protobuf::RepeatedField::new();
+ output_directories.push(output_directory);
+
+ let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new();
+ execute_response.set_result({
+ let mut result = bazel_protos::remote_execution::ActionResult::new();
+ result.set_exit_code(0);
+ result.set_output_directories(output_directories);
+ result
+ });
assert_eq!(
- extract_output_files_from_response(&result),
+ extract_output_files_from_response(&execute_response),
Ok(TestDirectory::nested().digest())
)
}
@@ -2408,29 +2349,40 @@ mod tests {
// /pets/cats/roland
// /pets/dogs/robin
- let result = bazel_protos::build::bazel::remote::execution::v2::ActionResult {
- output_files: vec![
- bazel_protos::build::bazel::remote::execution::v2::OutputFile {
- path: "treats".to_owned(),
- digest: Some((&TestData::catnip().digest()).into()),
- is_executable: false,
- },
- ],
- output_directories: vec![
- bazel_protos::build::bazel::remote::execution::v2::OutputDirectory {
- path: "pets/cats".to_owned(),
- tree_digest: Some((&TestDirectory::containing_roland().digest()).into()),
- },
- bazel_protos::build::bazel::remote::execution::v2::OutputDirectory {
- path: "pets/dogs".to_owned(),
- tree_digest: Some((&TestDirectory::containing_robin().digest()).into()),
- },
- ],
- ..Default::default()
- };
+ let mut output_directories = protobuf::RepeatedField::new();
+ output_directories.push({
+ let mut output_directory = bazel_protos::remote_execution::OutputDirectory::new();
+ output_directory.set_path("pets/cats".into());
+ output_directory.set_tree_digest((&TestDirectory::containing_roland().digest()).into());
+ output_directory
+ });
+ output_directories.push({
+ let mut output_directory = bazel_protos::remote_execution::OutputDirectory::new();
+ output_directory.set_path("pets/dogs".into());
+ output_directory.set_tree_digest((&TestDirectory::containing_robin().digest()).into());
+ output_directory
+ });
+
+ let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new();
+ execute_response.set_result({
+ let mut result = bazel_protos::remote_execution::ActionResult::new();
+ result.set_exit_code(0);
+ result.set_output_directories(output_directories);
+ result.set_output_files({
+ let mut output_files = protobuf::RepeatedField::new();
+ output_files.push({
+ let mut output_file = bazel_protos::remote_execution::OutputFile::new();
+ output_file.set_path("treats".into());
+ output_file.set_digest((&TestData::catnip().digest()).into());
+ output_file
+ });
+ output_files
+ });
+ result
+ });
assert_eq!(
- extract_output_files_from_response(&result),
+ extract_output_files_from_response(&execute_response),
Ok(Digest(
Fingerprint::from_hex_string(
"639b4b84bb58a9353d49df8122e7987baf038efe54ed035e67910846c865b1e2"
@@ -2462,19 +2414,16 @@ mod tests {
}
fn make_incomplete_operation(operation_name: &str) -> MockOperation {
- MockOperation::new(bazel_protos::google::longrunning::Operation {
- name: operation_name.to_string(),
- done: false,
- ..Default::default()
- })
+ let mut op = bazel_protos::operations::Operation::new();
+ op.set_name(operation_name.to_string());
+ op.set_done(false);
+ MockOperation::new(op)
}
fn make_delayed_incomplete_operation(operation_name: &str, delay: Duration) -> MockOperation {
- let op = bazel_protos::google::longrunning::Operation {
- name: operation_name.to_string(),
- done: false,
- ..Default::default()
- };
+ let mut op = bazel_protos::operations::Operation::new();
+ op.set_name(operation_name.to_string());
+ op.set_done(false);
MockOperation {
op: Ok(Some(op)),
duration: Some(delay),
@@ -2487,74 +2436,72 @@ mod tests {
stderr: StderrType,
exit_code: i32,
) -> MockOperation {
- let (stdout_raw, stdout_digest) = match stdout {
- StdoutType::Raw(stdout_raw) => (stdout_raw.as_bytes().to_vec(), None),
- StdoutType::Digest(stdout_digest) => (vec![], Some((&stdout_digest).into())),
- };
-
- let (stderr_raw, stderr_digest) = match stderr {
- StderrType::Raw(stderr_raw) => (stderr_raw.as_bytes().to_vec(), None),
- StderrType::Digest(stderr_digest) => (vec![], Some((&stderr_digest).into())),
- };
-
- let response_proto = bazel_protos::build::bazel::remote::execution::v2::ExecuteResponse {
- result: Some(
- bazel_protos::build::bazel::remote::execution::v2::ActionResult {
- stdout_raw,
- stdout_digest,
- stderr_raw,
- stderr_digest,
- exit_code,
- ..Default::default()
- },
- ),
- ..Default::default()
- };
+ let mut op = bazel_protos::operations::Operation::new();
+ op.set_name(operation_name.to_string());
+ op.set_done(true);
+ op.set_response({
+ let mut response_proto = bazel_protos::remote_execution::ExecuteResponse::new();
+ response_proto.set_result({
+ let mut action_result = bazel_protos::remote_execution::ActionResult::new();
+ match stdout {
+ StdoutType::Raw(stdout_raw) => {
+ action_result.set_stdout_raw(Bytes::from(stdout_raw));
+ }
+ StdoutType::Digest(stdout_digest) => {
+ action_result.set_stdout_digest((&stdout_digest).into());
+ }
+ }
+ match stderr {
+ StderrType::Raw(stderr_raw) => {
+ action_result.set_stderr_raw(Bytes::from(stderr_raw));
+ }
+ StderrType::Digest(stderr_digest) => {
+ action_result.set_stderr_digest((&stderr_digest).into());
+ }
+ }
+ action_result.set_exit_code(exit_code);
+ action_result
+ });
- let op = bazel_protos::google::longrunning::Operation {
- name: operation_name.to_string(),
- done: true,
- result: Some(
- bazel_protos::google::longrunning::operation::Result::Response(
- make_any_prost_executeresponse(&response_proto),
- ),
- ),
- ..Default::default()
- };
+ let mut response_wrapper = protobuf::well_known_types::Any::new();
+ response_wrapper.set_type_url(format!(
+ "type.googleapis.com/{}",
+ response_proto.descriptor().full_name()
+ ));
+ let response_proto_bytes = response_proto.write_to_bytes().unwrap();
+ response_wrapper.set_value(response_proto_bytes);
+ response_wrapper
+ });
MockOperation::new(op)
}
fn make_precondition_failure_operation(
- violations: Vec<bazel_protos::google::rpc::precondition_failure::Violation>,
+ violations: Vec<bazel_protos::error_details::PreconditionFailure_Violation>,
) -> MockOperation {
- let response = bazel_protos::build::bazel::remote::execution::v2::ExecuteResponse {
- status: Some(make_precondition_failure_status(violations)),
- ..Default::default()
- };
- let operation = bazel_protos::google::longrunning::Operation {
- name: "cat".to_string(),
- done: true,
- result: Some(
- bazel_protos::google::longrunning::operation::Result::Response(
- make_any_prost_executeresponse(&response),
- ),
- ),
- ..Default::default()
- };
+ let mut operation = bazel_protos::operations::Operation::new();
+ operation.set_name("cat".to_owned());
+ operation.set_done(true);
+ operation.set_response(make_any_proto(&{
+ let mut response = bazel_protos::remote_execution::ExecuteResponse::new();
+ response.set_status(make_precondition_failure_status(violations));
+ response
+ }));
MockOperation::new(operation)
}
fn make_precondition_failure_status(
- violations: Vec<bazel_protos::google::rpc::precondition_failure::Violation>,
- ) -> bazel_protos::google::rpc::Status {
- bazel_protos::google::rpc::Status {
- code: bazel_protos::google::rpc::Code::FailedPrecondition.into(),
- details: vec![make_any_prost_proto(
- "google.rpc.PreconditionFailure",
- &bazel_protos::google::rpc::PreconditionFailure { violations },
- )],
- ..Default::default()
- }
+ violations: Vec<bazel_protos::error_details::PreconditionFailure_Violation>,
+ ) -> bazel_protos::status::Status {
+ let mut status = bazel_protos::status::Status::new();
+ status.set_code(grpcio::RpcStatusCode::FailedPrecondition as i32);
+ status.mut_details().push(make_any_proto(&{
+ let mut precondition_failure = bazel_protos::error_details::PreconditionFailure::new();
+ for violation in violations.into_iter() {
+ precondition_failure.mut_violations().push(violation);
+ }
+ precondition_failure
+ }));
+ status
}
fn run_command_remote(
@@ -2565,11 +2512,8 @@ mod tests {
.file(&TestData::roland())
.directory(&TestDirectory::containing_roland())
.build();
- let mut runtime = tokio::runtime::Runtime::new().unwrap();
let command_runner = create_command_runner(address, &cas);
- let result = runtime.block_on(command_runner.run(request));
- runtime.shutdown_now().wait().unwrap();
- result
+ command_runner.run(request).wait()
}
fn create_command_runner(address: String, cas: &mock::StubCAS) -> CommandRunner {
@@ -2591,8 +2535,7 @@ mod tests {
)
.expect("Failed to make store");
- CommandRunner::new(&address, None, None, None, store, timer_thread)
- .expect("Failed to make command runner")
+ CommandRunner::new(&address, None, None, None, None, 1, store, timer_thread)
}
fn timer_thread() -> resettable::Resettable<futures_timer::HelperThread> {
@@ -2600,62 +2543,52 @@ mod tests {
}
fn extract_execute_response(
- operation: bazel_protos::google::longrunning::Operation,
+ operation: bazel_protos::operations::Operation,
) -> Result<FallibleExecuteProcessResult, ExecutionError> {
let cas = mock::StubCAS::builder()
.file(&TestData::roland())
.directory(&TestDirectory::containing_roland())
.build();
- let mut runtime = tokio::runtime::Runtime::new().unwrap();
- let command_runner = create_command_runner("127.0.0.1:0".to_owned(), &cas);
- let result = runtime.block_on(
- command_runner.extract_execute_response(operation, &mut ExecutionHistory::default()),
- );
-
- runtime.shutdown_now().wait().unwrap();
- result
+ let command_runner = create_command_runner("".to_owned(), &cas);
+ command_runner
+ .extract_execute_response(
+ super::OperationOrStatus::Operation(operation),
+ &mut ExecutionHistory::default(),
+ )
+ .wait()
}
fn extract_output_files_from_response(
- result: &bazel_protos::build::bazel::remote::execution::v2::ActionResult,
+ execute_response: &bazel_protos::remote_execution::ExecuteResponse,
) -> Result<Digest, ExecutionError> {
let cas = mock::StubCAS::builder()
.file(&TestData::roland())
.directory(&TestDirectory::containing_roland())
.build();
-
- let mut runtime = tokio::runtime::Runtime::new().unwrap();
- let command_runner = create_command_runner("127.0.0.1:0".to_owned(), &cas);
- let result = runtime.block_on(command_runner.extract_output_files(result));
- runtime.shutdown_now().wait().unwrap();
- result
- }
-
- fn make_any_prost_executeresponse(
- message: &bazel_protos::build::bazel::remote::execution::v2::ExecuteResponse,
- ) -> prost_types::Any {
- make_any_prost_proto("build.bazel.remote.execution.v2.ExecuteResponse", message)
+ let command_runner = create_command_runner("".to_owned(), &cas);
+ command_runner
+ .extract_output_files(&execute_response)
+ .wait()
}
- fn make_any_prost_proto<M: prost::Message>(message_name: &str, message: &M) -> prost_types::Any {
- let size = message.encoded_len();
- let mut value = BytesMut::with_capacity(size);
- message.encode(&mut value).expect("Error serializing proto");
- prost_types::Any {
- type_url: format!("type.googleapis.com/{}", message_name),
- value: value.to_vec(),
- }
+ fn make_any_proto(message: &dyn Message) -> protobuf::well_known_types::Any {
+ let mut any = protobuf::well_known_types::Any::new();
+ any.set_type_url(format!(
+ "type.googleapis.com/{}",
+ message.descriptor().full_name()
+ ));
+ any.set_value(message.write_to_bytes().expect("Error serializing proto"));
+ any
}
fn missing_preconditionfailure_violation(
digest: &Digest,
- ) -> bazel_protos::google::rpc::precondition_failure::Violation {
+ ) -> bazel_protos::error_details::PreconditionFailure_Violation {
{
- bazel_protos::google::rpc::precondition_failure::Violation {
- type_: "MISSING".to_owned(),
- subject: format!("blobs/{}/{}", digest.0, digest.1),
- ..Default::default()
- }
+ let mut violation = bazel_protos::error_details::PreconditionFailure_Violation::new();
+ violation.set_field_type("MISSING".to_owned());
+ violation.set_subject(format!("blobs/{}/{}", digest.0, digest.1));
+ violation
}
}
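The rewritten `make_any_proto` above packs a message into a `google.protobuf.Any` by pairing the message's fully-qualified name (as a `type.googleapis.com/...` type URL) with its serialized bytes, replacing the prost-based `make_any_prost_proto` helper. A minimal sketch of the same pattern, assuming the rust-protobuf 2.x API this diff switches to; the wrapped `StringValue` message is purely illustrative:

```rust
// Sketch only: assumes the `protobuf` 2.x crate (rust-protobuf) as a dependency.
use protobuf::well_known_types::{Any, StringValue};
use protobuf::Message;

fn pack_any(message: &dyn Message) -> Any {
    let mut any = Any::new();
    // The type URL is the conventional prefix plus the message's fully-qualified name.
    any.set_type_url(format!(
        "type.googleapis.com/{}",
        message.descriptor().full_name()
    ));
    any.set_value(message.write_to_bytes().expect("Error serializing proto"));
    any
}

fn main() {
    let mut msg = StringValue::new();
    msg.set_value("hello".to_owned());
    let any = pack_any(&msg);
    assert_eq!(
        any.get_type_url(),
        "type.googleapis.com/google.protobuf.StringValue"
    );
}
```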
diff --git a/src/rust/engine/process_executor/Cargo.toml b/src/rust/engine/process_executor/Cargo.toml
index 87453d8b797..d4c45ad05ec 100644
--- a/src/rust/engine/process_executor/Cargo.toml
+++ b/src/rust/engine/process_executor/Cargo.toml
@@ -15,4 +15,3 @@ hashing = { path = "../hashing" }
futures = "^0.1.16"
process_execution = { path = "../process_execution" }
resettable = { path = "../resettable" }
-tokio = "0.1.14"
diff --git a/src/rust/engine/process_executor/src/main.rs b/src/rust/engine/process_executor/src/main.rs
index 247b40872f1..4946381b05f 100644
--- a/src/rust/engine/process_executor/src/main.rs
+++ b/src/rust/engine/process_executor/src/main.rs
@@ -92,6 +92,13 @@ fn main() {
If unspecified, local execution will be performed.",
),
)
+ .arg(
+ Arg::with_name("execution-root-ca-cert-file")
+ .help("Path to file containing root certificate authority certificates for the execution server. If not set, TLS will not be used when connecting to the execution server.")
+ .takes_value(true)
+ .long("execution-root-ca-cert-file")
+ .required(false)
+ )
.arg(
Arg::with_name("execution-oauth-bearer-token-path")
.help("Path to file containing oauth bearer token for communication with the execution server. If not set, no authorization will be provided to remote servers.")
@@ -283,6 +290,12 @@ fn main() {
let runner: Box = match server_arg {
Some(address) => {
+ let root_ca_certs = if let Some(path) = args.value_of("execution-root-ca-cert-file") {
+ Some(std::fs::read(path).expect("Error reading root CA certs file"))
+ } else {
+ None
+ };
+
let oauth_bearer_token =
if let Some(path) = args.value_of("execution-oauth-bearer-token-path") {
Some(std::fs::read_to_string(path).expect("Error reading oauth bearer token file"))
@@ -290,17 +303,16 @@ fn main() {
None
};
- Box::new(
- process_execution::remote::CommandRunner::new(
- address,
- args.value_of("cache-key-gen-version").map(str::to_owned),
- remote_instance_arg,
- oauth_bearer_token,
- store.clone(),
- timer_thread,
- )
- .expect("Could not initialize remote execution client"),
- ) as Box
+ Box::new(process_execution::remote::CommandRunner::new(
+ address,
+ args.value_of("cache-key-gen-version").map(str::to_owned),
+ remote_instance_arg,
+ root_ca_certs,
+ oauth_bearer_token,
+ 1,
+ store.clone(),
+ timer_thread,
+ )) as Box
}
None => Box::new(process_execution::local::CommandRunner::new(
store.clone(),
@@ -309,18 +321,17 @@ fn main() {
true,
)) as Box,
};
- let mut rt = tokio::runtime::Runtime::new().unwrap();
- let result = rt.block_on(runner.run(request)).unwrap();
+
+ let result = runner.run(request).wait().expect("Error executing");
if let Some(output) = args.value_of("materialize-output-to").map(PathBuf::from) {
- rt.block_on(store.materialize_directory(output, result.output_directory))
+ store
+ .materialize_directory(output, result.output_directory)
+ .wait()
.unwrap();
- };
+ }
print!("{}", String::from_utf8(result.stdout.to_vec()).unwrap());
eprint!("{}", String::from_utf8(result.stderr.to_vec()).unwrap());
-
- rt.shutdown_now().wait().unwrap();
-
exit(result.exit_code);
}
diff --git a/src/rust/engine/resettable/src/lib.rs b/src/rust/engine/resettable/src/lib.rs
index 513c4b5be09..a05d990a477 100644
--- a/src/rust/engine/resettable/src/lib.rs
+++ b/src/rust/engine/resettable/src/lib.rs
@@ -65,19 +65,33 @@ where
T: Send + Sync,
{
pub fn new<F: Fn() -> T + 'static>(make: F) -> Resettable<T> {
- let val = (make)();
Resettable {
- val: Arc::new(RwLock::new(Some(val))),
+ val: Arc::new(RwLock::new(None)),
make: Arc::new(make),
}
}
+ ///
+ /// Execute f with the value in the Resettable.
+ /// May lazily initialize the value in the Resettable.
+ ///
+ /// TODO: Explore the use of parking_lot::RwLock::upgradable_read
+ /// to avoid reacquiring the lock for initialization.
+ /// This can be used if we are sure that a deadlock won't happen
+ /// when two readers try to upgrade at the same time.
+ ///
pub fn with<O, F: FnOnce(&T) -> O>(&self, f: F) -> O {
- let val_opt = self.val.read();
- let val = val_opt
- .as_ref()
- .unwrap_or_else(|| panic!("A Resettable value cannot be used while it is shutdown."));
- f(val)
+ {
+ let val_opt = self.val.read();
+ if let Some(val) = val_opt.as_ref() {
+ return f(val);
+ }
+ }
+ let mut val_write_opt = self.val.write();
+ if val_write_opt.as_ref().is_none() {
+ *val_write_opt = Some((self.make)())
+ }
+ f(val_write_opt.as_ref().unwrap())
}
///
@@ -89,9 +103,7 @@ where
{
let mut val = self.val.write();
*val = None;
- let t = f();
- *val = Some((self.make)());
- t
+ f()
}
}
@@ -106,10 +118,6 @@ where
/// be sure that dropping it will actually deallocate the resource.
///
pub fn get(&self) -> T {
- let val_opt = self.val.read();
- let val = val_opt
- .as_ref()
- .unwrap_or_else(|| panic!("A Resettable value cannot be used while it is shutdown."));
- val.clone()
+ self.with(T::clone)
}
}
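The `Resettable` change above replaces eager construction with lazy, double-checked initialization: `with` first looks for the value under the read lock, and only if it is unset takes the write lock, re-checks, and constructs it. A minimal sketch of that pattern using `std::sync::RwLock` in place of parking_lot; the `Lazy` type here is an illustrative stand-in, not the pants `Resettable`:

```rust
use std::sync::{Arc, RwLock};

struct Lazy<T> {
    val: Arc<RwLock<Option<T>>>,
    make: Arc<dyn Fn() -> T + Send + Sync>,
}

impl<T> Lazy<T> {
    fn new(make: impl Fn() -> T + Send + Sync + 'static) -> Self {
        Lazy {
            val: Arc::new(RwLock::new(None)),
            make: Arc::new(make),
        }
    }

    fn with<O>(&self, f: impl FnOnce(&T) -> O) -> O {
        {
            // Fast path: the value is usually already initialized.
            let guard = self.val.read().unwrap();
            if let Some(val) = guard.as_ref() {
                return f(val);
            }
        }
        // Slow path: another thread may have initialized the value between dropping the
        // read lock and acquiring the write lock, so check again before constructing.
        let mut guard = self.val.write().unwrap();
        if guard.is_none() {
            *guard = Some((self.make)());
        }
        f(guard.as_ref().unwrap())
    }
}

fn main() {
    let lazy = Lazy::new(|| vec![1, 2, 3]);
    assert_eq!(lazy.with(|v| v.len()), 3);
}
```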
diff --git a/src/rust/engine/src/context.rs b/src/rust/engine/src/context.rs
index 1fac707784c..024f9845c3e 100644
--- a/src/rust/engine/src/context.rs
+++ b/src/rust/engine/src/context.rs
@@ -130,23 +130,23 @@ impl Core {
.unwrap_or_else(|e| panic!("Could not initialize Store: {:?}", e));
let underlying_command_runner: Box = match &remote_execution_server {
- Some(ref address) => Box::new(
- process_execution::remote::CommandRunner::new(
- address,
- remote_execution_process_cache_namespace.clone(),
- remote_instance_name.clone(),
- oauth_bearer_token.clone(),
- store.clone(),
- futures_timer_thread2.clone(),
- )
- .expect("Could not initialize remote execution client"),
- ) as Box,
+ Some(ref address) => Box::new(process_execution::remote::CommandRunner::new(
+ address,
+ remote_execution_process_cache_namespace.clone(),
+ remote_instance_name.clone(),
+ root_ca_certs.clone(),
+ oauth_bearer_token.clone(),
+ // Allow for some overhead for bookkeeping threads (if any).
+ process_execution_parallelism + 2,
+ store.clone(),
+ futures_timer_thread2.clone(),
+ )),
None => Box::new(process_execution::local::CommandRunner::new(
store.clone(),
fs_pool2.clone(),
work_dir.clone(),
process_execution_cleanup_local_dirs,
- )) as Box,
+ )),
};
let command_runner =
diff --git a/src/rust/engine/src/externs.rs b/src/rust/engine/src/externs.rs
index 2dbaf487835..c81bee215e1 100644
--- a/src/rust/engine/src/externs.rs
+++ b/src/rust/engine/src/externs.rs
@@ -82,7 +82,6 @@ pub fn store_set>(values: I) -> Value {
///
/// The underlying slice _must_ contain an even number of elements.
///
-#[allow(dead_code)]
pub fn store_dict(keys_and_values_interleaved: &[(Value)]) -> Value {
if keys_and_values_interleaved.len() % 2 != 0 {
panic!("store_dict requires an even number of elements");
@@ -121,6 +120,11 @@ pub fn store_i64(val: i64) -> Value {
with_externs(|e| (e.store_i64)(e.context, val).into())
}
+#[allow(dead_code)]
+pub fn store_f64(val: f64) -> Value {
+ with_externs(|e| (e.store_f64)(e.context, val).into())
+}
+
#[allow(dead_code)]
pub fn store_bool(val: bool) -> Value {
with_externs(|e| (e.store_bool)(e.context, val).into())
@@ -344,6 +348,7 @@ pub struct Externs {
pub store_bytes: StoreBytesExtern,
pub store_utf8: StoreUtf8Extern,
pub store_i64: StoreI64Extern,
+ pub store_f64: StoreF64Extern,
pub store_bool: StoreBoolExtern,
pub project_ignoring_type: ProjectIgnoringTypeExtern,
pub project_multi: ProjectMultiExtern,
@@ -383,6 +388,8 @@ pub type StoreUtf8Extern = extern "C" fn(*const ExternContext, *const u8, u64) -> Handle;
pub type StoreI64Extern = extern "C" fn(*const ExternContext, i64) -> Handle;
+pub type StoreF64Extern = extern "C" fn(*const ExternContext, f64) -> Handle;
+
pub type StoreBoolExtern = extern "C" fn(*const ExternContext, bool) -> Handle;
///
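`store_f64` is wired in the same way as the other extern hooks: the Python side registers a C-ABI function pointer, and the engine records it under a typed alias. A toy sketch of that function-pointer plumbing; `ExternContext` and `Handle` here are simplified stand-ins for the real engine types:

```rust
#[repr(C)]
struct ExternContext {
    _private: u8,
}

#[derive(Debug)]
#[repr(C)]
struct Handle(u64);

// Mirrors the shape of the new StoreF64Extern alias: a context pointer and an f64 in,
// an opaque handle out.
type StoreF64Extern = extern "C" fn(*const ExternContext, f64) -> Handle;

// A real extern would allocate a Python float; this toy callback just stashes the bits.
extern "C" fn store_f64_impl(_context: *const ExternContext, val: f64) -> Handle {
    Handle(val.to_bits())
}

fn main() {
    let store_f64: StoreF64Extern = store_f64_impl;
    let context = ExternContext { _private: 0 };
    let handle = store_f64(&context as *const ExternContext, 1.5);
    println!("stored 1.5 as {:?}", handle);
}
```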
diff --git a/src/rust/engine/src/lib.rs b/src/rust/engine/src/lib.rs
index 54a0ed4af0c..cc5d3d5ee98 100644
--- a/src/rust/engine/src/lib.rs
+++ b/src/rust/engine/src/lib.rs
@@ -62,8 +62,8 @@ use crate::externs::{
Buffer, BufferBuffer, CallExtern, CloneValExtern, CreateExceptionExtern, DropHandlesExtern,
EqualsExtern, EvalExtern, ExternContext, Externs, GeneratorSendExtern, HandleBuffer,
IdentifyExtern, LogExtern, ProjectIgnoringTypeExtern, ProjectMultiExtern, PyResult,
- SatisfiedByExtern, SatisfiedByTypeExtern, StoreBoolExtern, StoreBytesExtern, StoreI64Extern,
- StoreTupleExtern, StoreUtf8Extern, TypeIdBuffer, TypeToStrExtern, ValToStrExtern,
+ SatisfiedByExtern, SatisfiedByTypeExtern, StoreBoolExtern, StoreBytesExtern, StoreF64Extern,
+ StoreI64Extern, StoreTupleExtern, StoreUtf8Extern, TypeIdBuffer, TypeToStrExtern, ValToStrExtern,
};
use crate::handles::Handle;
use crate::rule_graph::{GraphMaker, RuleGraph};
@@ -119,6 +119,7 @@ pub extern "C" fn externs_set(
store_bytes: StoreBytesExtern,
store_utf8: StoreUtf8Extern,
store_i64: StoreI64Extern,
+ store_f64: StoreF64Extern,
store_bool: StoreBoolExtern,
project_ignoring_type: ProjectIgnoringTypeExtern,
project_multi: ProjectMultiExtern,
@@ -146,6 +147,7 @@ pub extern "C" fn externs_set(
store_bytes,
store_utf8,
store_i64,
+ store_f64,
store_bool,
project_ignoring_type,
project_multi,
@@ -177,10 +179,6 @@ pub extern "C" fn scheduler_create(
construct_snapshot: Function,
construct_file_content: Function,
construct_files_content: Function,
- construct_path_stat: Function,
- construct_dir: Function,
- construct_file: Function,
- construct_link: Function,
construct_process_result: Function,
type_address: TypeConstraint,
type_path_globs: TypeConstraint,
@@ -224,10 +222,6 @@ pub extern "C" fn scheduler_create(
construct_snapshot: construct_snapshot,
construct_file_content: construct_file_content,
construct_files_content: construct_files_content,
- construct_path_stat: construct_path_stat,
- construct_dir: construct_dir,
- construct_file: construct_file,
- construct_link: construct_link,
construct_process_result: construct_process_result,
address: type_address,
path_globs: type_path_globs,
@@ -315,7 +309,8 @@ pub extern "C" fn scheduler_create(
}
///
-/// Returns a Handle representing a tuple of tuples of metric name string and metric value int.
+/// Returns a Handle representing a dictionary mapping metric name strings to metric value ints.
///
#[no_mangle]
pub extern "C" fn scheduler_metrics(
@@ -327,11 +322,9 @@ pub extern "C" fn scheduler_metrics(
let values = scheduler
.metrics(session)
.into_iter()
- .map(|(metric, value)| {
- externs::store_tuple(&[externs::store_utf8(metric), externs::store_i64(value)])
- })
+ .flat_map(|(metric, value)| vec![externs::store_utf8(metric), externs::store_i64(value)])
.collect::<Vec<_>>();
- externs::store_tuple(&values).into()
+ externs::store_dict(&values).into()
})
})
}
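`scheduler_metrics` now flat-maps each `(name, value)` pair into two adjacent elements so that `store_dict` receives the even-length, interleaved `[k1, v1, k2, v2, ...]` slice it asserts on. A small sketch of that interleaving; the metric names and string conversion are illustrative:

```rust
fn main() {
    let metrics = vec![("metric_a", 2i64), ("metric_b", 42)];
    // Each pair becomes two adjacent elements: keys at even indices, values at odd indices.
    let interleaved: Vec<String> = metrics
        .into_iter()
        .flat_map(|(metric, value)| vec![metric.to_string(), value.to_string()])
        .collect();
    assert_eq!(interleaved.len() % 2, 0);
    assert_eq!(interleaved, ["metric_a", "2", "metric_b", "42"]);
}
```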
@@ -666,15 +659,24 @@ pub extern "C" fn capture_snapshots(
path_globs_and_root_tuple_wrapper: Handle,
) -> PyResult {
let values = externs::project_multi(&path_globs_and_root_tuple_wrapper.into(), "dependencies");
- let path_globs_and_roots_result: Result<Vec<_>, String> = values
+ let path_globs_and_roots_result = values
.iter()
.map(|value| {
let root = PathBuf::from(externs::project_str(&value, "root"));
let path_globs =
nodes::Snapshot::lift_path_globs(&externs::project_ignoring_type(&value, "path_globs"));
- path_globs.map(|path_globs| (path_globs, root))
+ let digest_hint = {
+ let maybe_digest = externs::project_ignoring_type(&value, "digest_hint");
+ // TODO: Extract a singleton Key for None.
+ if maybe_digest == externs::eval("None").unwrap() {
+ None
+ } else {
+ Some(nodes::lift_digest(&maybe_digest)?)
+ }
+ };
+ path_globs.map(|path_globs| (path_globs, root, digest_hint))
})
- .collect();
+ .collect::<Result<Vec<_>, _>>();
let path_globs_and_roots = match path_globs_and_roots_result {
Ok(v) => v,
@@ -689,13 +691,14 @@ pub extern "C" fn capture_snapshots(
futures::future::join_all(
path_globs_and_roots
.into_iter()
- .map(|(path_globs, root)| {
+ .map(|(path_globs, root, digest_hint)| {
let core = core.clone();
fs::Snapshot::capture_snapshot_from_arbitrary_root(
core.store(),
core.fs_pool.clone(),
root,
path_globs,
+ digest_hint,
)
.map(move |snapshot| nodes::Snapshot::store_snapshot(&core, &snapshot))
})
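`capture_snapshots` now lifts an optional `digest_hint` per entry, so each mapped item can fail independently; the `collect::<Result<Vec<_>, _>>()` call collapses the per-item `Result`s into a single `Result`, short-circuiting on the first error. A tiny sketch of that collect pattern with illustrative values:

```rust
fn parse_all(inputs: &[&str]) -> Result<Vec<i32>, String> {
    inputs
        .iter()
        .map(|s| s.parse::<i32>().map_err(|e| e.to_string()))
        .collect::<Result<Vec<_>, _>>()
}

fn main() {
    assert_eq!(parse_all(&["1", "2", "3"]), Ok(vec![1, 2, 3]));
    // The first failing item turns the whole collection into an Err.
    assert!(parse_all(&["1", "oops", "3"]).is_err());
}
```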
diff --git a/src/rust/engine/src/nodes.rs b/src/rust/engine/src/nodes.rs
index 3a9587e741e..e1d5259d63d 100644
--- a/src/rust/engine/src/nodes.rs
+++ b/src/rust/engine/src/nodes.rs
@@ -2,6 +2,7 @@
// Licensed under the Apache License, Version 2.0 (see LICENSE).
use std::collections::{BTreeMap, HashMap};
+use std::fmt::Display;
use std::io::Write;
use std::path::{Path, PathBuf};
use std::sync::Arc;
@@ -216,22 +217,11 @@ impl WrappedNode for Select {
lift_digest(&directory_digest_val).map_err(|str| throw(&str))
})
.and_then(move |digest| {
- let store = context.core.store();
context
.core
.store()
- .load_directory(digest)
+ .contents_for_directory(digest)
.map_err(|str| throw(&str))
- .and_then(move |maybe_directory| {
- maybe_directory
- .ok_or_else(|| format!("Could not find directory with digest {:?}", digest))
- .map_err(|str| throw(&str))
- })
- .and_then(move |directory| {
- store
- .contents_for_directory(&directory)
- .map_err(|str| throw(&str))
- })
.map(move |files_content| Snapshot::store_files_content(&context, &files_content))
})
.to_boxed()
@@ -540,16 +530,24 @@ impl Snapshot {
}
pub fn store_snapshot(core: &Arc<Core>, item: &fs::Snapshot) -> Value {
- let path_stats: Vec<_> = item
- .path_stats
- .iter()
- .map(|ps| Self::store_path_stat(core, ps))
- .collect();
+ let mut files = Vec::new();
+ let mut dirs = Vec::new();
+ for ps in &item.path_stats {
+ match ps {
+ &PathStat::File { ref path, .. } => {
+ files.push(Self::store_path(path));
+ }
+ &PathStat::Dir { ref path, .. } => {
+ dirs.push(Self::store_path(path));
+ }
+ }
+ }
externs::unsafe_call(
&core.types.construct_snapshot,
&[
Self::store_directory(core, &item.digest),
- externs::store_tuple(&path_stats),
+ externs::store_tuple(&files),
+ externs::store_tuple(&dirs),
],
)
}
@@ -558,28 +556,6 @@ impl Snapshot {
externs::store_utf8_osstr(item.as_os_str())
}
- fn store_dir(core: &Arc<Core>, item: &Dir) -> Value {
- let args = [Self::store_path(item.0.as_path())];
- externs::unsafe_call(&core.types.construct_dir, &args)
- }
-
- fn store_file(core: &Arc<Core>, item: &File) -> Value {
- let args = [Self::store_path(item.path.as_path())];
- externs::unsafe_call(&core.types.construct_file, &args)
- }
-
- fn store_path_stat(core: &Arc<Core>, item: &PathStat) -> Value {
- let args = match item {
- &PathStat::Dir { ref path, ref stat } => {
- vec![Self::store_path(path), Self::store_dir(core, stat)]
- }
- &PathStat::File { ref path, ref stat } => {
- vec![Self::store_path(path), Self::store_file(core, stat)]
- }
- };
- externs::unsafe_call(&core.types.construct_path_stat, &args)
- }
-
fn store_file_content(context: &Context, item: &FileContent) -> Value {
externs::unsafe_call(
&context.core.types.construct_file_content,
@@ -1086,27 +1062,6 @@ impl Node for NodeKey {
}
}
- fn format(&self) -> String {
- fn keystr(key: &Key) -> String {
- externs::key_to_str(&key)
- }
- fn typstr(tc: &TypeConstraint) -> String {
- externs::key_to_str(&tc.0)
- }
- // TODO: these should all be converted to fmt::Debug implementations, and then this method can
- // go away in favor of the auto-derived Debug for this type.
- match self {
- &NodeKey::DigestFile(ref s) => format!("DigestFile({:?})", s.0),
- &NodeKey::DownloadedFile(ref s) => format!("DownloadedFile({:?})", s.0),
- &NodeKey::ExecuteProcess(ref s) => format!("ExecuteProcess({:?}", s.0),
- &NodeKey::ReadLink(ref s) => format!("ReadLink({:?})", s.0),
- &NodeKey::Scandir(ref s) => format!("Scandir({:?})", s.0),
- &NodeKey::Select(ref s) => format!("Select({}, {})", s.params, typstr(&s.product)),
- &NodeKey::Task(ref s) => format!("{:?}", s),
- &NodeKey::Snapshot(ref s) => format!("Snapshot({})", keystr(&s.0)),
- }
- }
-
fn digest(res: NodeResult) -> Option {
match res {
NodeResult::Digest(d) => Some(d),
@@ -1128,6 +1083,26 @@ impl Node for NodeKey {
}
}
+impl Display for NodeKey {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
+ match self {
+ &NodeKey::DigestFile(ref s) => write!(f, "DigestFile({:?})", s.0),
+ &NodeKey::DownloadedFile(ref s) => write!(f, "DownloadedFile({:?})", s.0),
+ &NodeKey::ExecuteProcess(ref s) => write!(f, "ExecuteProcess({:?}", s.0),
+ &NodeKey::ReadLink(ref s) => write!(f, "ReadLink({:?})", s.0),
+ &NodeKey::Scandir(ref s) => write!(f, "Scandir({:?})", s.0),
+ &NodeKey::Select(ref s) => write!(
+ f,
+ "Select({}, {})",
+ s.params,
+ externs::key_to_str(&s.product.0)
+ ),
+ &NodeKey::Task(ref s) => write!(f, "{:?}", s),
+ &NodeKey::Snapshot(ref s) => write!(f, "Snapshot({})", externs::key_to_str(&s.0)),
+ }
+ }
+}
+
impl NodeError for Failure {
fn invalidated() -> Failure {
Failure::Invalidated
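Moving `NodeKey::format` from an ad-hoc Graph trait method to a `std::fmt::Display` impl means call sites can format a node with plain `{}`, as the `debug!` call in scheduler.rs below now does. A toy sketch of that conversion with a stand-in enum, not the real `NodeKey`:

```rust
use std::fmt;

enum Key {
    ReadLink(String),
    Scandir(String),
}

impl fmt::Display for Key {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Key::ReadLink(s) => write!(f, "ReadLink({:?})", s),
            Key::Scandir(s) => write!(f, "Scandir({:?})", s),
        }
    }
}

fn main() {
    // With Display implemented, `{}` works directly in format!/println!/debug! macros.
    println!("Root {} completed.", Key::ReadLink("a/b".to_owned()));
    println!("Root {} completed.", Key::Scandir("src".to_owned()));
}
```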
diff --git a/src/rust/engine/src/scheduler.rs b/src/rust/engine/src/scheduler.rs
index 0ca660865bf..dff4f06bdb2 100644
--- a/src/rust/engine/src/scheduler.rs
+++ b/src/rust/engine/src/scheduler.rs
@@ -13,7 +13,7 @@ use crate::context::{Context, Core};
use crate::core::{Failure, Params, TypeConstraint, Value};
use crate::nodes::{NodeKey, Select, Tracer, TryInto, Visualizer};
use crate::selectors;
-use graph::{EntryId, Graph, InvalidationResult, Node, NodeContext};
+use graph::{EntryId, Graph, InvalidationResult, NodeContext};
use indexmap::IndexMap;
use log::{debug, info, warn};
use parking_lot::Mutex;
@@ -210,10 +210,7 @@ impl Scheduler {
// Otherwise (if it is a success, some other type of Failure, or if we've run
// out of retries) recover to complete the join, which will cause the results to
// propagate to the user.
- debug!(
- "Root {} completed.",
- NodeKey::Select(Box::new(root)).format()
- );
+ debug!("Root {} completed.", NodeKey::Select(Box::new(root)));
Ok(other.map(|res| {
res
.try_into()
diff --git a/src/rust/engine/src/types.rs b/src/rust/engine/src/types.rs
index c4333deb737..3b517e7919c 100644
--- a/src/rust/engine/src/types.rs
+++ b/src/rust/engine/src/types.rs
@@ -5,10 +5,6 @@ pub struct Types {
pub construct_snapshot: Function,
pub construct_file_content: Function,
pub construct_files_content: Function,
- pub construct_path_stat: Function,
- pub construct_dir: Function,
- pub construct_file: Function,
- pub construct_link: Function,
pub construct_process_result: Function,
pub address: TypeConstraint,
pub path_globs: TypeConstraint,
diff --git a/src/rust/engine/testutil/mock/src/execution_server.rs b/src/rust/engine/testutil/mock/src/execution_server.rs
index d61c5c89ce5..f4a7973aeb9 100644
--- a/src/rust/engine/testutil/mock/src/execution_server.rs
+++ b/src/rust/engine/testutil/mock/src/execution_server.rs
@@ -22,13 +22,12 @@ use protobuf;
///
#[derive(Clone, Debug)]
pub struct MockOperation {
- pub op:
- Result