diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 00000000..b08b2574 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,6 @@ +[run] +branch = True + +[report] +exclude_lines = + pragma: no cover diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..73ec28f0 --- /dev/null +++ b/.gitignore @@ -0,0 +1,6 @@ +*.pyc +.cache +.coverage +.nox +/*_local.sh +__pycache__ diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..5f7debaf --- /dev/null +++ b/.travis.yml @@ -0,0 +1,7 @@ +sudo: required +services: +- docker +script: +- make build +- make tests/virtualenv +- make tests/no-virtualenv diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 00000000..2b618c5f --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1,4 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. + +* @jinglundong @donmccasland diff --git a/CONTRIB.md b/CONTRIB.md deleted file mode 100644 index 0e104db5..00000000 --- a/CONTRIB.md +++ /dev/null @@ -1,20 +0,0 @@ -# How to become a contributor and submit your own code - -## Contributor License Agreements - -We'd love to accept your patches! Before we can take them, we have to jump a couple of legal hurdles. - -Please fill out either the individual or corporate Contributor License Agreement (CLA). - - * If you are an individual writing original source code and you're sure you own the intellectual property, then you'll need to sign an [individual CLA](http://code.google.com/legal/individual-cla-v1.0.html). - * If you work for a company that wants to allow you to contribute your work, then you'll need to sign a [corporate CLA](http://code.google.com/legal/corporate-cla-v1.0.html). - -Follow either of the two links above to access the appropriate CLA and instructions for how to sign and return it. Once we receive it, we'll be able to accept your pull requests. - -## Contributing A Patch - -1. Submit an issue describing your proposed change to the repo in question. -1. 
The repo owner will respond to your issue promptly. -1. If your proposed change is accepted, and you haven't already done so, sign a Contributor License Agreement (see details above). -1. Fork the desired repo, develop and test your code changes. -1. Submit a pull request. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..bb1ec7cb --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,35 @@ +# How to become a contributor and submit your own code + +## Contributor License Agreements + +We'd love to accept your sample apps and patches! Before we can take them, we +have to jump a couple of legal hurdles. + +Please fill out either the individual or corporate Contributor License +Agreement (CLA). + + * If you are an individual writing original source code and you're sure you + own the intellectual property, then you'll need to sign an [individual CLA] + (https://developers.google.com/open-source/cla/individual). + * If you work for a company that wants to allow you to contribute your work, + then you'll need to sign a [corporate CLA] + (https://developers.google.com/open-source/cla/corporate). + +Follow either of the two links above to access the appropriate CLA and +instructions for how to sign and return it. Once we receive it, we'll +be able to accept your pull requests. + +## Contributing A Patch + +1. Submit an issue describing your proposed change to the repo in question. +1. The repo owner will respond to your issue promptly. +1. If your proposed change is accepted, and you haven't already done so, sign a + Contributor License Agreement (see details above). +1. Fork the desired repo, develop and test your code changes. +1. Ensure that your code adheres to the existing style in the sample to which + you are contributing. Refer to the + [Google Cloud Platform Samples Style Guide] + (https://github.com/GoogleCloudPlatform/Template/wiki/style.html) for the + recommended coding standards for this organization. +1. 
Ensure that your code has an appropriate set of unit tests which all pass. +1. Submit a pull request. diff --git a/README.md b/README.md index 8f96b8e5..51e70cd8 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,84 @@ -python-docker -============= +# Google Cloud Platform - Python Runtime Docker Image -This repository contains the sources for the following [docker](https://docker.io) base images: -- [`google/python`](/base) -- [`google/python-runtime`](/runtime) -- [`google/python-hello`](/hello) +This repository contains the source for the +[`gcr.io/google-appengine/python`](https://gcr.io/google-appengine/python) +[docker](https://docker.io) base image. This image can be used as the base image +for running applications on +[Google App Engine Flexible](https://cloud.google.com/appengine/docs/flexible/), +[Google Kubernetes Engine](https://cloud.google.com/kubernetes-engine), or any +other Docker host. + +This image is based on Ubuntu Xenial and contains packages required to build +most of the popular Python libraries. For more information about this runtime, +see the +[documentation](https://cloud.google.com/appengine/docs/flexible/python/runtime). + +## App Engine + +When using App Engine Flexible, you can use the runtime without worrying about +docker by specifying `runtime: python` in your `app.yaml`: + +```yaml +runtime: python +env: flex +entrypoint: gunicorn -b :$PORT main:app + +runtime_config: + # You can also specify 2 for Python 2.7 + python_version: 3 +``` + +If you have an existing App Engine application using this runtime and want to +customize it, you can use the +[`Cloud SDK`](https://cloud.google.com/sdk/gcloud/reference/preview/app/gen-config) +to create a custom runtime: + + gcloud beta app gen-config --custom + +You can then modify the `Dockerfile` and `.dockerignore` as needed for you +application. + +## Kubernetes Engine & other Docker hosts. 
+ +For other docker hosts, you'll need to create a `Dockerfile` based on this image +that copies your application code, installs dependencies, and declares an +command or entrypoint. For example: + + FROM gcr.io/google-appengine/python + + # Create a virtualenv for dependencies. This isolates these packages from + # system-level packages. + # Use -p python3 or -p python3.7 to select python version. Default is version 2. + RUN virtualenv /env + + # Setting these environment variables are the same as running + # source /env/bin/activate. + ENV VIRTUAL_ENV /env + ENV PATH /env/bin:$PATH + + # Copy the application's requirements.txt and run pip to install all + # dependencies into the virtualenv. + ADD requirements.txt /app/requirements.txt + RUN pip install -r /app/requirements.txt + + # Add the application source code. + ADD . /app + + # Run a WSGI server to serve the application. gunicorn must be declared as + # a dependency in requirements.txt. + CMD gunicorn -b :$PORT main:app + +## Building the image + +Google regularly builds and releases this image at +[`gcr.io/google-appengine/python`](https://gcr.io/google-appengine/python). + +See [RELEASING.md](RELEASING.md) for more information. + +## Contributing changes + +* See [CONTRIBUTING.md](CONTRIBUTING.md) + +## Licensing + +* See [LICENSE](LICENSE) diff --git a/RELEASING.md b/RELEASING.md new file mode 100644 index 00000000..f886a542 --- /dev/null +++ b/RELEASING.md @@ -0,0 +1,158 @@ +# Google Cloud Platform - Python Runtime Docker Image + +## `build.sh` + +There is a shell script called `build.sh` that builds everything in this +repository. + +### Environment variables for `build.sh` + +DOCKER_NAMESPACE +: The prefix applied to all images names created. To push images to Google +Container Registry (GCR), this should be `gcr.io/YOUR-PROJECT-NAME`. + +TAG +: The suffix applied to all images created. This should be unique. If not +specified, the current time will be used (timestamp format `YYYY-mm-dd-HHMMSS`). 
+ +GOOGLE_APPLICATION_CREDENTIALS_FOR_TESTS +: (System test only) Path to service account credentials in JSON format. + +GOOGLE_CLOUD_PROJECT_FOR_TESTS +: (System test only) Name of the Google Cloud Platform project to run the system +tests under. + +## Building and Releasing + +A custom Jenkins job builds and releases this repository using scripts and job +configurations that are not yet available publicly. The control flow is as +follows: + +1. Jenkins job `python/release` is invoked by + a. Manually running the script `build_and_release.py` with arguments + b. Manually invoking the job from the GUI +2. The job runs the script `release.sh` + a. Service account credentials are read + b. `gcloud auth activate-service-account` is performed + c. `gcloud config set project` is performed +3. The script invokes `build.sh` in this repository +4. `build.sh` invokes Google Cloud Build with the `cloudbuild-*.yaml` + config files. + +## Building interpreters + +The interpreters used are now built in a separate step, and stored on GCS. +This allows the runtime images to be build more rapidly. + +To build the interpreters, run: + +```shell +gcloud builds submit . --config=cloudbuild_interpreters.yaml +``` + +## Building outside Jenkins + +To build this repository outside Jenkins, authenticate and authorize yourself +with `gcloud auth`, set the variables listed above, and run: + +``` shell +./build.sh +``` + +This assumes an environment similar to the internal Jenkins environment (Linux, +Debian or Ubuntu-like). 
+ +## Building locally + +To build this repository using local Docker commands instead of the Google +Cloud Build service, add the `--local` flag as shown: + +``` shell +./build.sh --local +``` + +To open an interactive shell session to this image after building it, do the +following: + +``` shell +docker run -it --entrypoint /bin/bash YOUR-IMAGE-NAME +``` + +## Running tests against a released image + +To run compatibility tests against an existing image, such as +`gcr.io/google-appengine/python:latest`, run: + +```shell +DOCKER_NAMESPACE=gcr.io/google-appengine TAG=latest ./build.sh --nobuild --test +``` + +## Running benchmarks + +There is a benchmark suite which compares the performance of interpreters +against each other. + +**Benchmark different versions of interpreter in the same release + +``` shell +DOCKER_NAMESPACE=DOCKER_NAMESPACE_EXAMPLE TAG=TAG_EXAMPLE ./build.sh --nobuild --benchmark +``` + +**Benchmark same versions of interpreter from release to release + +``` shell +DOCKER_NAMESPACE=DOCKER_NAMESPACE_EXAMPLE TAG1=TAG1_EXAMPLE TAG2=TAG2_EXAMPLE ./benchmark_between_releases.sh +``` + +Since these benchmarks are run on cloud instances, the timings may vary from run +to run. + +## Running system tests + +**TAKE NOTE: You will incur charges for use of Google Cloud Platform services!** + +System tests perform mutating operations against the real Google Cloud services. +Since these system tests may fail or be flaky for outside reasons such as +netorking issues, configuration errors, or services outages, they are run +separately from building the images, and should be run in their own project. + +To run the system tests, you need a Google Cloud Project with a service account. +From the [Google Cloud Console](https://console.cloud.google.com/), either +create a new project or switch to an existing one. Next, +[create a service account]( +https://cloud.google.com/iam/docs/creating-managing-service-accounts) that will +be used to run the system tests. 
Once you have a service account, +[create and download a service account key](https://cloud.google.com/iam/docs/managing-service-account-keys). + +In the +[IAM & Admin](https://console.cloud.google.com/permissions/projectpermissions) +section, grant the `Owner` role to the service account you created above. Also +grant the `Editor` role to the `cloud-logs@google.com` service account. + +Then, follow the +[system test setup instructions](https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/CONTRIBUTING.rst#running-system-tests). It +describes various steps, including running some scripts to populate and/or +delete datastore example data and indexes (populate_datastore.py, +clear_datastore.py, and `gcloud preview datastore create-indexes +system_tests/data/index.yaml`). + +From the cloud console, you will need to enable at least the following APIs for +your project: + +- Bigquery API +- Cloud Bigtable Admin API +- Cloud Spanner API +- Google Cloud Natural Language API +- Google Cloud Pub/Sub API +- Google Cloud Speech API +- Google Cloud Storage JSON API +- Google Cloud Translation API +- Google Cloud Vision API +- Stackdriver Logging API +- Stackdriver Monitoring API + +Once all the setup has been done, run the following: + +``` shell +./build.sh --nobuild --system_tests +``` diff --git a/base/Dockerfile b/base/Dockerfile deleted file mode 100644 index 9e464eb3..00000000 --- a/base/Dockerfile +++ /dev/null @@ -1,5 +0,0 @@ -FROM google/debian:wheezy - -RUN apt-get update -y && apt-get install --no-install-recommends -y -q build-essential python2.7 python2.7-dev python-pip git -RUN pip install -U pip -RUN pip install virtualenv diff --git a/base/README.md b/base/README.md deleted file mode 100644 index 7664c1d6..00000000 --- a/base/README.md +++ /dev/null @@ -1,24 +0,0 @@ -# google/python - -[`google/python`](https://index.docker.io/u/google/python) is a [docker](https://docker.io) base image that bundles the stable version of 
[python](http://python.org) installed from [debian stable](https://packages.debian.org/stable/) and [pip](https://pip.pypa.io/en/latest/) and [virtualenv](https://virtualenv.pypa.io/) installed from [PyPI](https://pypi.python.org/pypi). - -It serves as a base for the [`google/python-runtime`](https://index.docker.io/u/google/python-runtime) image. - -## Usage - -- Create a Dockerfile in your python application directory with the following content: - - FROM google/python - - WORKDIR /app - RUN virtualenv /env - ADD requirements.txt /app/requirements.txt - RUN /env/bin/pip install requirements.txt - ADD . /app - - CMD [] - ENTRYPOINT ["/env/bin/python", "/app/main.py"] - -- Run the following command in your application directory: - - docker build -t my/app . diff --git a/build.sh b/build.sh new file mode 100755 index 00000000..2083237d --- /dev/null +++ b/build.sh @@ -0,0 +1,243 @@ +#!/bin/bash + +# Copyright 2016 Google Inc. All rights reserved. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -euo pipefail + +# Actions +benchmark=0 # Should run benchmarks? +build=0 # Should build images? +client_test=0 # Should run Google Cloud Client Library tests +test=0 # Should run standard test suite? + +local=0 # Should run using local Docker daemon instead of GCR? 
+ +os_base=ubuntu18 # Which operating system base to use +interpreter=0 # Should build interpreters instead of images + +# Note that $gcloud_cmd has spaces in it +gcloud_cmd="gcloud builds submit" +# May need to install via "gcloud components install cloud-build-local" +local_gcloud_cmd="cloud-build-local --push=false --dryrun=false" + +# Helper functions +function fatal() { + echo "$1" >&2 + exit 1 +} + +function usage { + fatal "Usage: $0 [OPTION]... +Build and test artifacts in this repository + +Options: + --[no]benchmark: Run benchmarking suite (default false) + --[no]build: Build all images (default true if no options set) + --[no]test: Run basic tests (default true if no options set) + --[no]client_test: Run Google Cloud Client Library tests (default false) + --[no]local: Build images using local Docker daemon (default false) + --os_base: Which OS image to build on top of [debian8, ubuntu16, ubuntu18] +" +} + +# Read environment variables +if [ -z "${DOCKER_NAMESPACE:+set}" ] ; then + fatal 'Error: $DOCKER_NAMESPACE is not set; invoke with something like DOCKER_NAMESPACE=gcr.io/YOUR-PROJECT-NAME' +fi + +if [ -z "${BUILDER_DOCKER_NAMESPACE:+set}" ] ; then + export BUILDER_DOCKER_NAMESPACE="${DOCKER_NAMESPACE}" +fi + +if [ -z "${TAG:+set}" ] ; then + export TAG=`date +%Y-%m-%d-%H%M%S` +fi + +build_substitutions="\ +_BUILDER_DOCKER_NAMESPACE=${BUILDER_DOCKER_NAMESPACE},\ +_DOCKER_NAMESPACE=${DOCKER_NAMESPACE},\ +_TAG=${TAG}\ +" + +substitutions="\ +_DOCKER_NAMESPACE=${DOCKER_NAMESPACE},\ +_TAG=${TAG}\ +" + +# Read command line arguments +while [ $# -gt 0 ]; do + case "$1" in + --benchmark) + benchmark=1 + shift + ;; + --nobenchmark) + benchmark=0 + shift + ;; + --build) + build=1 + shift + ;; + --nobuild) + build=0 + shift + ;; + --client_test) + client_test=1 + shift + ;; + --noclient_test) + client_test=0 + shift + ;; + --local) + local=1 + shift + ;; + --nolocal) + local=0 + shift + ;; + --os_base=debian8) + os_base=debian8 + shift + ;; + --os_base=ubuntu16) 
+ os_base=ubuntu16 + shift + ;; + --os_base=ubuntu18) + os_base=ubuntu18 + shift + ;; + --test) + test=1 + shift + ;; + --notest) + test=0 + shift + ;; + --interpreter) + interpreter=1 + shift + ;; + *) + usage + ;; + esac +done + +# If no actions chosen, then tell the user +if [ "${benchmark}" -eq 0 -a \ + "${build}" -eq 0 -a \ + "${client_test}" -eq 0 -a \ + "${test}" -eq 0 \ +]; then + echo 'No actions specified, defaulting to --build --test' + build=1 + test=1 +fi + +# Running build local or remote? +if [ "${local}" -eq 1 ]; then + gcloud_cmd="${local_gcloud_cmd}" +fi + +# Pick OS image to use as base +if [ "${os_base}" == "debian8" ]; then + export OS_BASE_IMAGE="gcr.io/google-appengine/debian8:latest" +elif [ "${os_base}" == "ubuntu16" ]; then + export OS_BASE_IMAGE="gcr.io/gcp-runtimes/ubuntu_16_0_4:latest" +elif [ "${os_base}" == "ubuntu18" ]; then + export OS_BASE_IMAGE="gcr.io/gcp-runtimes/ubuntu_18_0_4:latest" +else + echo "Unsupported OS base image: $OS_BASE_IMAGE" + exit 1 +fi +export STAGING_IMAGE="${DOCKER_NAMESPACE}/python:${TAG}" +echo "Using base image name ${STAGING_IMAGE}" + +# Generate Dockerfiles +for outfile in \ + builder/gen-dockerfile/Dockerfile \ + python-interpreter-builder/Dockerfile \ + runtime-image/Dockerfile \ + tests/benchmark/Dockerfile \ + tests/eventlet/Dockerfile \ + tests/google-cloud-python/Dockerfile \ + tests/integration/Dockerfile \ + ; do + envsubst <"${outfile}".in >"${outfile}" \ + '$OS_BASE_IMAGE $STAGING_IMAGE $GOOGLE_CLOUD_PROJECT_FOR_TESTS $TAG' +done + +# Make some files available to the runtime builder Docker context +mkdir -p builder/gen-dockerfile/data +for file in \ + scripts/gen_dockerfile.py \ + scripts/validation_utils.py \ + scripts/data/* \ + ; do + cp -a "${file}" "builder/gen-dockerfile/${file##scripts/}" +done + +# Make a file available to the eventlet test. 
+cp -a scripts/testdata/hello_world/main.py tests/eventlet/main.py + +# Build interpreters and push to GCS +if [ "${interpreter}" -eq 1 ]; then + echo "Building interpreters" + ${gcloud_cmd} \ + --config=cloudbuild_interpreters.yaml \ + . +fi + +# Build images and push to GCR +if [ "${build}" -eq 1 ]; then + echo "Building images" + ${gcloud_cmd} \ + --config=cloudbuild.yaml \ + --substitutions="${build_substitutions}" \ + . +fi + +# Run the tests that don't require (too many) external services +if [ "${test}" -eq 1 ]; then + echo "Testing compatibility with popular Python libraries" + ${gcloud_cmd} \ + --config=cloudbuild_test.yaml \ + --substitutions="${substitutions}" \ + . +fi + +# Run client library tests +if [ "${client_test}" -eq 1 ]; then + echo "Testing compatibility with Google Cloud Client Libraries" + ${gcloud_cmd} \ + --config=cloudbuild_client_test.yaml \ + --substitutions="${substitutions}" \ + . +fi + +# Run benchmarks +if [ "${benchmark}" -eq 1 ] ; then + echo "Running benchmark" + ${gcloud_cmd} \ + --config=cloudbuild_benchmark.yaml \ + --substitutions="${substitutions}" \ + . +fi diff --git a/builder/gen-dockerfile/.gitignore b/builder/gen-dockerfile/.gitignore new file mode 100644 index 00000000..f45549bd --- /dev/null +++ b/builder/gen-dockerfile/.gitignore @@ -0,0 +1,3 @@ +Dockerfile +*.py +data/ diff --git a/builder/gen-dockerfile/Dockerfile.in b/builder/gen-dockerfile/Dockerfile.in new file mode 100644 index 00000000..92ceaecf --- /dev/null +++ b/builder/gen-dockerfile/Dockerfile.in @@ -0,0 +1,15 @@ +FROM ${STAGING_IMAGE} +LABEL python_version=python3.7 +RUN virtualenv --no-download /env -p python3.7 + +# Set virtualenv environment variables. This is equivalent to running +# source /env/bin/activate +ENV VIRTUAL_ENV /env +ENV PATH /env/bin:$PATH +ADD requirements.txt /builder/ +#virtualenv's pip is pegged at version 10.0, removing so +#newer versions get picked up +RUN pip install -r /builder/requirements.txt +ADD . 
/builder/ +WORKDIR /workspace +ENTRYPOINT [ "python", "/builder/gen_dockerfile.py" ] diff --git a/builder/gen-dockerfile/requirements.txt b/builder/gen-dockerfile/requirements.txt new file mode 100644 index 00000000..4a285555 --- /dev/null +++ b/builder/gen-dockerfile/requirements.txt @@ -0,0 +1 @@ +PyYAML==3.13 diff --git a/builder/python-latest.yaml b/builder/python-latest.yaml new file mode 100644 index 00000000..3e75acad --- /dev/null +++ b/builder/python-latest.yaml @@ -0,0 +1,12 @@ +# This is a cloudbuild.yaml template for the runtime builder +steps: +- # Generate application Dockerfile + name: 'gcr.io/gcp-runtimes/python/gen-dockerfile:latest' + args: [ + '--base-image=gcr.io/google-appengine/python:latest' + ] +- # Use that Dockerfile to create final application image + name: 'gcr.io/cloud-builders/docker:latest' + args: ['build', '-t', '$_OUTPUT_IMAGE', '.'] +images: + - '$_OUTPUT_IMAGE' diff --git a/builder/python-staging.yaml b/builder/python-staging.yaml new file mode 100644 index 00000000..1e977a89 --- /dev/null +++ b/builder/python-staging.yaml @@ -0,0 +1,12 @@ +# This is a cloudbuild.yaml template for the runtime builder +steps: +- # Generate application Dockerfile + name: 'gcr.io/gcp-runtimes/python/gen-dockerfile:staging' + args: [ + '--base-image=gcr.io/google-appengine/python:staging' + ] +- # Use that Dockerfile to create final application image + name: 'gcr.io/cloud-builders/docker:latest' + args: ['build', '-t', '$_OUTPUT_IMAGE', '.'] +images: + - '$_OUTPUT_IMAGE' diff --git a/cloudbuild.yaml b/cloudbuild.yaml new file mode 100644 index 00000000..e240780e --- /dev/null +++ b/cloudbuild.yaml @@ -0,0 +1,17 @@ +timeout: 10800s +steps: +- # Build base runtime image + name: gcr.io/cloud-builders/docker:latest + args: ['build', '--tag=${_DOCKER_NAMESPACE}/python:${_TAG}', + '--no-cache', '/workspace/runtime-image/'] + id: runtime +- # Build runtime builder image + name: gcr.io/cloud-builders/docker:latest + args: ['build', 
'--tag=${_BUILDER_DOCKER_NAMESPACE}/python/gen-dockerfile:${_TAG}', + '--no-cache', '/workspace/builder/gen-dockerfile/'] + id: gen-dockerfile + waitFor: ['runtime'] +images: [ + '${_DOCKER_NAMESPACE}/python:${_TAG}', + '${_BUILDER_DOCKER_NAMESPACE}/python/gen-dockerfile:${_TAG}', +] diff --git a/cloudbuild_benchmark.yaml b/cloudbuild_benchmark.yaml new file mode 100644 index 00000000..a960bc9b --- /dev/null +++ b/cloudbuild_benchmark.yaml @@ -0,0 +1,8 @@ +timeout: 3600s +steps: +- name: gcr.io/cloud-builders/docker:latest + args: ['build', '--tag=${_DOCKER_NAMESPACE}/python/tests/benchmark:${_TAG}', + '--no-cache', '/workspace/tests/benchmark/'] +images: [ + # Intentionally empty +] diff --git a/cloudbuild_client_test.yaml b/cloudbuild_client_test.yaml new file mode 100644 index 00000000..010e1ffd --- /dev/null +++ b/cloudbuild_client_test.yaml @@ -0,0 +1,9 @@ +timeout: 3600s +steps: +- # Build image to run google client library unit tests + name: gcr.io/cloud-builders/docker:latest + args: ['build', '--tag=${_DOCKER_NAMESPACE}/python/tests/google-cloud-python:${_TAG}', + '--no-cache', '/workspace/tests/google-cloud-python/'] +- # Run google client library unit tests + name: ${_DOCKER_NAMESPACE}/python/tests/google-cloud-python:${_TAG} +images: [] diff --git a/cloudbuild_interpreters.yaml b/cloudbuild_interpreters.yaml new file mode 100644 index 00000000..c75e59ad --- /dev/null +++ b/cloudbuild_interpreters.yaml @@ -0,0 +1,33 @@ +timeout: 10800s +steps: +- # Compile Python interpreters from source. This step happens first, then + # the next three in parallel. 
+ name: gcr.io/cloud-builders/docker:latest + args: ['build', '--tag=interpreter-builder', + '--no-cache', '/workspace/python-interpreter-builder/'] + id: interpreter-builder +- name: interpreter-builder + args: ['/scripts/build-python-3.4.sh'] + id: build-3.4 + waitFor: ['interpreter-builder'] +- name: interpreter-builder + args: ['/scripts/build-python-3.5.sh'] + id: build-3.5 + waitFor: ['interpreter-builder'] +- name: interpreter-builder + args: ['/scripts/build-python-3.6.sh'] + id: build-3.6 + waitFor: ['interpreter-builder'] +- name: interpreter-builder + args: ['/scripts/build-python-3.7.sh'] + id: build-3.7 + waitFor: ['interpreter-builder'] + +# Upload them to tbe build-id location +- name: gcr.io/cloud-builders/gsutil:latest + args: ['cp', '/workspace/runtime-image/*.tar.gz', 'gs://python-interpreters/$BUILD_ID/'] + waitFor: ['build-3.4', 'build-3.5', 'build-3.6', 'build-3.7'] + +# "Tag" this as latest +- name: gcr.io/cloud-builders/gsutil:latest + args: ['cp', '-r', 'gs://python-interpreters/$BUILD_ID/*', 'gs://python-interpreters/latest/'] diff --git a/cloudbuild_test.yaml b/cloudbuild_test.yaml new file mode 100644 index 00000000..ad674026 --- /dev/null +++ b/cloudbuild_test.yaml @@ -0,0 +1,114 @@ +timeout: 3600s +steps: +- # Explicitly pull image into GCB so that later steps work + name: '${_DOCKER_NAMESPACE}/python:${_TAG}' + args: [ + '/bin/true', + ] + id: runtime + +- # Validate structure of base runtime image + name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 + args: [ + '-test.v', + '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', + '/workspace/tests/virtualenv/virtualenv_default.yaml', + ] + waitFor: ['runtime'] +- name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 + args: [ + '-test.v', + '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', + '/workspace/tests/virtualenv/virtualenv_python27.yaml', + ] + waitFor: ['runtime'] +- name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 + args: [ + '-test.v', + '-image', 
'${_DOCKER_NAMESPACE}/python:${_TAG}', + '/workspace/tests/virtualenv/virtualenv_python34.yaml', + ] + waitFor: ['runtime'] +- name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 + args: [ + '-test.v', + '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', + '/workspace/tests/virtualenv/virtualenv_python35.yaml', + ] + waitFor: ['runtime'] +- name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 + args: [ + '-test.v', + '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', + '/workspace/tests/virtualenv/virtualenv_python36.yaml', + ] + waitFor: ['runtime'] +- name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 + args: [ + '-test.v', + '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', + '/workspace/tests/virtualenv/virtualenv_python37.yaml', + ] + waitFor: ['runtime'] +- name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 + args: [ + '-test.v', + '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', + '/workspace/tests/no-virtualenv/no-virtualenv.yaml', + ] + waitFor: ['runtime'] + +# Temporarily disabled because it fails on symbolic links in Ubuntu: +# https://github.com/GoogleCloudPlatform/container-structure-test/issues/77 +#- # Check license compliance +# name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 +# args: [ +# '-test.v', +# '-image', '${_DOCKER_NAMESPACE}/python:${_TAG}', +# '/workspace/tests/license-test/license-test.yaml' +# ] +# waitFor: ['runtime'] + +- # Do third-party library compatibility tests for Python 2 + name: gcr.io/cloud-builders/docker:latest + args: [ + 'build', '-t', 'python2-libraries-intermediate', '--build-arg', + 'intermediate_image=${_DOCKER_NAMESPACE}/python:${_TAG}', + '/workspace/tests/python2-libraries' + ] + id: python2-libraries-intermediate + waitFor: ['runtime'] +- name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 + args: [ + '-test.v', + '-image', 'python2-libraries-intermediate', + '/workspace/tests/python2-libraries/python2-libraries.yaml' + ] + waitFor: ['python2-libraries-intermediate'] + +- # Do 
third-party library compatibility tests for Python 3 + name: gcr.io/cloud-builders/docker:latest + args: [ + 'build', '-t', 'python3-libraries-intermediate', '--build-arg', + 'intermediate_image=${_DOCKER_NAMESPACE}/python:${_TAG}', + '/workspace/tests/python3-libraries' + ] + id: python3-libraries-intermediate + waitFor: ['runtime'] +- name: gcr.io/gcp-runtimes/container-structure-test:v0.2.1 + args: [ + '-test.v', + '-image', 'python3-libraries-intermediate', + '/workspace/tests/python3-libraries/python3-libraries.yaml' + ] + waitFor: ['python3-libraries-intermediate'] + +- # Run other compatibility tests + name: gcr.io/cloud-builders/docker:latest + args: [ + 'build', '--tag=${_DOCKER_NAMESPACE}/python/tests/eventlet:${_TAG}', + '--no-cache', '/workspace/tests/eventlet/' + ] + waitFor: ['runtime'] + +images: [] diff --git a/hello/Dockerfile b/hello/Dockerfile deleted file mode 100644 index acbdb568..00000000 --- a/hello/Dockerfile +++ /dev/null @@ -1 +0,0 @@ -FROM google/python-runtime diff --git a/hello/README.md b/hello/README.md deleted file mode 100644 index 46359905..00000000 --- a/hello/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# google/python-hello - -[`google/python-hello`](https://index.docker.io/u/google/python-hello) is a [docker](https://docker.io) image for the [Flask microframework](http://flask.pocoo.org/) hello world application. - -It is based on [`google/python-runtime`](https://index.docker.io/u/google/python-runtime) base image and listen on port `8080`. - -## Usage - -- Run the following command - - docker run -p 8080 google/python-hello diff --git a/hello/main.py b/hello/main.py deleted file mode 100644 index cf978895..00000000 --- a/hello/main.py +++ /dev/null @@ -1,9 +0,0 @@ -from flask import Flask -app = Flask(__name__) - -@app.route("/") -def hello(): - return "Hello World!" 
- -if __name__ == "__main__": - app.run(host='0.0.0.0', port=8080, debug=True) diff --git a/hello/requirements.txt b/hello/requirements.txt deleted file mode 100644 index 880a7bc4..00000000 --- a/hello/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -Flask==0.10 diff --git a/nox.py b/nox.py new file mode 100644 index 00000000..0ee6f443 --- /dev/null +++ b/nox.py @@ -0,0 +1,86 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import fnmatch +import os + +import nox + + +def _list_files(folder, pattern): + """Lists all files below the given folder that match the pattern.""" + for root, folders, files in os.walk(folder): + for filename in files: + if fnmatch.fnmatch(filename, pattern): + yield os.path.join(root, filename) + + +@nox.session +def check_requirements(session): + """Checks for out of date requirements and optionally updates them.""" + session.install('gcp-devrel-py-tools') + + if 'update' in session.posargs: + command = 'update-requirements' + else: + command = 'check-requirements' + + reqfiles = list(_list_files('.', 'requirements*.txt')) + + for reqfile in reqfiles: + session.run('gcp-devrel-py-tools', command, reqfile) + + +@nox.session +def lint(session): + session.interpreter = 'python3' # So it understands Python3 syntax + session.install('flake8', 'flake8-import-order') + session.run( + 'flake8', + '--import-order-style', 'google', + '--application-import-names', + 
@nox.session
def cover(session):
    """Run the final coverage report.

    This outputs the coverage report aggregating coverage from the unit
    test runs (not system test runs), and then erases coverage data.
    """
    # Pin the interpreter used for reporting; the per-version `tests`
    # sessions all appended to the same .coverage data file.
    session.interpreter = 'python3.6'
    session.install('coverage', 'pytest-cov')
    # Fail this session if aggregate branch coverage drops below 97%.
    session.run('coverage', 'report', '--show-missing', '--fail-under=97')
    # Reset the coverage data so the next run starts from a clean slate.
    session.run('coverage', 'erase')
def execute_query(query):
    """Execute a BigQuery query and return the result rows.

    Args:
        query (str): A standard-SQL query string.

    Returns:
        list: One tuple of column values per result row.
    """
    client = bigquery.Client()
    # Fixed: the argument was previously wrapped in redundant
    # parentheses (`client.query((query))`).
    query_job = client.query(query)

    # query_job.result() blocks until the query job completes.
    return [row.values() for row in query_job.result()]
def count_unique_tags(data):
    """Count how often each tag occurs across all tag lists.

    Args:
        data: An iterable of tag lists (one list per StackOverflow post).

    Returns:
        list: ``(timestamp, tag, count)`` tuples, one per distinct tag,
        all stamped with the current local time.
    """
    tag_counter = Counter()
    for tag_list in data:
        tag_counter.update(tag_list)

    # Stamp every row with the same collection timestamp.
    now = datetime.datetime.now()
    return [(now, tag, count) for tag, count in tag_counter.items()]
posts + unanswered_posts = get_posts_list_unanswered() + bq_utils.insert_rows( + project, DATASET_NAME, UNANSWERED_POSTS_TABLE_NAME, unanswered_posts) + + +if __name__ == '__main__': + main() diff --git a/perf_dashboard/python_clientlibs_download.py b/perf_dashboard/python_clientlibs_download.py new file mode 100644 index 00000000..cae3d2c4 --- /dev/null +++ b/perf_dashboard/python_clientlibs_download.py @@ -0,0 +1,135 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
def get_weekly_clientlibs_downloads(clientlibs_table_name, date_str):
    """Use a SQL query to collect the weekly download data of the client
    libraries.

    Args:
        clientlibs_table_name (str): Table name, which is the key in the
            CLIENTLIBS dict.
        date_str (str): A date string in "YYYYMMDD" format.

    Returns:
        list: rows of the query result, each prefixed with the query date.
    """
    client_libs = CLIENTLIBS[clientlibs_table_name]
    date_time = datetime.datetime.strptime(date_str, DATETIME_FORMAT)
    # The seven _TABLE_SUFFIX values covering the week that ends on date_str.
    week_dates = [(date_time + datetime.timedelta(days=-i))
                  .strftime(DATETIME_FORMAT)
                  for i in range(7)]
    query = """
    SELECT
      file.project as client_library_name,
      COUNT(*) as download_count
    FROM
      `the-psf.pypi.downloads*`
    WHERE
      file.project IN UNNEST(@client_libs)
      AND
      _TABLE_SUFFIX IN UNNEST(@week_dates)
    GROUP BY client_library_name
    """
    client = bigquery.Client()
    # Use query parameters (rather than string interpolation) so the
    # library/date lists are passed safely.
    # Fixed: PEP 8 requires spaces around the assignment operator
    # (was `query_parameters=[`).
    query_parameters = [
        bigquery.ArrayQueryParameter(
            'client_libs', 'STRING', client_libs),
        bigquery.ArrayQueryParameter(
            'week_dates', 'STRING', week_dates)
    ]
    job_config = bigquery.QueryJobConfig()
    job_config.query_parameters = query_parameters
    query_job = client.query(query, job_config=job_config)

    # Wait for the job to complete and get the results
    results = [row.values() for row in query_job.result()]

    rows = [(date_time,) + row for row in results]

    return rows
${DEB_PACKAGE_NAME} +Version: ${DEB_PACKAGE_VERSION} +Section: python +Priority: optional +Architecture: amd64 +Maintainer: Douglas Greiman +Description: Interactive high-level object-oriented language (version ${SHORT_VERSION}) + Python is a high-level, interactive, object-oriented language. Its ${SHORT_VERSION} version + includes an extensive class library with lots of goodies for + network programming, system administration, sounds and graphics. +Depends: libbz2-1.0, + libc6, + libdb5.3, + libexpat1, + libffi6, + liblzma5, + libmpdec2, + libncursesw5, + libreadline6, + libsqlite3-0, + libssl1.0.0, + libtinfo5, + mime-support, + zlib1g +Homepage: https://www.python.org diff --git a/python-interpreter-builder/Dockerfile.in b/python-interpreter-builder/Dockerfile.in new file mode 100644 index 00000000..9039fbf3 --- /dev/null +++ b/python-interpreter-builder/Dockerfile.in @@ -0,0 +1,48 @@ +# The Google App Engine base image is debian (jessie) with ca-certificates +# installed. +FROM ${OS_BASE_IMAGE} + +# Install Python build dependencies (based on Debian Build-Depends) +RUN apt-get update && apt-get install -yq \ + autoconf \ + blt-dev \ + bzip2 \ + debhelper \ + dpkg-dev \ + gcc \ + gettext-base \ + libbluetooth-dev \ + libbz2-dev \ + libdb-dev \ + libexpat1-dev \ + libffi-dev \ + libgdbm-dev \ + libgpm2 \ + liblzma-dev \ + libmpdec-dev \ + libncursesw5-dev \ + libreadline-dev \ + libsqlite3-dev \ + libssl-dev \ + locales \ + lsb-release \ + mime-support \ + net-tools \ + netbase \ + python \ + python3 \ + sharutils \ + time \ + tk-dev \ + wget \ + xauth \ + xvfb \ + zlib1g-dev \ + && rm -rf /var/lib/apt/lists/* + +# Setup locale. This prevents Python 3 IO encoding issues. 
+ENV LANG C.UTF-8 + +# Add build scripts +ADD scripts /scripts +ADD DEBIAN /DEBIAN diff --git a/python-interpreter-builder/README.md b/python-interpreter-builder/README.md new file mode 100644 index 00000000..7b718581 --- /dev/null +++ b/python-interpreter-builder/README.md @@ -0,0 +1,21 @@ +# Python Interpreter Builder + +This is a Docker-based Python interpreter builder. It builds Python interpreters +using a Debian-based Docker image. These interpreters are suitable to be moved +to another Debian-based Docker image. This avoids needing to install build +dependencies in the final container. + + +## Building + +Use: + + docker build --tag=google/python/interpreter-builder . + +The interpreters will be stored in the image at `/interpreters.tar.gz`. This is +suitable to be extracted from this image and added directly to another Docker +image via: + + ADD interpreters.tar.gz / + +Docker will automatically un-tar the interpreters into `/opt`. diff --git a/python-interpreter-builder/scripts/build-python-3.4.sh b/python-interpreter-builder/scripts/build-python-3.4.sh new file mode 100755 index 00000000..5c0bfb8e --- /dev/null +++ b/python-interpreter-builder/scripts/build-python-3.4.sh @@ -0,0 +1,141 @@ +#!/bin/bash + +set -euo pipefail +set -x + +# Get the source +mkdir -p /opt/sources +cd /opt/sources +wget --no-verbose https://www.python.org/ftp/python/3.4.8/Python-3.4.8.tgz +# SHA-256 generated via `shasum -a 256 [file]` +shasum --check <&2 + exit 1 +} + # Process command line +if [ -z "${1:+set}" -o -z "${2:+set}" ]; then + usage +fi +LONG_VERSION=$1 +BUILD_TAG=$2 +SHORT_VERSION=${1%.*} + +# Compute version specs +DEB_PACKAGE_NAME=gcp-python${SHORT_VERSION} +# Can't have - (hyphen) in debian revision as per +# https://www.debian.org/doc/debian-policy/ch-controlfields.html#s-f-Version +DEBIAN_REVISION=${BUILD_TAG//-/.} +DEB_PACKAGE_VERSION=${LONG_VERSION}-${DEBIAN_REVISION} + +PACKAGE_DIR=/opt/packages +# E.g. 
gcp-python3.6_3.6.2-1gcp~2017.07.25.110644_amd64.deb +DEB_FILENAME=${DEB_PACKAGE_NAME}_${DEB_PACKAGE_VERSION}_amd64.deb + +# Create directory for intermediate files +SCRATCH_DIR=$(mktemp --directory) +cd "${SCRATCH_DIR}" + +# Synthesize Debian control file. Note that the "Depends:" is +# currently Debian8-specific, and lacks version specifiers present in +# the standard Debian Python packages. +export DEB_PACKAGE_NAME DEB_PACKAGE_VERSION SHORT_VERSION +envsubst control \ + '${DEB_PACKAGE_NAME} ${DEB_PACKAGE_VERSION} ${SHORT_VERSION}' + +# Generate components of .deb archive +tar czf control.tar.gz control +tar czf data.tar.gz "/opt/python${SHORT_VERSION}" +echo "2.0" >debian-binary + +# Generate final .deb. +mkdir -p "${PACKAGE_DIR}" +ar rcD "${PACKAGE_DIR}/${DEB_FILENAME}" \ + debian-binary control.tar.gz data.tar.gz +rm debian-binary control.tar.gz data.tar.gz + +# Validate .deb +dpkg --install --dry-run "${PACKAGE_DIR}/${DEB_FILENAME}" + +# Add to list +echo "${DEB_FILENAME}" >> "${PACKAGE_DIR}/packages.txt" diff --git a/runtime-image/.gitignore b/runtime-image/.gitignore new file mode 100644 index 00000000..94143827 --- /dev/null +++ b/runtime-image/.gitignore @@ -0,0 +1 @@ +Dockerfile diff --git a/runtime-image/Dockerfile.in b/runtime-image/Dockerfile.in new file mode 100644 index 00000000..46387705 --- /dev/null +++ b/runtime-image/Dockerfile.in @@ -0,0 +1,57 @@ +# The Google App Engine base image is debian (jessie) with ca-certificates +# installed. +# Source: https://github.com/GoogleCloudPlatform/debian-docker +FROM ${OS_BASE_IMAGE} + +ADD resources /resources +ADD scripts /scripts + +# Install Python, pip, and C dev libraries necessary to compile the most popular +# Python libraries. +RUN /scripts/install-apt-packages.sh +RUN curl "https://bootstrap.pypa.io/pip/2.7/get-pip.py" -o "get-pip.py" && python ./get-pip.py && ln -s /usr/local/bin/pip /usr/bin/pip + +# Setup locale. This prevents Python 3 IO encoding issues. 
+ENV LANG C.UTF-8 +# Make stdout/stderr unbuffered. This prevents delay between output and cloud +# logging collection. +ENV PYTHONUNBUFFERED 1 + +RUN wget https://storage.googleapis.com/python-interpreters/latest/interpreter-3.4.tar.gz && \ + wget https://storage.googleapis.com/python-interpreters/latest/interpreter-3.5.tar.gz && \ + wget https://storage.googleapis.com/python-interpreters/latest/interpreter-3.6.tar.gz && \ + wget https://storage.googleapis.com/python-interpreters/latest/interpreter-3.7.tar.gz && \ + tar -xzf interpreter-3.4.tar.gz && \ + tar -xzf interpreter-3.5.tar.gz && \ + tar -xzf interpreter-3.6.tar.gz && \ + tar -xzf interpreter-3.7.tar.gz && \ + rm interpreter-*.tar.gz + +# Add Google-built interpreters to the path +ENV PATH /opt/python3.7/bin:/opt/python3.6/bin:/opt/python3.5/bin:/opt/python3.4/bin:$PATH +RUN update-alternatives --install /usr/local/bin/python3 python3 /opt/python3.7/bin/python3.7 50 && \ + update-alternatives --install /usr/local/bin/pip3 pip3 /opt/python3.7/bin/pip3.7 50 + +# Upgrade pip (debian package version tends to run a few version behind) and +# install virtualenv system-wide. 
+RUN /usr/bin/pip install --upgrade -r /resources/requirements.txt && \ + /opt/python3.4/bin/pip3.4 install --upgrade -r /resources/requirements.txt && \ + rm -f /opt/python3.4/bin/pip /opt/python3.4/bin/pip3 && \ + /opt/python3.5/bin/pip3.5 install --upgrade -r /resources/requirements.txt && \ + rm -f /opt/python3.5/bin/pip /opt/python3.5/bin/pip3 && \ + /opt/python3.6/bin/pip3.6 install --upgrade -r /resources/requirements.txt && \ + rm -f /opt/python3.6/bin/pip /opt/python3.6/bin/pip3 && \ + /opt/python3.7/bin/pip3.7 install --upgrade -r /resources/requirements.txt && \ + rm -f /opt/python3.7/bin/pip /opt/python3.7/bin/pip3 && \ + /usr/bin/pip install --upgrade -r /resources/requirements-virtualenv.txt + +# Setup the app working directory +RUN ln -s /home/vmagent/app /app +WORKDIR /app + +# Port 8080 is the port used by Google App Engine for serving HTTP traffic. +EXPOSE 8080 +ENV PORT 8080 + +# The user's Dockerfile must specify an entrypoint with ENTRYPOINT or CMD. +CMD [] diff --git a/runtime-image/resources/apt-packages.txt b/runtime-image/resources/apt-packages.txt new file mode 100644 index 00000000..7b88f777 --- /dev/null +++ b/runtime-image/resources/apt-packages.txt @@ -0,0 +1,37 @@ +# utilities +git +mercurial +pkg-config +wget +# debian-provided interpreters +python2.7 +python2.7-dev +# Dependenies for third-party Python packages +# with C-extensions +build-essential +libcurl4-openssl-dev +libffi-dev +libjpeg-dev +libmysqlclient-dev +libpng12-dev +libpq-dev +libssl-dev +libxml2-dev +libxslt1-dev +swig +zlib1g-dev +# Needed by scipy/numpy +gfortran +libatlas-dev +libblas-dev +libfreetype6-dev +liblapack-dev +libquadmath0 +# Needed by pylibmc +libmemcached-dev +libsasl2-2 +libsasl2-dev +libsasl2-modules +sasl2-bin +# Needed by eventlet +netbase diff --git a/runtime-image/resources/requirements-virtualenv.txt b/runtime-image/resources/requirements-virtualenv.txt new file mode 100644 index 00000000..25f09c4a --- /dev/null +++ 
#!/bin/bash
# Install the apt packages listed in /resources/apt-packages.txt into the
# runtime image, then trim apt caches to keep the image small.

# Abort on the first failing command.
set -e

apt-get -q update

# awk keeps only lines whose first non-whitespace character is not '#'
# (i.e. drops comment/blank lines from the package list); `xargs -r`
# skips running apt-get entirely if the filtered list is empty.
xargs -a <(awk '/^\s*[^#]/' '/resources/apt-packages.txt') -r -- \
  apt-get install --no-install-recommends -yq

apt-get upgrade -yq

# Remove unneeded files.
apt-get clean
rm /var/lib/apt/lists/*_*
- -## Usage - -- Create a Dockerfile in your python application directory with the following content: - - FROM google/python-runtime - -- Run the following command in your application directory: - - docker build -t app . - -## Sample - -See the [sources](/hello) for [`google/python-hello`](https://index.docker.io/u/google/python-hello) based on this image. - -## Notes - -The image assumes that your application: - -- has a [`requirements.txt`](https://pip.pypa.io/en/latest/user_guide.html#requirements-files) file to specify its dependencies -- listens on port `8080` -- either has a `main.py` script as entrypoint or defines `ENTRYPOINT ["/env/bin/python", "/app/some_other_file.py"]` in its `Dockerfile` - -When building your application docker image, `ONBUILD` triggers: - -- Create a new virtualenv under the `/env` directory in the container -- Fetch the dependencies listed in `requirements.txt` into the virtualenv using `pip install` and leverage docker caching appropriately -- Copy the application sources under the `/app` directory in the container - diff --git a/scripts/data/Dockerfile.entrypoint.template b/scripts/data/Dockerfile.entrypoint.template new file mode 100644 index 00000000..f6b52bf6 --- /dev/null +++ b/scripts/data/Dockerfile.entrypoint.template @@ -0,0 +1 @@ +CMD {entrypoint} diff --git a/scripts/data/Dockerfile.install_app b/scripts/data/Dockerfile.install_app new file mode 100644 index 00000000..54c3d6cc --- /dev/null +++ b/scripts/data/Dockerfile.install_app @@ -0,0 +1 @@ +ADD . 
/app/ diff --git a/scripts/data/Dockerfile.preamble.template b/scripts/data/Dockerfile.preamble.template new file mode 100644 index 00000000..e4d005bd --- /dev/null +++ b/scripts/data/Dockerfile.preamble.template @@ -0,0 +1 @@ +FROM {base_image} diff --git a/scripts/data/Dockerfile.python_compat b/scripts/data/Dockerfile.python_compat new file mode 100644 index 00000000..1e4d6352 --- /dev/null +++ b/scripts/data/Dockerfile.python_compat @@ -0,0 +1,3 @@ +FROM gcr.io/google_appengine/python-compat-multicore +ADD . /app/ +RUN if [ -s requirements.txt ]; then pip install -r requirements.txt; fi diff --git a/scripts/data/Dockerfile.requirements_txt b/scripts/data/Dockerfile.requirements_txt new file mode 100644 index 00000000..f684c45c --- /dev/null +++ b/scripts/data/Dockerfile.requirements_txt @@ -0,0 +1,2 @@ +ADD requirements.txt /app/ +RUN pip install -r requirements.txt diff --git a/scripts/data/Dockerfile.virtualenv.template b/scripts/data/Dockerfile.virtualenv.template new file mode 100644 index 00000000..557b1992 --- /dev/null +++ b/scripts/data/Dockerfile.virtualenv.template @@ -0,0 +1,7 @@ +LABEL python_version=python{python_version} +RUN virtualenv --no-download /env -p python{python_version} + +# Set virtualenv environment variables. This is equivalent to running +# source /env/bin/activate +ENV VIRTUAL_ENV /env +ENV PATH /env/bin:$PATH diff --git a/scripts/data/dockerignore b/scripts/data/dockerignore new file mode 100644 index 00000000..8b927bb7 --- /dev/null +++ b/scripts/data/dockerignore @@ -0,0 +1,19 @@ +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +.dockerignore +Dockerfile +.git +.hg +.svn diff --git a/scripts/data/dockerignore.python_compat b/scripts/data/dockerignore.python_compat new file mode 100644 index 00000000..5ce5abfa --- /dev/null +++ b/scripts/data/dockerignore.python_compat @@ -0,0 +1,5 @@ +.dockerignore +Dockerfile +.git +.hg +.svn diff --git a/scripts/deploy_check.sh b/scripts/deploy_check.sh new file mode 100644 index 00000000..1e2f02c2 --- /dev/null +++ b/scripts/deploy_check.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +set -ex + +export KOKORO_GITHUB_DIR=${KOKORO_ROOT}/src/github +source ${KOKORO_GFILE_DIR}/kokoro/common.sh + +cd ${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY} +if [ -n "${RUNTIME_SPEC}" -a -f app.yaml.in ]; then + sed "s|\${RUNTIME_SPEC}|${RUNTIME_SPEC}|" app.yaml.in > app.yaml +fi + +cd ${KOKORO_GFILE_DIR}/appengine/integration_tests + +sudo -E /usr/local/bin/pip install --upgrade -r requirements.txt + +if [ -f ${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY}/requirements.txt ] +then + sudo -E /usr/local/bin/pip install --upgrade -r ${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY}/requirements.txt +fi + +export DEPLOY_LATENCY_PROJECT='cloud-deploy-latency' + +skip_flag="" + +if [ "${SKIP_CUSTOM_LOGGING_TESTS}" = "true" -o "${SKIP_BUILDERS}" = "true" ]; then + skip_flag="$skip_flag --skip-builders" +fi + +if [ "${SKIP_XRT}" = "true" ]; then + skip_flag="$skip_flag --skip-xrt" +fi + +python deploy_check.py -d ${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY} -l ${LANGUAGE} ${skip_flag} diff --git a/scripts/gen_dockerfile.py b/scripts/gen_dockerfile.py new file mode 100755 index 
00000000..97da60f6 --- /dev/null +++ b/scripts/gen_dockerfile.py @@ -0,0 +1,277 @@ +#!/usr/bin/env python3 + +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generate a Dockerfile and helper files for a Python application.""" + +import argparse +import collections +import collections.abc +import functools +import io +import os +import re +import sys + +import yaml + +import validation_utils + +# Validate characters for dockerfile image names. +# +# This roots out obvious mistakes, the full gory details are here: +# https://github.com/docker/distribution/blob/master/reference/regexp.go +IMAGE_REGEX = re.compile(r"""(?x) + ^ + [a-zA-Z0-9] # First char must be alphanumeric + [a-zA-Z0-9-_./:@+]* # Punctuation allowed after that + $ +""") + +# `entrypoint` is specified as free-form text parsed as a unix shell +# command line, which limits the sanity checking possible. We +# disallow newlines and control characters which would break the +# Dockerfile format. 
def get_app_config(raw_config, base_image, config_file, source_dir):
    """Read and validate the application runtime configuration.

    We validate the user input for security and better error messages.

    Consider parsing a yaml file which has a string value where we
    expected a list.  Python will happily use the string as a sequence
    of individual characters, at least for a while, leading to
    confusing results when it finally fails.

    We also try to prevent Dockerfile and Bash injection attacks.  For
    example, specifying entrypoint as "true\\nADD /etc/passwd /pwned"
    would allow the user to inject arbitrary directives into the
    Dockerfile, which is a support problem if nothing else.

    Args:
        raw_config (dict): deserialized app.yaml
        base_image (str): Docker image name to build on top of
        config_file (str): Path to user's app.yaml (might be .yaml)
        source_dir (str): Directory containing user's source code

    Returns:
        AppConfig: valid configuration

    Raises:
        ValueError: if app.yaml is not a mapping, or contains an invalid
            entrypoint or python_version value.
    """
    # Examine app.yaml
    if not isinstance(raw_config, collections.abc.Mapping):
        raise ValueError(
            'Expected {} contents to be a Mapping type, but found type "{}"'.
            format(config_file, type(raw_config)))

    # Short circuit for python compat.
    if validation_utils.get_field_value(
            raw_config, 'runtime', str) == 'python-compat':
        # python-compat uses a fixed template, so no further fields from
        # app.yaml are needed or validated.
        return AppConfig(
            base_image=None,
            dockerfile_python_version=None,
            entrypoint=None,
            has_requirements_txt=None,
            is_python_compat=True)

    entrypoint = validation_utils.get_field_value(
        raw_config, 'entrypoint', str)
    # Reject newlines/control characters that could inject extra
    # Dockerfile directives (see docstring above).
    if not PRINTABLE_REGEX.match(entrypoint):
        raise ValueError(
            'Invalid "entrypoint" value in app.yaml: {!r}'.format(entrypoint))

    # Mangle entrypoint in the same way as the Cloud SDK
    # (googlecloudsdk/third_party/appengine/api/validation.py)
    #
    # We could handle both string ("shell form") and list ("exec
    # form") but it appears that gcloud only handles string form.
    if entrypoint and not entrypoint.startswith('exec '):
        entrypoint = 'exec ' + entrypoint

    raw_runtime_config = validation_utils.get_field_value(
        raw_config, 'runtime_config', dict)
    python_version = validation_utils.get_field_value(
        raw_runtime_config, 'python_version', str)

    # Map the user-facing version string to the interpreter version
    # baked into the Dockerfile ('' means the default Python 2.7).
    dockerfile_python_version = PYTHON_INTERPRETER_VERSION_MAP.get(
        python_version)
    if dockerfile_python_version is None:
        valid_versions = str(sorted(PYTHON_INTERPRETER_VERSION_MAP.keys()))
        raise ValueError(
            'Invalid "python_version" field in "runtime_config" section '
            'of app.yaml: {!r}. Valid options are: {}'.
            format(python_version, valid_versions))

    # Examine user's files
    has_requirements_txt = os.path.isfile(
        os.path.join(source_dir, 'requirements.txt'))

    return AppConfig(
        base_image=base_image,
        dockerfile_python_version=dockerfile_python_version,
        entrypoint=entrypoint,
        has_requirements_txt=has_requirements_txt,
        is_python_compat=False)
def generate_files(app_config):
    """Generate a Dockerfile and helper files for an application.

    Args:
        app_config (AppConfig): Validated configuration

    Returns:
        dict: Map of filename to desired file contents
    """
    # Optional Dockerfile fragments; empty string when not applicable.
    requirements_fragment = ''
    if app_config.has_requirements_txt:
        requirements_fragment = get_data('Dockerfile.requirements_txt')

    entrypoint_fragment = ''
    if app_config.entrypoint:
        entrypoint_fragment = get_data(
            'Dockerfile.entrypoint.template').format(
                entrypoint=app_config.entrypoint)

    if app_config.is_python_compat:
        # python-compat apps use fixed, pre-baked templates.
        dockerfile = get_data('Dockerfile.python_compat')
        dockerignore = get_data('dockerignore.python_compat')
    else:
        # Assemble the Dockerfile from its ordered template fragments.
        fragments = [
            get_data('Dockerfile.preamble.template').format(
                base_image=app_config.base_image),
            get_data('Dockerfile.virtualenv.template').format(
                python_version=app_config.dockerfile_python_version),
            requirements_fragment,
            get_data('Dockerfile.install_app'),
            entrypoint_fragment,
        ]
        dockerfile = ''.join(fragments)
        dockerignore = get_data('dockerignore')

    return {
        'Dockerfile': dockerfile,
        '.dockerignore': dockerignore,
    }
Does not currently support multiple services + # with configuration filenames besides app.yaml + with io.open(config_file, 'r', encoding='utf8') as yaml_config_file: + raw_config = yaml.safe_load(yaml_config_file) + + # Determine complete configuration + app_config = get_app_config(raw_config, base_image, config_file, + source_dir) + + # Generate list of filenames and their textual contents + files = generate_files(app_config) + + # Write files + for filename, contents in files.items(): + full_filename = os.path.join(source_dir, filename) + with io.open(full_filename, 'w', encoding='utf8') as outfile: + outfile.write(contents) + + +def parse_args(argv): + """Parse and validate command line flags""" + parser = argparse.ArgumentParser() + parser.add_argument( + '--base-image', + type=functools.partial( + validation_utils.validate_arg_regex, flag_regex=IMAGE_REGEX), + default='gcr.io/google-appengine/python:latest', + help='Name of Docker image to use as base') + # In some cases, gcloud sets an environment variable to indicate + # the location of the application configuration file, rather than + # using the --config flag. 
The order of precedence from highest + # to lowest is: + # + # 1) --config flag + # 2) $GAE_APPLICATION_YAML_PATH environment variable + # 3) a file named "app.yaml" in the current working directory + parser.add_argument( + '--config', + type=functools.partial( + validation_utils.validate_arg_regex, flag_regex=PRINTABLE_REGEX), + default=(os.environ.get(GAE_APPLICATION_YAML_PATH) or 'app.yaml'), + help='Path to application configuration file' + ) + parser.add_argument( + '--source-dir', + type=functools.partial( + validation_utils.validate_arg_regex, flag_regex=PRINTABLE_REGEX), + default='.', + help=('Application source and output directory')) + args = parser.parse_args(argv[1:]) + return args + + +def main(): + args = parse_args(sys.argv) + generate_dockerfile_command(args.base_image, args.config, args.source_dir) + + +if __name__ == '__main__': + main() diff --git a/scripts/gen_dockerfile_test.py b/scripts/gen_dockerfile_test.py new file mode 100755 index 00000000..03eaa079 --- /dev/null +++ b/scripts/gen_dockerfile_test.py @@ -0,0 +1,274 @@ +#!/usr/bin/env python3 + +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Unit test for gen_dockerfile.py""" + +import argparse +import filecmp +import os +import shutil +import subprocess +import unittest.mock + +import pytest +import yaml + +import gen_dockerfile + + +# Expected list of files generated +EXPECTED_OUTPUT_FILES = set(('Dockerfile', '.dockerignore')) + + +@pytest.fixture +def testdata_dir(): + testdata_dir = os.path.join(os.path.dirname(__file__), 'testdata') + assert os.path.isdir(testdata_dir), ( + 'Could not run test: testdata directory not found') + return testdata_dir + + +def compare_file(filename, dir1, dir2): + """Compare identically named files in two different directories""" + assert filecmp.cmp( + os.path.join(dir1, filename), os.path.join(dir2, filename)) + + +@pytest.mark.parametrize('app_yaml, expected', [ + # Basic app.yaml + ('env: flex', { + 'base_image': 'some_image_name', + 'dockerfile_python_version': '', + 'has_requirements_txt': False, + 'entrypoint': '', + 'is_python_compat': False, + }), + ('env: flex\nruntime: python-compat', { + 'base_image': None, + 'dockerfile_python_version': None, + 'has_requirements_txt': None, + 'entrypoint': None, + 'is_python_compat': True, + }), + # All supported python versions + ('runtime_config:\n python_version:', { + 'dockerfile_python_version': '', + }), + ('runtime_config:\n python_version: 2', { + 'dockerfile_python_version': '', + }), + ('runtime_config:\n python_version: 3', { + 'dockerfile_python_version': '3.6', + }), + ('runtime_config:\n python_version: 3.4', { + 'dockerfile_python_version': '3.4', + }), + ('runtime_config:\n python_version: 3.5', { + 'dockerfile_python_version': '3.5', + }), + ('runtime_config:\n python_version: 3.6', { + 'dockerfile_python_version': '3.6', + }), + ('runtime_config:\n python_version: 3.7', { + 'dockerfile_python_version': '3.7', + }), + # entrypoint present + ('entrypoint: my entrypoint', { + 'entrypoint': 'exec my entrypoint', + }), +]) +def test_get_app_config_valid(app_yaml, expected): + config_file = 
'some_config_file' + base_image = 'some_image_name' + source_dir = 'some_source_dir' + raw_app_config = yaml.safe_load(app_yaml) + actual = gen_dockerfile.get_app_config( + raw_app_config, base_image, config_file, + source_dir) + for key, value in expected.items(): + assert getattr(actual, key) == value + + +def test_get_app_config_requirements_txt(): + """requirements.txt file present""" + app_yaml = 'env: flex' + expected = { + 'has_requirements_txt': True, + } + with unittest.mock.patch.object(os.path, 'isfile', return_value=True): + test_get_app_config_valid(app_yaml, expected) + + +@pytest.mark.parametrize('app_yaml', [ + # Empty app.yaml + '', + # Invalid entrypoint + 'entrypoint: "bad \\n entrypoint"', + # Invalid python version + 'runtime_config:\n python_version: 1', + 'runtime_config:\n python_version: python2', +]) +def test_get_app_config_invalid(app_yaml): + config_file = 'some_config_file' + base_image = 'some_image_name' + source_dir = 'some_source_dir' + raw_app_config = yaml.safe_load(app_yaml) + with pytest.raises(ValueError): + gen_dockerfile.get_app_config( + raw_app_config, base_image, config_file, source_dir) + + +# Basic AppConfig used below +_BASE_APP_CONFIG = gen_dockerfile.AppConfig( + base_image='', + dockerfile_python_version='', + entrypoint='', + has_requirements_txt=False, + is_python_compat=False, +) + + +@pytest.mark.parametrize('app_config, should_find, test_string', [ + # Requirements.txt + (_BASE_APP_CONFIG, False, 'ADD requirements.txt'), + (_BASE_APP_CONFIG._replace(has_requirements_txt=True), True, + 'ADD requirements.txt'), + # Entrypoint + (_BASE_APP_CONFIG, False, 'CMD'), + (_BASE_APP_CONFIG._replace(entrypoint='my entrypoint'), True, + 'CMD my entrypoint'), + (_BASE_APP_CONFIG._replace(entrypoint='exec my entrypoint'), True, + 'CMD exec my entrypoint'), + # Base runtime image + (_BASE_APP_CONFIG._replace(base_image='my_base_runtime_image'), True, + 'FROM my_base_runtime_image'), + # Python version + 
(_BASE_APP_CONFIG._replace(dockerfile_python_version='_my_version'), True, + 'python_version=python_my_version'), + # python-compat runtime + (_BASE_APP_CONFIG._replace(is_python_compat=True), True, + 'FROM gcr.io/google_appengine/python-compat-multicore'), +]) +def test_generate_files(app_config, should_find, test_string): + result = gen_dockerfile.generate_files(app_config) + assert set(result.keys()) == EXPECTED_OUTPUT_FILES + dockerfile = result['Dockerfile'] + if should_find: + assert test_string in dockerfile + else: + assert test_string not in dockerfile + + +def compare_against_golden_files(app, config_dir, testdata_dir): + golden_dir = os.path.join(testdata_dir, app + '_golden') + for filename in EXPECTED_OUTPUT_FILES: + compare_file(filename, config_dir, golden_dir) + + +@pytest.mark.parametrize('app', [ + # Sampled from https://github.com/GoogleCloudPlatform/python-docs-samples + 'hello_world', + # From an internal source. + 'hello_world_compat']) +def test_generate_dockerfile_command(tmpdir, testdata_dir, app): + """Generates output and compares against a set of golden files.""" + app_dir = os.path.join(testdata_dir, app) + + # Copy sample app to writable temp dir, and generate Dockerfile. + config_dir = os.path.join(str(tmpdir), 'config') + shutil.copytree(app_dir, config_dir) + gen_dockerfile.generate_dockerfile_command( + base_image='gcr.io/google-appengine/python', + config_file=os.path.join(config_dir, 'app.yaml'), + source_dir=config_dir) + compare_against_golden_files(app, config_dir, testdata_dir) + + +@pytest.mark.parametrize('app', [ + # Sampled from https://github.com/GoogleCloudPlatform/python-docs-samples + 'hello_world', + # From an internal source. 
+ 'hello_world_compat']) +@pytest.mark.xfail(not shutil.which('gcloud'), + reason='Google Cloud SDK is not installed') +def test_generate_dockerfile_golden(tmpdir, testdata_dir, app): + """Validate our golden files against gcloud app gen-config""" + app_dir = os.path.join(testdata_dir, app) + + # Copy sample app to writable temp dir, and generate Dockerfile. + gen_config_dir = os.path.join(str(tmpdir), 'gen_config') + shutil.copytree(app_dir, gen_config_dir) + app_yaml = os.path.join(gen_config_dir, 'app.yaml') + gcloud_args = [ + 'gcloud', '--quiet', 'beta', 'app', 'gen-config', + gen_config_dir, '--custom', '--config={}'.format(app_yaml) + ] + print('Invoking gcloud as {}'.format(gcloud_args)) + subprocess.check_call(gcloud_args) + compare_against_golden_files(app, gen_config_dir, testdata_dir) + + +@pytest.mark.parametrize('argv', [ + [], + ['argv0', '--base-image=nocolon'], + ['argv0', '--base-image=name:andcolon'], + ['argv0', '--base-image=name@sha256:digest'], +]) +def test_parse_args_valid(argv): + args = gen_dockerfile.parse_args(argv) + assert args is not None + + +@pytest.mark.parametrize('argv', [ + ['argv0', '--base-image='], + ['argv0', '--base-image=:'], + ['argv0', '--base-image=:noname'], +]) +def test_parse_args_invalid(argv): + def mock_error(*args): + """Prevent argparse from calling sys.exit()""" + raise AssertionError(*args) + + error_patch = unittest.mock.patch.object( + argparse.ArgumentParser, 'error', mock_error) + with error_patch: + with pytest.raises(AssertionError): + gen_dockerfile.parse_args(argv) + + +@pytest.mark.parametrize('argv, env, expected', [ + # Explicit flag wins + (['argv0', '--config=flag/path'], 'env/path', 'flag/path'), + (['argv0', '--config=flag/path'], '', 'flag/path'), + (['argv0', '--config=flag/path'], None, 'flag/path'), + # Otherwise env var wins + (['argv0'], 'env/path', 'env/path'), + # Otherwise use default name + (['argv0'], '', 'app.yaml'), + (['argv0'], None, 'app.yaml'), +]) +def 
test_parse_args_config(argv, env, expected): + if env is None: + mock_environ = {} + else: + mock_environ = {gen_dockerfile.GAE_APPLICATION_YAML_PATH: env} + with unittest.mock.patch.dict('os.environ', mock_environ, clear=True): + args = gen_dockerfile.parse_args(argv) + assert args.config == expected + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/scripts/integration-test.sh b/scripts/integration-test.sh new file mode 100644 index 00000000..6210e0a1 --- /dev/null +++ b/scripts/integration-test.sh @@ -0,0 +1,61 @@ +#!/bin/bash + +set -ex + +export KOKORO_GITHUB_DIR=${KOKORO_ROOT}/src/github +source ${KOKORO_GFILE_DIR}/kokoro/common.sh + +export GOOGLE_CLOUD_PROJECT=gcp-runtimes + +sudo -E /usr/local/bin/pip install --upgrade -r ${KOKORO_GFILE_DIR}/appengine/integration_tests/requirements.txt + +if [ -f ${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY}/requirements.txt ] +then + sudo -E /usr/local/bin/pip install --upgrade -r ${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY}/requirements.txt +fi + +export GOPATH=${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY} + +flags="" + +if [ -n "${STAGING_IMAGE}" ]; then + flags="$flags -i ${STAGING_IMAGE}" +fi + +if [ "${SKIP_STANDARD_LOGGING_TESTS}" = "true" ]; then + flags="$flags --skip-standard-logging-tests" +fi + +if [ "${SKIP_CUSTOM_LOGGING_TESTS}" = "true" ]; then + flags="$flags --skip-custom-logging-tests" +fi + +if [ "${SKIP_MONITORING_TESTS}" = "true" ]; then + flags="$flags --skip-monitoring-tests" +fi + +if [ "${SKIP_EXCEPTION_TESTS}" = "true" ]; then + flags="$flags --skip-exception-tests" +fi + +if [ "${SKIP_CUSTOM_TESTS}" = "true" ]; then + flags="$flags --skip-custom-tests" +fi + +if [ -n "${URL}" ]; then + flags="$flags --url ${URL}" +fi + +if [ -n "${BUILDER}" ]; then + flags="$flags --builder ${BUILDER}" + gcloud config set app/use_runtime_builders True + gcloud config set app/runtime_builders_root file://${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY} +fi + +if [ -n "${YAML}" ]; then + flags="$flags 
--yaml ${KOKORO_GITHUB_DIR}/${YAML}" +fi + + +chmod a+x ${KOKORO_GFILE_DIR}/appengine/integration_tests/testsuite/driver.py +${KOKORO_GFILE_DIR}/appengine/integration_tests/testsuite/driver.py -d ${KOKORO_GITHUB_DIR}/${SAMPLE_APP_DIRECTORY} ${flags} diff --git a/scripts/local_cloudbuild.py b/scripts/local_cloudbuild.py new file mode 100755 index 00000000..5c23a1bc --- /dev/null +++ b/scripts/local_cloudbuild.py @@ -0,0 +1,372 @@ +#!/usr/bin/env python3 + +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Emulate the Google Cloud Build locally. + +The input is a local cloudbuild.yaml file. This is translated into a +series of commands for the locally installed Docker daemon. These +commands are output as a shell script and optionally executed. + +The output images are not pushed to the Google Container Registry. +Not all cloudbuild.yaml functionality is supported. In particular, +substitutions are a simplified subset that doesn't include all the +corner cases and error conditions. + +See https://cloud.google.com/container-builder/docs/api/build-steps +for more information. 
+""" + +import argparse +import collections +import collections.abc +import functools +import io +import os +import re +import shlex +import subprocess +import sys + +import yaml + +import validation_utils + + +# Exclude non-printable control characters (including newlines) +PRINTABLE_REGEX = re.compile(r"""^[^\x00-\x1f]*$""") + +# Cloud Build substitutions +# https://cloud.google.com/cloud-build/docs/api/build-requests#substitutions +SUBSTITUTION_REGEX = re.compile(r"""(?x) + [$] # Dollar sign + ( + [A-Z_][A-Z0-9_]* # Variable name, no curly brackets + | + {[A-Z_][A-Z0-9_]*} # Variable name, with curly brackets + | + [$] # $$, translated to a single literal $ + ) +""") + +# Default builtin substitutions +DEFAULT_SUBSTITUTIONS = { + 'BRANCH_NAME': '', + 'BUILD_ID': 'abcdef12-3456-7890-abcd-ef0123456789', + 'COMMIT_SHA': '', + 'PROJECT_ID': 'dummy-project-id', + 'REPO_NAME': '', + 'REVISION_ID': '', + 'TAG_NAME': '', +} + +# Use this image for cleanup actions +DEBIAN_IMAGE = 'gcr.io/google-appengine/debian8' + +# File template +BUILD_SCRIPT_TEMPLATE = """\ +#!/bin/bash +# This is a generated file. Do not edit. + +set -euo pipefail + +SOURCE_DIR=. + +# Setup staging directory +HOST_WORKSPACE=$(mktemp -d -t local_cloudbuild_XXXXXXXXXX) +function cleanup {{ + if [ "${{HOST_WORKSPACE}}" != '/' -a -d "${{HOST_WORKSPACE}}" ]; then + # Expect a single error message about /workspace busy + {cleanup_str} 2>/dev/null || true + # Do not expect error messages here. Display but ignore. 
+ rmdir "${{HOST_WORKSPACE}}" || true + fi +}} +trap cleanup EXIT + +# Copy source to staging directory +echo "Copying source to staging directory ${{HOST_WORKSPACE}}" +rsync -avzq --exclude=.git "${{SOURCE_DIR}}" "${{HOST_WORKSPACE}}" + +# Build commands +{docker_str} +# End of build commands +echo "Build completed successfully" +""" + + +# Validated cloudbuild recipe + flags +CloudBuild = collections.namedtuple('CloudBuild', + 'output_script run steps substitutions') + +# Single validated step in a cloudbuild recipe +Step = collections.namedtuple('Step', 'args dir_ env name') + + +def sub_and_quote(s, substitutions, substitutions_used): + """Return a shell-escaped, variable substituted, version of the string s. + + Args: + s (str): Any string + subs (dict): Substitution map to apply + subs_used (set): Updated with names from `subs.keys()` when those + substitutions are encountered in `s` + """ + + def sub(match): + """Perform a single substitution.""" + variable_name = match.group(1) + if variable_name[0] == '{': + # Strip curly brackets + variable_name = variable_name[1:-1] + if variable_name == '$': + value = '$' + elif variable_name not in substitutions: + # Variables must be set + raise ValueError( + 'Variable "{}" used without being defined. Try adding ' + 'it to the --substitutions flag'.format(variable_name)) + else: + value = substitutions.get(variable_name) + substitutions_used.add(variable_name) + return value + + substituted_s = re.sub(SUBSTITUTION_REGEX, sub, s) + quoted_s = shlex.quote(substituted_s) + return quoted_s + + +def get_cloudbuild(raw_config, args): + """Read and validate a cloudbuild recipe + + Args: + raw_config (dict): deserialized cloudbuild.yaml + args (argparse.Namespace): command line flags + + Returns: + CloudBuild: valid configuration + """ + if not isinstance(raw_config, dict): + raise ValueError( + 'Expected {} contents to be of type "dict", but found type "{}"'. 
+ format(args.config, type(raw_config))) + + raw_steps = validation_utils.get_field_value(raw_config, 'steps', list) + if not raw_steps: + raise ValueError('No steps defined in {}'.format(args.config)) + + steps = [get_step(raw_step) for raw_step in raw_steps] + return CloudBuild( + output_script=args.output_script, + run=args.run, + steps=steps, + substitutions=args.substitutions, + ) + + +def get_step(raw_step): + """Read and validate a single cloudbuild step + + Args: + raw_step (dict): deserialized step + + Returns: + Step: valid build step + """ + if not isinstance(raw_step, dict): + raise ValueError( + 'Expected step to be of type "dict", but found type "{}"'. + format(type(raw_step))) + raw_args = validation_utils.get_field_value(raw_step, 'args', list) + args = [validation_utils.get_field_value(raw_args, index, str) + for index in range(len(raw_args))] + dir_ = validation_utils.get_field_value(raw_step, 'dir', str) + raw_env = validation_utils.get_field_value(raw_step, 'env', list) + env = [validation_utils.get_field_value(raw_env, index, str) + for index in range(len(raw_env))] + name = validation_utils.get_field_value(raw_step, 'name', str) + return Step( + args=args, + dir_=dir_, + env=env, + name=name, + ) + + +def generate_command(step, substitutions, substitutions_used): + """Generate a single shell command to run for a single cloudbuild step + + Args: + step (Step): Valid build step + subs (dict): Substitution map to apply + subs_used (set): Updated with names from `subs.keys()` when those + substitutions are encountered in an element of `step` + + Returns: + [str]: A single shell command, expressed as a list of quoted tokens. 
+ """ + quoted_args = [sub_and_quote(arg, substitutions, substitutions_used) + for arg in step.args] + quoted_env = [] + for env in step.env: + quoted_env.extend(['--env', sub_and_quote(env, substitutions, + substitutions_used)]) + quoted_name = sub_and_quote(step.name, substitutions, substitutions_used) + workdir = '/workspace' + if step.dir_: + workdir = os.path.join(workdir, sub_and_quote(step.dir_, substitutions, + substitutions_used)) + process_args = [ + 'docker', + 'run', + '--volume', + '/var/run/docker.sock:/var/run/docker.sock', + '--volume', + '/root/.docker:/root/.docker', + '--volume', + '${HOST_WORKSPACE}:/workspace', + '--workdir', + workdir, + ] + quoted_env + [quoted_name] + quoted_args + return process_args + + +def generate_script(cloudbuild): + """Generate the contents of a shell script + + Args: + cloudbuild (CloudBuild): Valid cloudbuild configuration + + Returns: + (str): Contents of shell script + """ + # This deletes everything in /workspace including hidden files, + # but not /workspace itself + cleanup_step = Step( + args=['rm', '-rf', '/workspace'], + dir_='', + env=[], + name=DEBIAN_IMAGE, + ) + cleanup_command = generate_command(cleanup_step, {}, set()) + subs_used = set() + docker_commands = [ + generate_command(step, cloudbuild.substitutions, subs_used) + for step in cloudbuild.steps] + + # Check that all user variables were referenced at least once + user_subs_unused = [name for name in cloudbuild.substitutions.keys() + if name not in subs_used and name[0] == '_'] + if user_subs_unused: + nice_list = '"' + '", "'.join(sorted(user_subs_unused)) + '"' + raise ValueError( + 'User substitution variables {} were defined in the ' + '--substitution flag but never used in the cloudbuild file.'. 
+ format(nice_list)) + + cleanup_str = ' '.join(cleanup_command) + docker_lines = [] + for docker_command in docker_commands: + line = ' '.join(docker_command) + '\n\n' + docker_lines.append(line) + docker_str = ''.join(docker_lines) + + s = BUILD_SCRIPT_TEMPLATE.format(cleanup_str=cleanup_str, + docker_str=docker_str) + return s + + +def make_executable(path): + """Set executable bit(s) on file""" + # http://stackoverflow.com/questions/12791997 + mode = os.stat(path).st_mode + mode |= (mode & 0o444) >> 2 # copy R bits to X + os.chmod(path, mode) + + +def write_script(cloudbuild, contents): + """Write a shell script to a file.""" + print('Writing build script to {}'.format(cloudbuild.output_script)) + with io.open(cloudbuild.output_script, 'w', encoding='utf8') as outfile: + outfile.write(contents) + make_executable(cloudbuild.output_script) + + +def local_cloudbuild(args): + """Execute the steps of a cloudbuild.yaml locally + + Args: + args: command line flags as per parse_args + """ + # Load and parse cloudbuild.yaml + with io.open(args.config, 'r', encoding='utf8') as cloudbuild_file: + raw_config = yaml.safe_load(cloudbuild_file) + + # Determine configuration + cloudbuild = get_cloudbuild(raw_config, args) + + # Create shell script + contents = generate_script(cloudbuild) + write_script(cloudbuild, contents) + + # Run shell script + if cloudbuild.run: + print('Running {}'.format(cloudbuild.output_script)) + args = [os.path.abspath(cloudbuild.output_script)] + subprocess.check_call(args) + + +def parse_args(argv): + """Parse and validate command line flags""" + parser = argparse.ArgumentParser( + description='Process cloudbuild.yaml locally to build Docker images') + parser.add_argument( + '--config', + type=functools.partial( + validation_utils.validate_arg_regex, flag_regex=PRINTABLE_REGEX), + default='cloudbuild.yaml', + help='Path to cloudbuild.yaml file' + ) + parser.add_argument( + '--output_script', + type=functools.partial( + 
validation_utils.validate_arg_regex, flag_regex=PRINTABLE_REGEX), + help='Filename to write shell script to', + ) + parser.add_argument( + '--no-run', + action='store_false', + help='Create shell script but don\'t execute it', + dest='run', + ) + parser.add_argument( + '--substitutions', + type=validation_utils.validate_arg_dict, + default={}, + help='Parameters to be substituted in the build specification', + ) + args = parser.parse_args(argv[1:]) + if not args.output_script: + args.output_script = args.config + "_local.sh" + return args + + +def main(): + args = parse_args(sys.argv) + local_cloudbuild(args) + + +if __name__ == '__main__': + main() diff --git a/scripts/local_cloudbuild_test.py b/scripts/local_cloudbuild_test.py new file mode 100755 index 00000000..b22d585b --- /dev/null +++ b/scripts/local_cloudbuild_test.py @@ -0,0 +1,410 @@ +#!/usr/bin/env python3 + +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Unit test for local_cloudbuild.py""" + +import argparse +import contextlib +import os +import re +import shutil +import subprocess +import unittest.mock + +import pytest +import yaml + +import local_cloudbuild + + +# Matches script boilerplate +STAGING_DIR_REGEX = re.compile( + b'(?m)Copying source to staging directory (.+)$') + + +@pytest.fixture +def testdata_dir(): + testdata_dir = os.path.join(os.path.dirname(__file__), 'testdata') + assert os.path.isdir(testdata_dir), ( + 'Could not run test: testdata directory not found') + return testdata_dir + + +@pytest.mark.parametrize('s, subs, expected, expected_used', [ + # Empty string + ('', {}, "''", []), + # No substitutions + ('a', {}, 'a', []), + # Unused substitition (ok here but error in generate_script) + ('a', {'FOO': 'foo'}, 'a', []), + ('a', {'_FOO': '_foo'}, 'a', []), + # Defined builtin substitution + ('a$FOOb', {'FOO': 'foo'}, 'afoob', ['FOO']), + ('a${FOO}b', {'FOO': 'foo'}, 'afoob', ['FOO']), + # Defined user substitution + ('a$_FOOb', {'_FOO': '_foo'}, 'a_foob', ['_FOO']), + ('a${_FOO}b', {'_FOO': '_foo'}, 'a_foob', ['_FOO']), + # Multiple substitutions + ('$FOO${FOO}${BAR}$FOO', + {'FOO': 'foo', 'BAR': 'bar'}, + 'foofoobarfoo', + ['FOO', 'BAR']), + # Invalid names + ('a $ b', {}, "'a $ b'", []), + ('a$foo b', {}, "'a$foo b'", []), + ('a$0FOO b', {}, "'a$0FOO b'", []), +]) +def test_sub_and_quote_valid(s, subs, expected, expected_used): + used = set() + actual = local_cloudbuild.sub_and_quote(s, subs, used) + assert actual == expected + assert used == set(expected_used) + + +@pytest.mark.parametrize('s, subs', [ + # Undefined builtin substitution + ('a$FOOb', {}), + ('a${FOO}b', {}), + # Undefined user substitution + ('a$_FOOb', {}), + ('a${_FOO}b', {}), +]) +def test_sub_and_quote_invalid(s, subs): + with pytest.raises(ValueError): + used = set() + local_cloudbuild.sub_and_quote(s, subs, used) + + +def have_docker(): + """Determine if the Docker daemon is present and usable""" + if 
((shutil.which('docker') is not None) and + (subprocess.call(['docker', 'info'], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL) == 0)): + return True + return False + + +_args = argparse.Namespace( + config='some_config_file', + output_script='some_output_script', + run=False, + substitutions={}, +) + + +def test_get_cloudbuild_valid(): + raw_yaml = 'steps:\n- name: step1\n- name: step2\n' + raw_config = yaml.safe_load(raw_yaml) + actual = local_cloudbuild.get_cloudbuild(raw_config, _args) + assert len(actual.steps) == 2 + + +@pytest.mark.parametrize('raw_yaml', [ + # Empty cloud build + '', + # No steps + 'foo: bar\n', + # Steps not a list + 'steps: astring\n', + ]) +def test_get_cloudbuild_invalid(raw_yaml): + raw_config = yaml.safe_load(raw_yaml) + with pytest.raises(ValueError): + local_cloudbuild.get_cloudbuild(raw_config, _args) + + +@pytest.mark.parametrize('raw_step, expected', [ + # Empty step + ({}, local_cloudbuild.Step( + args=[], + dir_='', + env=[], + name='', + )), + # Full step + ({'name': 'aname', + 'args': ['arg1', 2, 'arg3 with \n newline'], + 'env': ['ENV1=value1', 'ENV2=space in value2'], + 'dir': 'adir', + }, local_cloudbuild.Step( + args=['arg1', '2', 'arg3 with \n newline'], + env=['ENV1=value1', 'ENV2=space in value2'], + dir_='adir', + name='aname', + )), +]) +def test_get_step_valid(raw_step, expected): + actual = local_cloudbuild.get_step(raw_step) + assert actual == expected + + +@pytest.mark.parametrize('raw_step', [ + # Wrong type + [], + # More wrong types + {'args': 'not_a_list'}, + {'args': [[]]}, + {'env': 'not_a_list'}, + {'env': [{}]}, + {'dir': {}}, + {'name': []}, +]) +def test_get_step_invalid(raw_step): + with pytest.raises(ValueError): + local_cloudbuild.get_step(raw_step) + + +# Basic valid case +_base_step = local_cloudbuild.Step( + args=['arg1', 'arg2'], + dir_='', + env=['ENV1=value1', 'ENV2=value2'], + name='aname', +) +_subs = {'BUILTIN': 'builtin', '_USER': '_user'} + + +def test_generate_command_basic(): 
+ command = local_cloudbuild.generate_command(_base_step, _subs, set()) + assert command == [ + 'docker', + 'run', + '--volume', + '/var/run/docker.sock:/var/run/docker.sock', + '--volume', + '/root/.docker:/root/.docker', + '--volume', + '${HOST_WORKSPACE}:/workspace', + '--workdir', + '/workspace', + '--env', + 'ENV1=value1', + '--env', + 'ENV2=value2', + 'aname', + 'arg1', + 'arg2', + ] + + +@pytest.mark.parametrize('step, args', [ + # dir specified + (_base_step._replace(dir_='adir'), + ['--workdir', '/workspace/adir']), + # Shell quoting + (_base_step._replace(args=['arg with \n newline']), + ["'arg with \n newline'"]), + (_base_step._replace(dir_='dir/ with space/'), + ["/workspace/'dir/ with space/'"]), + (_base_step._replace(env=['env with space']), + ["'env with space'"]), + (_base_step._replace(name='a name'), + ["'a name'"]), + # Variable substitution + (_base_step._replace(name='a $BUILTIN substitution'), + ["'a builtin substitution'"]), + (_base_step._replace(name='a $_USER substitution'), + ["'a _user substitution'"]), + (_base_step._replace(name='a curly brace ${BUILTIN} substitution'), + ["'a curly brace builtin substitution'"]), + (_base_step._replace( + name='an escaped $$ or $$$$ or $$FOO or $${_FOO} is unescaped'), + ["'an escaped $ or $$ or $FOO or ${_FOO} is unescaped'"]), +]) +def test_generate_command_valid(step, args): + command = local_cloudbuild.generate_command(step, _subs, set()) + for arg in args: + assert arg in command + + +@pytest.mark.parametrize('step', [ + _base_step._replace(name='a $UNSET_BUILTIN substitution'), + _base_step._replace(name='a $_UNSET_USER substitution'), +]) +def test_generate_command_invalid(step): + with pytest.raises(ValueError): + local_cloudbuild.generate_command(step, _subs, set()) + + +def test_generate_script_golden(testdata_dir): + config_name = 'cloudbuild_ok.yaml' + expected_output_script = os.path.join( + testdata_dir, config_name + '_golden.sh') + cloudbuild = local_cloudbuild.CloudBuild( + 
output_script='test_generate_script', + run=False, + steps=[ + local_cloudbuild.Step( + args=['/bin/sh', '-c', 'printenv MESSAGE'], + dir_='', + env=['MESSAGE=Hello World!'], + name='debian', + ), + local_cloudbuild.Step( + args=['/bin/sh', '-c', 'printenv MESSAGE'], + dir_='', + env=['MESSAGE=Goodbye\\n And Farewell!', 'UNUSED=unused'], + name='debian', + ) + ], + substitutions=local_cloudbuild.DEFAULT_SUBSTITUTIONS, + ) + actual = local_cloudbuild.generate_script(cloudbuild) + # Compare output against golden + with open(expected_output_script, 'r', encoding='utf8') as expected_file: + expected = expected_file.read() + assert actual == expected + + +def test_generate_script_unused_user_substitution(): + cloudbuild = local_cloudbuild.CloudBuild( + output_script='', + run=False, + steps=[], + substitutions={'_FOO': '_foo'}, + ) + with pytest.raises(ValueError, match='User substitution variables'): + local_cloudbuild.generate_script(cloudbuild) + + +def test_make_executable(tmpdir): + test_script_filename = tmpdir.join('test_make_executable.sh') + with test_script_filename.open('w', encoding='utf8') as test_script: + test_script.write('#!/bin/sh\necho "Output from test_make_executable"') + local_cloudbuild.make_executable(str(test_script_filename)) + output = subprocess.check_output([str(test_script_filename)]) + assert output.decode('utf8') == "Output from test_make_executable\n" + + +def test_write_script(tmpdir): + contents = 'The contents\n' + output_script_filename = tmpdir.join('test_write_script') + cloudbuild = local_cloudbuild.CloudBuild( + output_script=str(output_script_filename), + run=False, + steps=[], + substitutions={}, + ) + local_cloudbuild.write_script(cloudbuild, contents) + with output_script_filename.open('r', encoding='utf8') as output_script: + actual = output_script.read() + assert actual == contents + + +@contextlib.contextmanager +def chdir(new_dir): + """Not threadsafe""" + old_dir = os.getcwd() + os.chdir(new_dir) + yield + 
os.chdir(old_dir) + + +@pytest.mark.parametrize('config_name, substitutions, exception, cleanup', [ + # Everything is ok + ('cloudbuild_ok.yaml', None, None, True), + # Builtin substitutions like $PROJECT_ID work + ('cloudbuild_builtin_substitutions.yaml', None, None, True), + # User substitutions like $_FOO work + ('cloudbuild_user_substitutions.yaml', + {'_FOO': 'this is foo value'}, + None, True + ), + # User substitutions like $_FOO fails when undefined + ('cloudbuild_user_substitutions.yaml', None, ValueError, False), + # Exit code 1 (failure) + ('cloudbuild_err_rc1.yaml', None, subprocess.CalledProcessError, True), + # Command not found + ('cloudbuild_err_not_found.yaml', None, subprocess.CalledProcessError, + True), + # Cleaning up files owned by root + ('cloudbuild_difficult_cleanup.yaml', None, None, True), +]) +def test_local_cloudbuild(testdata_dir, tmpdir, config_name, substitutions, + exception, cleanup): + if not have_docker(): + pytest.fail('This test requires a working Docker daemon') + + check_call_output = None + + def check_call(*args, **kw_args): + """Act like subprocess.check_call but store stdout""" + nonlocal check_call_output + try: + check_call_output = subprocess.check_output(*args, **kw_args) + print(check_call_output) + except subprocess.CalledProcessError as e: + check_call_output = e.output + print(check_call_output) + raise + + # Read cloudbuild.yaml from testdata file, write output to + # tempdir, and maybe try to run it + with unittest.mock.patch('subprocess.check_call', check_call): + if substitutions is None: + substitutions = local_cloudbuild.DEFAULT_SUBSTITUTIONS + should_succeed = (exception is None) + config = os.path.join(testdata_dir, config_name) + actual_output_script = tmpdir.join(config_name + '_local.sh') + args = argparse.Namespace( + config=config, + output_script=str(actual_output_script), + run=True, + substitutions=substitutions, + ) + + # The source directory of the build is currently hardcoded as + # '.', so we 
must chdir there. + with chdir(testdata_dir): + if should_succeed: + local_cloudbuild.local_cloudbuild(args) + else: + with pytest.raises(exception): + local_cloudbuild.local_cloudbuild(args) + + # Check that staging dir was cleaned up + if cleanup: + assert check_call_output is not None + match = re.search(STAGING_DIR_REGEX, check_call_output) + assert match + staging_dir = match.group(1) + assert not os.path.isdir(staging_dir) + + +@pytest.mark.parametrize('argv, expected', [ + # Test explicit output_script + (['argv0', '--output_script=my_output'], 'my_output'), + # Test implicit output_script + (['argv0', '--config=my_config'], 'my_config_local.sh'), +]) +def test_parse_args_output_script(argv, expected): + args = local_cloudbuild.parse_args(argv) + assert args.output_script == expected + + +@pytest.mark.parametrize('argv, expected', [ + # Test run flag (default) + (['argv0'], True), + (['argv0', '--no-run'], False), +]) +def test_parse_args_run_flag(argv, expected): + args = local_cloudbuild.parse_args(argv) + assert args.run == expected + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/scripts/release.sh b/scripts/release.sh new file mode 100644 index 00000000..cd499823 --- /dev/null +++ b/scripts/release.sh @@ -0,0 +1,16 @@ +#!/bin/bash +set -euo pipefail +export KOKORO_GITHUB_DIR=${KOKORO_ROOT}/src/github +source ${KOKORO_GFILE_DIR}/kokoro/common.sh + +cd ${KOKORO_GITHUB_DIR}/python-runtime + +if [ -z "${TAG:+set}" ]; then + export TAG=$(date +%Y-%m-%d-%H%M%S) +fi + +./build.sh $BUILD_FLAGS + +METADATA=$(pwd)/METADATA +cd ${KOKORO_GFILE_DIR}/kokoro +python note.py python -m ${METADATA} -t ${TAG} diff --git a/scripts/requirements-test.txt b/scripts/requirements-test.txt new file mode 100644 index 00000000..c8e698da --- /dev/null +++ b/scripts/requirements-test.txt @@ -0,0 +1,4 @@ +flask==1.0.2 +pytest==3.7.3 +pytest-cov==2.5.1 +pyyaml==3.13 diff --git a/scripts/testdata/cloudbuild_builtin_substitutions.yaml 
b/scripts/testdata/cloudbuild_builtin_substitutions.yaml new file mode 100644 index 00000000..0e39ba68 --- /dev/null +++ b/scripts/testdata/cloudbuild_builtin_substitutions.yaml @@ -0,0 +1,11 @@ +steps: +- name: debian + args: ['/bin/sh', '-c', 'echo "$PROJECT_ID"'] +- name: debian + args: ['/usr/bin/test', 'dummy-project-id', '=', '$PROJECT_ID'] +- name: debian + args: ['/usr/bin/test', 'dummy-project-id', '=', '${PROJECT_ID}'] +- name: debian + args: ['/bin/sh', '-c', 'test dummy-project-id = "$PROJECT_ID"'] +- name: debian + args: ['/bin/sh', '-c', 'test dummy-project-id = "${PROJECT_ID}"'] diff --git a/scripts/testdata/cloudbuild_difficult_cleanup.yaml b/scripts/testdata/cloudbuild_difficult_cleanup.yaml new file mode 100644 index 00000000..e76846d5 --- /dev/null +++ b/scripts/testdata/cloudbuild_difficult_cleanup.yaml @@ -0,0 +1,3 @@ +steps: +- name: debian + args: ['/bin/sh', '-c', 'mkdir root; umask 0000; touch root/deny_all.txt'] diff --git a/scripts/testdata/cloudbuild_err_not_found.yaml b/scripts/testdata/cloudbuild_err_not_found.yaml new file mode 100644 index 00000000..c7eb070d --- /dev/null +++ b/scripts/testdata/cloudbuild_err_not_found.yaml @@ -0,0 +1,3 @@ +steps: +- name: debian + args: ['/expected file not found'] diff --git a/scripts/testdata/cloudbuild_err_rc1.yaml b/scripts/testdata/cloudbuild_err_rc1.yaml new file mode 100644 index 00000000..3953a586 --- /dev/null +++ b/scripts/testdata/cloudbuild_err_rc1.yaml @@ -0,0 +1,3 @@ +steps: +- name: debian + args: ['/bin/sh', '-c', 'exit 1'] diff --git a/scripts/testdata/cloudbuild_ok.yaml b/scripts/testdata/cloudbuild_ok.yaml new file mode 100644 index 00000000..d58ca3f9 --- /dev/null +++ b/scripts/testdata/cloudbuild_ok.yaml @@ -0,0 +1,7 @@ +steps: +- name: debian + args: ['/bin/sh', '-c', 'printenv MESSAGE'] + env: ['MESSAGE=Hello World!'] +- name: debian + args: ['/bin/sh', '-c', 'printenv MESSAGE'] + env: ['MESSAGE=Goodbye\n And Farewell!', 'UNUSED=unused'] diff --git 
a/scripts/testdata/cloudbuild_ok.yaml_golden.sh b/scripts/testdata/cloudbuild_ok.yaml_golden.sh new file mode 100755 index 00000000..bda52bd2 --- /dev/null +++ b/scripts/testdata/cloudbuild_ok.yaml_golden.sh @@ -0,0 +1,31 @@ +#!/bin/bash +# This is a generated file. Do not edit. + +set -euo pipefail + +SOURCE_DIR=. + +# Setup staging directory +HOST_WORKSPACE=$(mktemp -d -t local_cloudbuild_XXXXXXXXXX) +function cleanup { + if [ "${HOST_WORKSPACE}" != '/' -a -d "${HOST_WORKSPACE}" ]; then + # Expect a single error message about /workspace busy + docker run --volume /var/run/docker.sock:/var/run/docker.sock --volume /root/.docker:/root/.docker --volume ${HOST_WORKSPACE}:/workspace --workdir /workspace gcr.io/google-appengine/debian8 rm -rf /workspace 2>/dev/null || true + # Do not expect error messages here. Display but ignore. + rmdir "${HOST_WORKSPACE}" || true + fi +} +trap cleanup EXIT + +# Copy source to staging directory +echo "Copying source to staging directory ${HOST_WORKSPACE}" +rsync -avzq --exclude=.git "${SOURCE_DIR}" "${HOST_WORKSPACE}" + +# Build commands +docker run --volume /var/run/docker.sock:/var/run/docker.sock --volume /root/.docker:/root/.docker --volume ${HOST_WORKSPACE}:/workspace --workdir /workspace --env 'MESSAGE=Hello World!' debian /bin/sh -c 'printenv MESSAGE' + +docker run --volume /var/run/docker.sock:/var/run/docker.sock --volume /root/.docker:/root/.docker --volume ${HOST_WORKSPACE}:/workspace --workdir /workspace --env 'MESSAGE=Goodbye\n And Farewell!' 
--env UNUSED=unused debian /bin/sh -c 'printenv MESSAGE' + + +# End of build commands +echo "Build completed successfully" diff --git a/scripts/testdata/cloudbuild_user_substitutions.yaml b/scripts/testdata/cloudbuild_user_substitutions.yaml new file mode 100644 index 00000000..13cb6602 --- /dev/null +++ b/scripts/testdata/cloudbuild_user_substitutions.yaml @@ -0,0 +1,11 @@ +steps: +- name: debian + args: ['/bin/sh', '-c', 'echo "$_FOO"'] +- name: debian + args: ['/usr/bin/test', 'this is foo value', '=', '$_FOO'] +- name: debian + args: ['/usr/bin/test', 'this is foo value', '=', '${_FOO}'] +- name: debian + args: ['/bin/sh', '-c', 'test "this is foo value" = "$_FOO"'] +- name: debian + args: ['/bin/sh', '-c', 'test "this is foo value" = "${_FOO}"'] diff --git a/scripts/testdata/hello_world/app.yaml b/scripts/testdata/hello_world/app.yaml new file mode 100644 index 00000000..e5ac514e --- /dev/null +++ b/scripts/testdata/hello_world/app.yaml @@ -0,0 +1,6 @@ +runtime: python +env: flex +entrypoint: gunicorn -b :$PORT main:app + +runtime_config: + python_version: 3 diff --git a/scripts/testdata/hello_world/main.py b/scripts/testdata/hello_world/main.py new file mode 100644 index 00000000..97eb37d8 --- /dev/null +++ b/scripts/testdata/hello_world/main.py @@ -0,0 +1,43 @@ +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# [START app] +import logging + +from flask import Flask + + +app = Flask(__name__) + + +@app.route('/') +def hello(): + """Return a friendly HTTP greeting.""" + return 'Hello World!' + + +@app.errorhandler(500) +def server_error(e): + logging.exception('An error occurred during a request.') + return """ + An internal error occurred:
{}
+ See logs for full stacktrace. + """.format(e), 500 + + +if __name__ == '__main__': + # This is used when running locally. Gunicorn is used to run the + # application on Google App Engine. See entrypoint in app.yaml. + app.run(host='127.0.0.1', port=8080, debug=True) +# [END app] diff --git a/scripts/testdata/hello_world/main_test.py b/scripts/testdata/hello_world/main_test.py new file mode 100644 index 00000000..4e230185 --- /dev/null +++ b/scripts/testdata/hello_world/main_test.py @@ -0,0 +1,24 @@ +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import main + + +def test_index(): + main.app.testing = True + client = main.app.test_client() + + r = client.get('/') + assert r.status_code == 200 + assert 'Hello World' in r.data.decode('utf-8') diff --git a/scripts/testdata/hello_world/requirements.txt b/scripts/testdata/hello_world/requirements.txt new file mode 100644 index 00000000..a34d076b --- /dev/null +++ b/scripts/testdata/hello_world/requirements.txt @@ -0,0 +1,2 @@ +Flask==1.0.2 +gunicorn==19.9.0 diff --git a/scripts/testdata/hello_world_compat/app.yaml b/scripts/testdata/hello_world_compat/app.yaml new file mode 100644 index 00000000..e514d42c --- /dev/null +++ b/scripts/testdata/hello_world_compat/app.yaml @@ -0,0 +1,13 @@ +service: default +runtime: python-compat +env: flex + +api_version: 1 +threadsafe: true + +beta_settings: + enable_app_engine_apis: true # Needed for compat apps. 
+ +handlers: +- url: .* + script: main.app diff --git a/scripts/testdata/hello_world_compat/main.py b/scripts/testdata/hello_world_compat/main.py new file mode 100644 index 00000000..40302722 --- /dev/null +++ b/scripts/testdata/hello_world_compat/main.py @@ -0,0 +1,14 @@ +"""The hello world flex app!""" + +import webapp2 + + +class HelloHandler(webapp2.RequestHandler): + + def get(self): + msg = 'Hello GAE Flex (env: flex) Compat-Runtime App\n' + self.response.headers['Content-Type'] = 'text/plain' + self.response.out.write(msg) + +app = webapp2.WSGIApplication([('/', HelloHandler)], + debug=True) diff --git a/scripts/testdata/hello_world_compat_golden/.dockerignore b/scripts/testdata/hello_world_compat_golden/.dockerignore new file mode 100644 index 00000000..5ce5abfa --- /dev/null +++ b/scripts/testdata/hello_world_compat_golden/.dockerignore @@ -0,0 +1,5 @@ +.dockerignore +Dockerfile +.git +.hg +.svn diff --git a/scripts/testdata/hello_world_compat_golden/Dockerfile b/scripts/testdata/hello_world_compat_golden/Dockerfile new file mode 100644 index 00000000..1e4d6352 --- /dev/null +++ b/scripts/testdata/hello_world_compat_golden/Dockerfile @@ -0,0 +1,3 @@ +FROM gcr.io/google_appengine/python-compat-multicore +ADD . /app/ +RUN if [ -s requirements.txt ]; then pip install -r requirements.txt; fi diff --git a/scripts/testdata/hello_world_golden/.dockerignore b/scripts/testdata/hello_world_golden/.dockerignore new file mode 100644 index 00000000..8b927bb7 --- /dev/null +++ b/scripts/testdata/hello_world_golden/.dockerignore @@ -0,0 +1,19 @@ +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +.dockerignore +Dockerfile +.git +.hg +.svn diff --git a/scripts/testdata/hello_world_golden/Dockerfile b/scripts/testdata/hello_world_golden/Dockerfile new file mode 100644 index 00000000..10396399 --- /dev/null +++ b/scripts/testdata/hello_world_golden/Dockerfile @@ -0,0 +1,12 @@ +FROM gcr.io/google-appengine/python +LABEL python_version=python3.6 +RUN virtualenv --no-download /env -p python3.6 + +# Set virtualenv environment variables. This is equivalent to running +# source /env/bin/activate +ENV VIRTUAL_ENV /env +ENV PATH /env/bin:$PATH +ADD requirements.txt /app/ +RUN pip install -r requirements.txt +ADD . /app/ +CMD exec gunicorn -b :$PORT main:app diff --git a/scripts/validation_utils.py b/scripts/validation_utils.py new file mode 100644 index 00000000..28de2d52 --- /dev/null +++ b/scripts/validation_utils.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python3 + +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Utilities for schema and command line validation""" + +import argparse +import re + + +# For easier development, we allow redefining builtins like +# --substitutions=PROJECT_ID=foo even though gcloud doesn't. +KEY_VALUE_REGEX = re.compile(r'^([A-Z_][A-Z0-9_]*)=(.*)$') + + +def get_field_value(container, field_name, field_type): + """Fetch a field from a container with typechecking and default values. + + The field value is coerced to the desired type. If the field is + not present, an instance of `field_type` is constructed with no + arguments and used as the default value. + + This function exists because yaml parsing can lead to surprising + outputs, and the resulting errors are confusing. For example: + entrypoint1: a string, but I can accidentally treat as an sequence + entrypoint2: [a, list, but, I, might, think, its, a, string] + version1: 3 # Parsed to int + version2: 3.1 # Parsed to float + version3: 3.1.1 # Parsed to str + feature: off # Parsed to the boolean False + + Args: + container (dict): Object decoded from yaml + field_name (str): Field that should be present in `container` + field_type (type): Expected type for field value + + Returns: + Any: Fetched or default value of field + + Raises: + ValueError: if field value cannot be converted to the desired type + """ + try: + value = container[field_name] + if value is None: + return field_type() + except (IndexError, KeyError): + return field_type() + + msg = 'Expected "{}" field to be of type "{}", but found type "{}"' + if not isinstance(value, field_type): + # list('some string') is a successful type cast as far as Python + # is concerned, but doesn't exactly produce the results we want. + # We have a whitelist of conversions we will attempt. 
+ whitelist = ( + (float, str), + (int, str), + (str, float), + (str, int), + (int, float), + ) + if (type(value), field_type) not in whitelist: + raise ValueError(msg.format(field_name, field_type, type(value))) + + try: + value = field_type(value) + except ValueError as e: + e.message = msg.format(field_name, field_type, type(value)) + raise + return value + + +def validate_arg_regex(flag_value, flag_regex): + """Check a named command line flag against a regular expression""" + if not re.match(flag_regex, flag_value): + raise argparse.ArgumentTypeError( + 'Value "{}" does not match pattern "{}"'.format( + flag_value, flag_regex.pattern)) + return flag_value + + +def validate_arg_dict(flag_value): + """Parse a command line flag as a key=val,... dict""" + if not flag_value: + return {} + entries = flag_value.split(',') + pairs = [] + for entry in entries: + match = re.match(KEY_VALUE_REGEX, entry) + if not match: + raise argparse.ArgumentTypeError( + 'Value "{}" should be a list like _KEY1=value1,_KEY2=value2"'. + format(flag_value)) + pairs.append((match.group(1), match.group(2))) + return dict(pairs) diff --git a/scripts/validation_utils_test.py b/scripts/validation_utils_test.py new file mode 100755 index 00000000..d759b276 --- /dev/null +++ b/scripts/validation_utils_test.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python3 + +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Unit test for validation_utils.py""" + +import argparse +import re + +import pytest + +import validation_utils + + +@pytest.mark.parametrize('container, field_name, field_type, expected', [ + # Normal case, field present and correct type + ({'present': 1}, 'present', int, 1), + ({'present': '1'}, 'present', str, '1'), + ({'present': [1]}, 'present', list, [1]), + ({'present': {1: 2}}, 'present', dict, {1: 2}), + # Missing field replaced by default + ({}, 'missing', str, ''), + # Valid conversions + ({'str_to_int': '1'}, 'str_to_int', int, 1), + ({'int_to_str': 1}, 'int_to_str', str, '1'), + # None + ({'None_to_int': None}, 'None_to_int', int, 0), + ({'None_to_str': None}, 'None_to_str', str, ''), +]) +def test_get_field_value_valid(container, field_name, field_type, expected): + assert validation_utils.get_field_value( + container, field_name, field_type) == expected + + +@pytest.mark.parametrize('container, field_name, field_type', [ + # Type conversion failures + ({'bad_list_to_dict': [1]}, 'bad_list_to_dict', dict), + ({'bad_list_to_str': [1]}, 'bad_list_to_str', str), + ({'bad_dict_to_list': {1: 2}}, 'bad_dict_to_list', list), + ({'bad_str_to_int': 'not_an_int'}, 'bad_str_to_int', int), + ({'bad_str_to_list': 'abc'}, 'bad_str_to_list', list), +]) +def test_get_field_value_invalid(container, field_name, field_type): + with pytest.raises(ValueError): + validation_utils.get_field_value(container, field_name, field_type) + + +def test_validate_arg_regex(): + assert validation_utils.validate_arg_regex( + 'abc', re.compile('a[b]c')) == 'abc' + with pytest.raises(argparse.ArgumentTypeError): + validation_utils.validate_arg_regex('abc', re.compile('a[d]c')) + + +@pytest.mark.parametrize('arg, expected', [ + # Normal case, field present and correct type + ('', {}), + ('_A=1', {'_A': '1'}), + ('_A=1,_B=2', {'_A': '1', '_B': '2'}), + # Repeated key is ok + ('_A=1,_A=2', {'_A': '2'}), + # Extra = is ok + ('_A=x=y=z,_B=2', {'_A': 'x=y=z', '_B': '2'}), + # No value is 
ok + ('_A=', {'_A': ''}), +]) +def test_validate_arg_dicts_valid(arg, expected): + assert validation_utils.validate_arg_dict(arg) == expected + + +@pytest.mark.parametrize('arg', [ + # No key + ',_A', + '_A,', + # Invalid variable name + '_Aa=1', + '_aA=1', + '0A=1', +]) +def test_validate_arg_dicts_invalid(arg): + with pytest.raises(argparse.ArgumentTypeError): + validation_utils.validate_arg_dict(arg) + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/tests/benchmark/.gitignore b/tests/benchmark/.gitignore new file mode 100644 index 00000000..94143827 --- /dev/null +++ b/tests/benchmark/.gitignore @@ -0,0 +1 @@ +Dockerfile diff --git a/tests/benchmark/Dockerfile.in b/tests/benchmark/Dockerfile.in new file mode 100644 index 00000000..ca0edc24 --- /dev/null +++ b/tests/benchmark/Dockerfile.in @@ -0,0 +1,24 @@ +FROM ${STAGING_IMAGE} + +# Install performance +RUN pip install performance + +# Create virtual environment +RUN pip install --upgrade virtualenv + +# Required for Python 3.4, see +# https://bugs.launchpad.net/ubuntu/+source/python3.4/+bug/1290847 +RUN apt-get update && apt-get install -y --force-yes python3-pip python3-venv + +RUN mkdir /result + +# Run the benchmark and compare the performance, add the +# --debug-single-value flag to let the benchmark run in fastest mode +RUN pyperformance run --debug-single-value --python=python2.7 -o /result/py2.7.json +RUN pyperformance run --debug-single-value --python=python3.4 -o /result/py3.4.json +RUN pyperformance run --debug-single-value --python=python3.5 -o /result/py3.5.json +RUN if [ -e "/opt/python3.6/bin/python3.6" ]; then pyperformance run --debug-single-value --python=python3.6 -o /result/py3.6.json; fi + +RUN pyperformance compare /result/py2.7.json /result/py3.4.json --output_style table +RUN pyperformance compare /result/py3.4.json /result/py3.5.json --output_style table +RUN if [ -e "/result/py3.6.json" ]; then pyperformance compare /result/py3.5.json /result/py3.6.json 
--output_style table; fi \ No newline at end of file diff --git a/tests/benchmark/benchmark_between_releases.sh b/tests/benchmark/benchmark_between_releases.sh new file mode 100755 index 00000000..0e105cf3 --- /dev/null +++ b/tests/benchmark/benchmark_between_releases.sh @@ -0,0 +1,72 @@ +#!/bin/bash + +# Build the benchmark image for release 1 from Dockerfile +echo "Building image for release 1" +export STAGING_IMAGE="${DOCKER_NAMESPACE}/python:${TAG1}" +envsubst <"Dockerfile".in >"Dockerfile" '$STAGING_IMAGE' +docker build --no-cache -t benchmark_1 . +rm Dockerfile + +# Build the benchmark image for release 2 from Dockerfile +echo "Building image for release 2" +export STAGING_IMAGE="${DOCKER_NAMESPACE}/python:${TAG2}" +envsubst <"Dockerfile".in >"Dockerfile" '$STAGING_IMAGE' +docker build --no-cache -t benchmark_2 . +rm Dockerfile + +echo "Successfully built images" + +# Create folders to hold the files +mkdir "$TAG1" +mkdir "$TAG2" + +# Start running the containers and copy the benchmark result for python versions from container to host +docker run -it --name benchmark_1 -h CONTAINER1 -v "${PWD}"/"$TAG1":/export benchmark_1 /bin/bash -c "cp /result/py*.json /export/" +docker run -it --name benchmark_2 -h CONTAINER2 -v "${PWD}"/"$TAG2":/export benchmark_2 /bin/bash -c "cp /result/py*.json /export/" + +echo "Start benchmarking the python interpreter performance between the two releases" + +# Compare the performance between the interpreter in different release +pyperformance compare "$TAG1"/py2.7.json "$TAG2"/py2.7.json --output_style table > py2.7_res +pyperformance compare "$TAG1"/py3.4.json "$TAG2"/py3.4.json --output_style table > py3.4_res +pyperformance compare "$TAG1"/py3.5.json "$TAG2"/py3.5.json --output_style table > py3.5_res + +# Check if the python3.6 benchmark result exists +if [[ ( -e '"$TAG1"/py3.6.json' ) && ( -e '"$TAG2"/py3.6.json' ) ]]; then + pyperformance compare "$TAG1"/py3.6.json "$TAG2"/py3.6.json --output_style table > py3.6_res; +fi + 
+echo "Start extracting data and generating CSV file, then upload to Cloud Storage and insert to Big Query table" + +# Extracting memory usage and running time data from the performace result json, generating CSV files +for path_to_file in $TAG1/*.json; do + python generate_csv.py --filename $path_to_file --tag $TAG1 +done + +for path_to_file in $TAG2/*.json; do + python generate_csv.py --filename $path_to_file --tag $TAG2 +done + +# Set the project that hold the cloud storage bucket and big query tables +gcloud config set project cloud-python-runtime-qa + +# Get the list of existing release data on Cloud Storage and skip if the current TAG1 or TAG2 existing in the list +gsutil ls gs://python-runtime-benchmark > existing_releases + +for container_tag in $TAG1 $TAG2; do + if grep --fixed-strings --quiet "$container_tag" existing_releases; then + echo "Performance data of $container_tag existed, so skip processing it." + else + # Upload the CSV files to Cloud Storage + gsutil cp -r $container_tag gs://python-runtime-benchmark + # Load the CSV files from Cloud Storage to Big Query table + # Load the performance data of each function + for path_to_file in $container_tag/py2.7.csv $container_tag/py3.4.csv $container_tag/py3.5.csv; do + bq load benchmark.benchmark_functions gs://python-runtime-benchmark/"$path_to_file" container_tag:string,runtime_version:string,function_name:string,time_used:float,mem_usage:float + done + # Load the average performance data of each runtime version in a release + bq load benchmark.benchmark_statistics gs://python-runtime-benchmark/"$container_tag"/averages.csv container_tag:string,runtime_version:string,ave_time_used:float,ave_mem_usage:float + fi +done + +echo "Completed" diff --git a/tests/benchmark/generate_csv.py b/tests/benchmark/generate_csv.py new file mode 100644 index 00000000..e9882303 --- /dev/null +++ b/tests/benchmark/generate_csv.py @@ -0,0 +1,106 @@ +# Copyright 2016 Google Inc. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import argparse +import csv +import json +import os +import sys + + +def generate_csv(filename, tag): + """Extract function name, time used and memory usage from the metadata and write to the output CSV file. + + Args: + filename (str): Filename of the performance json file to read + tag (str): Tag of the docker container + """ + with open(filename) as input: + data = json.load(input) + benchmarks = data["benchmarks"] + runtime_version = os.path.basename(filename).split(".json")[0] + + # Write data to CSV file + with open("{}.csv".format(os.path.splitext(filename)[0]), "wb") as output: + csv_writer = csv.writer(output, delimiter=',') + for benchmark in benchmarks: + try: + # Get the function name + func_name = benchmark["metadata"]["name"] + # Get the time used for this function, convert to millisecond + time_used = float(benchmark["runs"][0]["values"][0]) * 1000 + # Get the memory usage, convert to MB + mem_usage = float(benchmark["metadata"]["mem_max_rss"]) / float(1<<20) + line = [tag, runtime_version, func_name, time_used, mem_usage] + # Write to CSV file + csv_writer.writerow(line) + except KeyError: + # Skip the benchmark result if it does not contain the fields we want + pass + + +def get_averages(filename, tag): + """Calculate the averages of time_used and memory_usage and append to CSV file. 
+
+    Args:
+        filename (str): Filename of the performance json file to read
+        tag (str): Tag of the docker container
+    """
+    with open("{}.csv".format(os.path.splitext(filename)[0]), "rb") as input:
+        lines = input.readlines()
+        # Get the two columns of times_used and mem_usage
+        rows_of_data = [map(float, line.split(',')[-2:]) for line in lines]
+        # Calculate the sum of the two columns
+        col_sums = map(sum, zip(*rows_of_data))
+        # Calculate the average of the two columns by using the sum divided by the total number of lines
+        averages = [col_sum / len(lines) for col_sum in col_sums]
+
+    # Get the runtime version from filename
+    runtime_version = os.path.basename(filename).split(".json")[0]
+
+    # Write the averages to CSV file in appending mode
+    with open("{}/averages.csv".format(tag), "a+") as output:
+        try:
+            csv_writer = csv.writer(output, delimiter=',')
+            csv_writer.writerow([tag, runtime_version] + averages)
+        except IOError:
+            print("Could not write averages to file.")
+
+
+def parse_args(argv):
+    """Parse and validate command line flags"""
+    parser = argparse.ArgumentParser(
+        description='Read the python performance json file and extract data to generate CSV file.'
+ ) + parser.add_argument( + '--filename', + help='Filename of the performance json file to read' + ) + parser.add_argument( + '--tag', + help='Tag of the docker container' + ) + args = parser.parse_args(argv[1:]) + return args + + +def main(): + args = parse_args(sys.argv) + generate_csv(args.filename, args.tag) + get_averages(args.filename, args.tag) + + +if __name__ == '__main__': + main() diff --git a/tests/deploy_check/app.yaml b/tests/deploy_check/app.yaml new file mode 100644 index 00000000..c4a838e6 --- /dev/null +++ b/tests/deploy_check/app.yaml @@ -0,0 +1,3 @@ +runtime: python +env: flex +entrypoint: gunicorn -b :$PORT main:app diff --git a/tests/deploy_check/main.py b/tests/deploy_check/main.py new file mode 100644 index 00000000..97eb37d8 --- /dev/null +++ b/tests/deploy_check/main.py @@ -0,0 +1,43 @@ +# Copyright 2015 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START app] +import logging + +from flask import Flask + + +app = Flask(__name__) + + +@app.route('/') +def hello(): + """Return a friendly HTTP greeting.""" + return 'Hello World!' + + +@app.errorhandler(500) +def server_error(e): + logging.exception('An error occurred during a request.') + return """ + An internal error occurred:
{}
+ See logs for full stacktrace. + """.format(e), 500 + + +if __name__ == '__main__': + # This is used when running locally. Gunicorn is used to run the + # application on Google App Engine. See entrypoint in app.yaml. + app.run(host='127.0.0.1', port=8080, debug=True) +# [END app] diff --git a/tests/deploy_check/requirements.txt b/tests/deploy_check/requirements.txt new file mode 100644 index 00000000..a34d076b --- /dev/null +++ b/tests/deploy_check/requirements.txt @@ -0,0 +1,2 @@ +Flask==1.0.2 +gunicorn==19.9.0 diff --git a/tests/eventlet/.gitignore b/tests/eventlet/.gitignore new file mode 100644 index 00000000..14be8077 --- /dev/null +++ b/tests/eventlet/.gitignore @@ -0,0 +1,2 @@ +Dockerfile +main.py diff --git a/tests/eventlet/Dockerfile.in b/tests/eventlet/Dockerfile.in new file mode 100644 index 00000000..fa65a236 --- /dev/null +++ b/tests/eventlet/Dockerfile.in @@ -0,0 +1,13 @@ +FROM ${STAGING_IMAGE} +LABEL python_version=python3.6 +RUN virtualenv --no-download /env -p python3.6 + +# Set virtualenv environment variables. This is equivalent to running +# source /env/bin/activate +ENV VIRTUAL_ENV /env +ENV PATH /env/bin:$PATH +ADD requirements.txt /app/ +RUN pip install -r requirements.txt +ADD . 
/app/ +RUN gunicorn -k eventlet -b :$PORT --daemon main:app ; \ + wget --retry-connrefused --tries=5 http://localhost:$PORT/ diff --git a/tests/eventlet/README.md b/tests/eventlet/README.md new file mode 100644 index 00000000..0c2a969c --- /dev/null +++ b/tests/eventlet/README.md @@ -0,0 +1 @@ +# Test the Python base image against the 'eventlet' library diff --git a/tests/eventlet/requirements.txt b/tests/eventlet/requirements.txt new file mode 100644 index 00000000..1e2ea66b --- /dev/null +++ b/tests/eventlet/requirements.txt @@ -0,0 +1,10 @@ +click==6.7 +enum-compat==0.0.2 +eventlet==0.24.1 +Flask==2.2.5 +greenlet==0.4.14 +gunicorn==19.9.0 +itsdangerous==0.24 +Jinja2==2.10 +MarkupSafe==1.1.1 +Werkzeug==2.2.3 diff --git a/tests/google-cloud-python/.gitignore b/tests/google-cloud-python/.gitignore new file mode 100644 index 00000000..94143827 --- /dev/null +++ b/tests/google-cloud-python/.gitignore @@ -0,0 +1 @@ +Dockerfile diff --git a/tests/google-cloud-python/Dockerfile.in b/tests/google-cloud-python/Dockerfile.in new file mode 100644 index 00000000..b82c18d1 --- /dev/null +++ b/tests/google-cloud-python/Dockerfile.in @@ -0,0 +1,15 @@ +FROM ${STAGING_IMAGE} + +# Get the source. 
+RUN git clone --depth 1 https://github.com/GoogleCloudPlatform/google-cloud-python.git +WORKDIR google-cloud-python + +# Upgrade setuptools +RUN pip install --upgrade setuptools + +# Install nox +RUN pip install --upgrade nox-automation + +# Run unit tests for all supported Python versions +ADD run_unit_tests.sh /run_unit_tests.sh +ENTRYPOINT ["/run_unit_tests.sh"] diff --git a/tests/google-cloud-python/run_unit_tests.sh b/tests/google-cloud-python/run_unit_tests.sh new file mode 100755 index 00000000..c386f1c0 --- /dev/null +++ b/tests/google-cloud-python/run_unit_tests.sh @@ -0,0 +1,34 @@ +#!/bin/sh +set -eu + +cd /app/google-cloud-python + +exit_code=0 +failed_files= +for noxfile in */nox.py; do + if [ "${noxfile}" = "dlp/nox.py" ]; then + echo "**** Skipping ${noxfile} ****" + continue + fi + echo "**** Starting tests in ${noxfile} ****" + nox \ + -f "${noxfile}" \ + -e \ + "unit(py='2.7')" \ + "unit(py='3.4')" \ + "unit(py='3.5')" \ + "unit(py='3.6')" \ + || { + echo "**** FAILED tests in ${noxfile} ****" + exit_code=1 + failed_files="${failed_files} ${noxfile}" + } + echo "**** Finished tests in ${noxfile} ****" +done + +if [ "${exit_code}" -eq 0 ]; then + echo "**** All tests passed ****" +else + echo "**** There were test failures:${failed_files} ****" +fi +exit "${exit_code}" diff --git a/tests/integration/.gitignore b/tests/integration/.gitignore new file mode 100644 index 00000000..94143827 --- /dev/null +++ b/tests/integration/.gitignore @@ -0,0 +1 @@ +Dockerfile diff --git a/tests/integration/Dockerfile.in b/tests/integration/Dockerfile.in new file mode 100644 index 00000000..e18d60ef --- /dev/null +++ b/tests/integration/Dockerfile.in @@ -0,0 +1,22 @@ +# Copyright 2016 Google Inc. All rights reserved. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM ${STAGING_IMAGE} + +COPY . /app +WORKDIR /app + +RUN pip install -r requirements.txt + +ENTRYPOINT ["gunicorn", "-b", ":8080", "server:app"] diff --git a/tests/integration/app.yaml b/tests/integration/app.yaml new file mode 100644 index 00000000..ce2a1243 --- /dev/null +++ b/tests/integration/app.yaml @@ -0,0 +1,2 @@ +runtime: custom +env: flex diff --git a/tests/integration/requirements.txt b/tests/integration/requirements.txt new file mode 100644 index 00000000..a0634bee --- /dev/null +++ b/tests/integration/requirements.txt @@ -0,0 +1,9 @@ +Flask==2.2.5 +google-cloud-error-reporting==0.32.1 +google-cloud-logging==1.12.1 +google-cloud-monitoring==0.33.0 +gunicorn==19.9.0 +requests==2.31.0 +retrying==1.3.3 +six==1.12.0 +protobuf>=3.6.0 diff --git a/tests/integration/server.py b/tests/integration/server.py new file mode 100755 index 00000000..0b4382c6 --- /dev/null +++ b/tests/integration/server.py @@ -0,0 +1,283 @@ +#!/usr/bin/python + +# Copyright 2017 Google Inc. All rights reserved. + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from functools import wraps +import json +import logging +import os + +import google.cloud.logging +import google.cloud.monitoring +import google.cloud.error_reporting +import google.cloud.exceptions + +from flask import Flask, request, jsonify + +log_funcs = { + 'DEBUG': (logging.debug, 'stderr'), + 'INFO': (logging.info, 'stderr'), + 'WARNING': (logging.warning, 'stderr'), + 'ERROR': (logging.error, 'stderr'), + 'CRITICAL': (logging.critical, 'stderr') +} + +_APPENGINE_FLEXIBLE_ENV_VM = 'GAE_APPENGINE_HOSTNAME' +"""Environment variable set in App Engine when vm:true is set.""" + +_APPENGINE_FLEXIBLE_ENV_FLEX = 'GAE_INSTANCE' +"""Environment variable set in App Engine when env:flex is set.""" + +app = Flask(__name__) + + +def verify_request(f): + @wraps(f) + def verified_func(*args, **kwargs): + request_data = request.get_json() + if request_data is None: + raise ErrorResponse('Unable to parse request JSON: ' + 'did you set the Content-type header?') + token = request_data.get('token') + if not token: + raise ErrorResponse('Please provide token name') + return f(*args, request_data=request_data, token=token, **kwargs) + return verified_func + + +@app.route('/') +def hello_world(): + return 'Hello World!' 
+ + +@app.route('/logging_custom', methods=['POST']) +@verify_request +def _logging_custom(request_data, token): + log_name = request_data.get('log_name') + if not log_name: + raise ErrorResponse('Please provide log name') + level = request_data.get('level') + if not level: + raise ErrorResponse('Please provide log level') + + log_source = _log_custom(token, log_name, level) + + return log_source, 200 + + +@app.route('/logging_standard', methods=['POST']) +@verify_request +def _logging_standard(request_data, token): + level = request_data.get('level') + if not level: + raise ErrorResponse('Please provide log level') + + log_source = _log_default(token, level) + + return log_source, 200 + + +def _log_custom(token, log_name, level): + """ + Write a custom log entry to Stackdriver using a client library. + + Keyword arguments: + token -- 16-character (8-byte) hexadecimal token, to be written + as a log entry. + log_name -- The name of the logging group to be written to. + level -- enum(LogSeverity), level of the log to write + + Once the entry is written to Stackdriver, the test driver will retrieve + all entries with the name 'log_name' at level 'level', and verify there + is an entry with the same value as 'token', indicating the entry + was written successfully. + """ + try: + client = google.cloud.logging.Client() + gcloud_logger = client.logger(log_name) + gcloud_logger.log_text(token, severity=str(level)) + except google.cloud.exceptions.GoogleCloudError as e: + logging.error('Error while writing logs: {0}'.format(e)) + raise ErrorResponse('Error while writing logs: {0}'.format(e)) + + return log_name + + +def _log_default(token, level): + """ + Write a log entry to Stackdriver through the default logging module. + + Keyword arguments: + token -- 16-character (8-byte) hexadecimal token, to be written + as a log entry. 
+ level -- enum(LogSeverity), level of the log to write + + Once the entry is written to Stackdriver, the test driver will retrieve + all entries from the default log stream (sent back to the driver) at level + 'level', and verify there is an entry with the same value as 'token', + indicating the entry was written successfully. + """ + + try: + func_pair = log_funcs[level] + f = func_pair[0] + source = func_pair[1] + f(token) + except google.cloud.exceptions.GoogleCloudError as e: + logging.error('Error while writing logs: {0}'.format(e)) + raise ErrorResponse('Error while writing logs: {0}'.format(e)) + + # this is fine regardless of environment, it's only used in GAE logs + return 'appengine.googleapis.com%2F{0}'.format(source) + + +@app.route('/monitoring', methods=['POST']) +@verify_request +def _monitoring(request_data, token): + name = request_data.get('name') + if not name: + raise ErrorResponse('Please provide metric name') + + try: + client = google.cloud.monitoring.Client() + + try: + descriptor = client.fetch_metric_descriptor(name) + if descriptor is None: + _create_descriptor(name, client) + except (google.cloud.exceptions.Forbidden, + google.cloud.exceptions.NotFound) as ignored: # noqa: F841 + _create_descriptor(name, client) + + _write_metric(name, client, token) + + except google.cloud.exceptions.GoogleCloudError as e: + logging.error('Error while writing custom metric: {0}'.format(e)) + raise ErrorResponse('Error while writing custom metric: {0}'.format(e)) + + return 'OK', 200 + + +def _write_metric(name, client, token): + """ + Write a metric to Stackdriver Monitoring. + + Keyword arguments: + name -- The name of the metric to write. Takes the form + 'custom.googleapis.com/{metric_name}' + client -- the authenticated instance of a Google Cloud Client + token -- a random 64-bit integer token. The metric value to be written. 
+ + Once the metric is written, the test driver will retrieve all metrics + written with the provided name, and verify there is an entry with the + same value as the provided token. + """ + metric = client.metric(name, {}) + resource = client.resource('global', labels={}) + client.write_point(metric, resource, token) + + +def _create_descriptor(name, client): + """ + Create a new metric descriptor. + This descriptor is implicitly used to write a point-value metric to + Stackdriver. + """ + logging.info('No descriptor found with name {0}: Creating...'.format(name)) + descriptor = client.metric_descriptor( + name, + metric_kind=google.cloud.monitoring.MetricKind.GAUGE, + value_type=google.cloud.monitoring.ValueType.INT64, + description='Test Metric') + descriptor.create() + + +@app.route('/exception', methods=['POST']) +@verify_request +def _exception(request_data, token): + try: + client = google.cloud.error_reporting.Client() + try: + raise NameError + except Exception: + client.report_exception() + + client.report(token) + except google.cloud.exceptions.GoogleCloudError as e: + logging.error('Error while reporting exception: {0}'.format(e)) + raise ErrorResponse('Error while reporting exception: {0}'.format(e)) + + return 'OK', 200 + + +@app.route('/trace', methods=['POST']) +def _trace(): + return 'OK', 204 + + +@app.route('/custom', methods=['GET']) +def _custom(): + tests = [ + { + 'name': 'foo', + 'path': '/', + 'timeout': 500 + } + ] + return json.dumps(tests), 200 + + +@app.route('/environment', methods=['GET']) +def _check_environment(): + # determine what cloud env we're running in; essentially, GAE vs GKE + # for GAE, we'll check the existence env vars set on + # vm:true or env:flex + # if neither exist, assume we're in GKE + environment = "GKE" + if (_APPENGINE_FLEXIBLE_ENV_VM in os.environ or + _APPENGINE_FLEXIBLE_ENV_FLEX in os.environ): + environment = "GAE" + return environment, 200 + + +class ErrorResponse(Exception): + status_code = 400 + + def 
__init__(self, message, status_code=None, payload=None): + Exception.__init__(self) + self.message = message + if status_code is not None: + self.status_code = status_code + self.payload = payload + + def to_dict(self): + rv = dict(self.payload or ()) + rv['message'] = self.message + return rv + + +@app.errorhandler(ErrorResponse) +def handle_invalid_usage(error): + response = jsonify(error.to_dict()) + response.status_code = error.status_code + return response + + +if __name__ == '__main__': + # set up logging module to write to Stackdriver + client = google.cloud.logging.Client() + client.setup_logging(log_level=logging.DEBUG) + logging.getLogger().setLevel(logging.DEBUG) + app.run(debug=True, port=8080) diff --git a/tests/license-test/license-test.yaml b/tests/license-test/license-test.yaml new file mode 100644 index 00000000..348fbc9b --- /dev/null +++ b/tests/license-test/license-test.yaml @@ -0,0 +1,6 @@ +schemaVersion: "1.0.0" + +# See https://github.com/GoogleCloudPlatform/container-structure-test/blob/master/README.md +licenseTests: + - debian: true + files: [] diff --git a/tests/no-virtualenv/no-virtualenv.yaml b/tests/no-virtualenv/no-virtualenv.yaml new file mode 100644 index 00000000..4f6c3f48 --- /dev/null +++ b/tests/no-virtualenv/no-virtualenv.yaml @@ -0,0 +1,55 @@ +schemaVersion: "1.0.0" +commandTests: + - name: "default python installation" + command: ["which", "python"] + expectedOutput: ["/usr/bin/python\n"] + + - name: "default pip installation" + command: ["which", "pip"] + expectedOutput: ["/usr/local/bin/pip\n"] + + - name: "default pip python version" + command: ["pip", "-V"] + expectedOutput: ["pip .* from .*python 2[.]7"] + + - name: "default virtualenv installation" + command: ["which", "virtualenv"] + expectedOutput: ["/usr/local/bin/virtualenv\n"] + + - name: "default python2.7 installation" + command: ["which", "python2.7"] + expectedOutput: ["/usr/bin/python2.7\n"] + + - name: "default python3.4 installation" + command: ["which", 
"python3.4"] + expectedOutput: ["/opt/python3.4/bin/python3.4\n"] + + - name: "default python3.5 installation" + command: ["which", "python3.5"] + expectedOutput: ["/opt/python3.5/bin/python3.5\n"] + + - name: "default python3.6 installation" + command: ["which", "python3.6"] + expectedOutput: ["/opt/python3.6/bin/python3.6\n"] + + - name: "default gunicorn installation" + setup: [["pip", "install", "gunicorn"]] + command: ["which", "gunicorn"] + expectedOutput: ["/usr/local/bin/gunicorn\n"] + + - # Regression test for issue187 + name: "default python3 installation" + command: ["which", "python3"] + expectedOutput: ["/usr/local/bin/python3\n"] + - name: "default python3 version" + command: ["python3", "--version"] + expectedOutput: ["Python 3.7.9\n"] + - name: "default pip3 installation" + command: ["which", "pip3"] + expectedOutput: ["/usr/local/bin/pip3\n"] + + - name: "default flask installation" + # Checks that 'pip' and 'python' are using the same Python version + setup: [["pip", "install", "flask"]] + command: ["python", "-c", "import flask; print(flask.__file__)"] + expectedOutput: ["/usr/local/lib/python2.7/dist-packages/flask"] diff --git a/tests/python2-libraries/Dockerfile b/tests/python2-libraries/Dockerfile new file mode 100644 index 00000000..c2647292 --- /dev/null +++ b/tests/python2-libraries/Dockerfile @@ -0,0 +1,3 @@ +ARG intermediate_image +FROM $intermediate_image +COPY requirements.txt /requirements.txt diff --git a/tests/python2-libraries/python2-libraries.yaml b/tests/python2-libraries/python2-libraries.yaml new file mode 100644 index 00000000..f9fef5a0 --- /dev/null +++ b/tests/python2-libraries/python2-libraries.yaml @@ -0,0 +1,13 @@ +schemaVersion: "1.0.0" + +globalEnvVars: + - key: "VIRTUAL_ENV" + value: "/env" + - key: "PATH" + value: "/env/bin:$PATH" + +commandTests: + - name: "requirements" + setup: [["virtualenv", "-p", "python", "/env"]] + command: ["pip", "install", "-r", "/requirements.txt"] + exitCode: 0 diff --git 
a/tests/python2-libraries/requirements.txt b/tests/python2-libraries/requirements.txt new file mode 100644 index 00000000..f0596d67 --- /dev/null +++ b/tests/python2-libraries/requirements.txt @@ -0,0 +1,207 @@ +alembic==1.0.0 +amqp==2.3.2 +amqplib==1.0.2 +ansible==7.0.0 +anyjson==0.3.3 +apache-libcloud==2.3.0 +argparse==1.4.0 +astroid==1.6.5 +awscli==1.16.1 +babel==2.6.0 +backports.ssl_match_hostname==3.5.0.1 +bcdoc==0.16.0 +beautifulsoup4==4.6.3 +beautifulsoup==3.2.1 +billiard==3.5.0.4 +blessings==1.7 +blinker==1.4 +boto==2.49.0 +botocore==1.11.1 +bottle==0.12.13 +carbon<1.1.1 +celery==4.2.1 +certifi==2023.7.22 +cffi==1.11.5 +chardet==3.0.4 +click==6.7 +cliff==2.13.0 +cmd2==0.8.9 +colorama==0.3.9 +configobj==5.0.6 +cov-core==1.15.0 +coverage==4.5.1 +coveralls==1.4.0 +crcmod==1.7 +cryptography==2.3.1 +cssselect==1.0.3 +cython==0.28.5 +decorator==4.3.0 +django-celery==3.2.2 +django-debug-toolbar==1.9.1 +django-extensions==2.1.1 +django<2.0 +django_compress==1.0.1 +djangorestframework==3.8.2 +docker-py==1.10.6 +docopt==0.6.2 +docutils==0.14 +ecdsa==0.13 +elasticsearch==6.3.1 +enum34==1.1.6 +eventlet==0.24.1 +extras==1.0.0 +fabric==2.3.1 +fixtures==3.0.0 +flake8==3.5.0 +flask==2.2.5 +funcsigs==1.0.2 +functools32==3.2.3.post2 +futures==3.2.0 +gevent==1.3.6 +google-api-python-client==1.7.4 +graphite-web==1.1.3 +greenlet==0.4.14 +gunicorn==19.9.0 +hiredis==0.2.0 +honcho==1.0.1 +html5lib==1.0.1 +httplib2==0.11.3 +idna==2.7 +ipaddress==1.0.22 +iso8601==0.1.12 +isodate==0.6.0 +itsdangerous==0.24 +jinja2==2.10 +jmespath==0.9.3 +jsonschema==2.6.0 +kombu==4.2.1 +linecache2==1.0.0 +logilab-common==1.4.2 +lxml==4.2.4 +m2crypto==0.30.1 +mako==1.0.7 +manifestparser==1.1 +markdown==2.6.11 +markupsafe==1.0 +matplotlib==2.2.3 +mccabe==0.6.1 +meld3==1.0.2 +mock==2.0.0 +mozcrash==1.0 +mozdevice==1.0.1 +mozfile==1.2 +mozinfo==0.10 +mozlog==3.8 +moznetwork==0.27 +mozprocess==0.26 +mozprofile==1.1.0 +mozrunner==7.0.1 +msgpack-python==0.5.6 +mysql-python==1.2.5 +ndg-httpsclient==0.5.1 
+netaddr==0.7.19 +netifaces==0.10.7 +newrelic==4.2.0.100 +nose==1.3.7 +numpy==1.22.0 +oauth2==1.9.0.post1 +oauth2client==4.1.2 +oauthlib==2.1.0 +ordereddict==1.1 +oslo.config==6.4.0 +pandas==0.23.4 +paramiko==2.4.1 +passlib==1.7.1 +paste==2.0.3 +pastedeploy==1.5.2 +pastescript==2.0.2 +pbr==4.2.0 +pep8==1.7.1 +pexpect==4.6.0 +pika==0.12.0 +pillow==9.3.0 +pip==18.0 +prettytable==0.7.2 +protobuf==3.6.1 +psutil==5.4.7 +psycopg2==2.7.5 +py==1.5.4 +pyasn1-modules==0.2.2 +pyasn1==0.4.4 +pycparser==2.18 +pycrypto==2.6.1 +pycurl==7.43.0.2 +pyflakes==2.0.0 +pygments==2.2.0 +pyjwt==1.6.4 +pylibmc==1.5.2 +pylint==1.9.3 +pymongo==3.7.1 +pymysql==0.9.2 +pyopenssl==18.0.0 +pyparsing==2.2.0 +pyramid==1.9.2 +pystache==0.5.4 +pytest-cov==2.5.1 +pytest==3.7.3 +python-cjson==1.2.1 +python-daemon==2.2.0 +python-dateutil==2.7.3 +python-gflags==3.1.2 +python-keystoneclient==3.17.0 +python-memcached==1.59 +python-mimeparse==1.6.0 +python-novaclient==11.0.0 +python-subunit==1.3.0 +python-swiftclient==3.6.0 +pytz==2018.5 +pyyaml==5.4 +pyzmq==17.1.2 +raven==6.9.0 +redis==2.10.6 +repoze.lru==0.7 +requests-oauthlib==1.0.0 +requests==2.31.0 +retrying==1.3.3 +rsa==3.4.2 +scipy==1.10.0 +selenium==3.14.0 +setuptools-git==1.2 +setuptools==40.2.0 +sh==1.12.14 +simplejson==3.16.0 +six==1.11.0 +snowballstemmer==1.2.1 +south==1.0.2 +sphinx==1.7.7 +sqlalchemy-migrate==0.11.0 +sqlalchemy==1.2.11 +sqlparse==0.4.4 +statsd==3.3.0 +stevedore==1.29.0 +suds==0.4 +supervisor==3.3.4 +testrepository==0.0.20 +testtools==2.3.0 +thrift==0.11.0 +tornado==6.3.3 +tox==3.2.1 +twisted==18.7.0 +ujson==1.35 +unidecode==1.0.22 +unittest2==1.1.0 +uritemplate==3.0.0 +urllib3==1.26.5 +uwsgi==2.0.22 +versiontools==1.9.1 +virtualenv==16.0.0 +waitress==2.1.2 +warlock==1.3.0 +webob==1.8.2 +websocket-client==0.51.0 +webtest==2.0.30 +werkzeug==2.2.3 +wheel==0.31.1 +xlrd==1.1.0 +zc.buildout==2.12.1 +zope.interface==4.5.0 diff --git a/tests/python3-libraries/Dockerfile b/tests/python3-libraries/Dockerfile new file mode 100644 index 
00000000..c2647292 --- /dev/null +++ b/tests/python3-libraries/Dockerfile @@ -0,0 +1,3 @@ +ARG intermediate_image +FROM $intermediate_image +COPY requirements.txt /requirements.txt diff --git a/tests/python3-libraries/python3-libraries.yaml b/tests/python3-libraries/python3-libraries.yaml new file mode 100644 index 00000000..ace58132 --- /dev/null +++ b/tests/python3-libraries/python3-libraries.yaml @@ -0,0 +1,18 @@ +schemaVersion: "1.0.0" + +globalEnvVars: + - key: "VIRTUAL_ENV" + value: "/env" + - key: "PATH" + value: "/env/bin:$PATH" + +commandTests: + - name: "requirements 3.5" + setup: [["virtualenv", "-p", "/opt/python3.5/bin/python3.5", "/env"]] + command: ["pip", "install", "-r", "/requirements.txt"] + exitCode: 0 + + - name: "requirements 3.6" + setup: [["virtualenv", "-p", "/opt/python3.6/bin/python3.6", "/env"]] + command: ["pip", "install", "-r", "/requirements.txt"] + exitCode: 0 diff --git a/tests/python3-libraries/requirements.txt b/tests/python3-libraries/requirements.txt new file mode 100644 index 00000000..23175a06 --- /dev/null +++ b/tests/python3-libraries/requirements.txt @@ -0,0 +1,195 @@ +alembic==1.0.0 +amqp==2.3.2 +amqplib==1.0.2 +ansible==7.0.0 +anyjson==0.3.3 +apache-libcloud==2.3.0 +argparse==1.4.0 +astroid==2.0.4 +awscli==1.16.1 +babel==2.6.0 +backports.ssl_match_hostname==3.5.0.1 +bcdoc==0.16.0 +beautifulsoup4==4.6.3 +billiard==3.5.0.4 +blessings==1.7 +blinker==1.4 +boto==2.49.0 +botocore==1.11.1 +bottle==0.12.13 +celery==4.2.1 +certifi==2023.7.22 +cffi==1.11.5 +chardet==3.0.4 +click==6.7 +cliff==2.13.0 +cmd2==0.9.4 +colorama==0.3.9 +configobj==5.0.6 +cov-core==1.15.0 +coverage==4.5.1 +coveralls==1.4.0 +crcmod==1.7 +cryptography==41.0.3 +cssselect==1.0.3 +cython==0.28.5 +decorator==4.3.0 +django-celery==3.2.2 +django-debug-toolbar==1.9.1 +django-extensions==2.1.1 +django==2.2.28 +django_compress==1.0.1 +djangorestframework==3.8.2 +docker-py==1.10.6 +docopt==0.6.2 +docutils==0.14 +ecdsa==0.13 +elasticsearch==6.3.1 +enum34==1.1.6 
+eventlet==0.24.1 +extras==1.0.0 +fabric==2.3.1 +fixtures==3.0.0 +flake8==3.5.0 +flask==2.2.5 +funcsigs==1.0.2 +gevent==1.3.6 +google-api-python-client==1.7.4 +greenlet==0.4.14 +gunicorn==19.9.0 +hiredis==0.2.0 +honcho==1.0.1 +html5lib==1.0.1 +httplib2==0.11.3 +idna==2.7 +ipaddress==1.0.22 +ipython==6.5.0 +iso8601==0.1.12 +isodate==0.6.0 +itsdangerous==0.24 +jinja2==2.10 +jmespath==0.9.3 +jsonschema==2.6.0 +kombu==4.2.1 +linecache2==1.0.0 +logilab-common==1.4.2 +lxml==4.2.4 +mako==1.0.7 +manifestparser==1.1 +markdown==2.6.11 +markupsafe==1.1.1 +matplotlib==2.2.3 +mccabe==0.6.1 +meld3==1.0.2 +mock==2.0.0 +mozcrash==1.0 +mozdevice==1.0.1 +mozfile==1.2 +mozinfo==0.10 +mozlog==3.8 +moznetwork==0.27 +mozprocess==0.26 +msgpack-python==0.5.6 +ndg-httpsclient==0.5.1 +netaddr==0.7.19 +netifaces==0.10.7 +newrelic==4.2.0.100 +nose==1.3.7 +numpy==1.22.0 +oauth2==1.9.0.post1 +oauth2client==4.1.2 +oauthlib==2.1.0 +ordereddict==1.1 +oslo.config==6.4.0 +pandas==0.23.4 +paramiko==2.4.1 +passlib==1.7.1 +paste==2.0.3 +pastedeploy==1.5.2 +pastescript==2.0.2 +pbr==4.2.0 +pep8==1.7.1 +pexpect==4.6.0 +pika==0.12.0 +pillow==9.3.0 +pip==18.0 +prettytable==0.7.2 +protobuf==3.6.1 +psutil==5.4.7 +psycopg2==2.7.5 +py==1.5.4 +pyasn1-modules==0.2.2 +pyasn1==0.4.4 +pycparser==2.18 +pycrypto==2.6.1 +pyflakes==2.0.0 +pygments==2.15.0 +pyjwt==1.6.4 +pylibmc==1.5.2 +pylint==2.1.1 +pymongo==3.7.1 +pymysql==0.9.2 +pyopenssl==18.0.0 +pyparsing==2.2.0 +pyramid==1.9.2 +pystache==0.5.4 +pytest-cov==2.5.1 +pytest==3.7.3 +python-daemon==2.2.0 +python-dateutil==2.7.3 +python-gflags==3.1.2 +python-keystoneclient==3.17.0 +python-memcached==1.59 +python-mimeparse==1.6.0 +python-novaclient==11.0.0 +python-subunit==1.3.0 +python-swiftclient==3.6.0 +pytz==2018.5 +pyyaml==5.4 +pyzmq==17.1.2 +raven==6.9.0 +redis==2.10.6 +repoze.lru==0.7 +requests-oauthlib==1.0.0 +requests==2.31.0 +retrying==1.3.3 +rsa==3.4.2 +scipy==1.10.0 +selenium==3.14.0 +setuptools-git==1.2 +setuptools==40.2.0 +sh==1.12.14 +simplejson==3.16.0 
+six==1.11.0 +snowballstemmer==1.2.1 +south==1.0.2 +sphinx==1.7.7 +sqlalchemy-migrate==0.11.0 +sqlalchemy==1.2.11 +sqlparse==0.4.4 +statsd==3.3.0 +stevedore==1.29.0 +testrepository==0.0.20 +testtools==2.3.0 +thrift==0.11.0 +tornado==6.3.3 +tox==3.2.1 +twisted==18.7.0 +ujson==1.35 +unidecode==1.0.22 +unittest2==1.1.0 +uritemplate==3.0.0 +urllib3==1.26.5 +uwsgi==2.0.22 +versiontools==1.9.1 +virtualenv==16.0.0 +waitress==2.1.2 +warlock==1.3.0 +webob==1.8.2 +websocket-client==0.51.0 +webtest==2.0.30 +werkzeug==2.2.3 +wheel==0.31.1 +xlrd==1.1.0 +zc.buildout==2.12.1 +zope.interface==4.5.0 diff --git a/tests/virtualenv/virtualenv_default.yaml b/tests/virtualenv/virtualenv_default.yaml new file mode 100644 index 00000000..6b6ad282 --- /dev/null +++ b/tests/virtualenv/virtualenv_default.yaml @@ -0,0 +1,37 @@ +schemaVersion: "1.0.0" + +globalEnvVars: + - key: "VIRTUAL_ENV" + value: "/env" + - key: "PATH" + value: "/env/bin:$PATH" + +commandTests: + - name: "virtualenv python installation" + setup: [["virtualenv", "/env"]] + command: ["which", "python"] + expectedOutput: ["/env/bin/python\n"] + + - name: "virtualenv python version" + setup: [["virtualenv", "/env"]] + command: ["python", "--version"] + # we check stderr instead of stdout for Python versions < 3.4 + # https://bugs.python.org/issue18338 + expectedError: ["Python 2.7.(9|12)\n"] + + - name: "virtualenv pip installation" + setup: [["virtualenv", "/env"]] + command: ["which", "pip"] + expectedOutput: ["/env/bin/pip\n"] + + - name: "virtualenv gunicorn installation" + setup: [["virtualenv", "/env"], + ["pip", "install", "gunicorn"]] + command: ["which", "gunicorn"] + expectedOutput: ["/env/bin/gunicorn"] + + - name: "virtualenv flask installation" + setup: [["virtualenv", "/env"], + ["pip", "install", "flask"]] + command: ["python", "-c", "import flask; print(flask.__file__)"] + expectedOutput: ["/env/lib/python2.7/site-packages/flask/__init__.pyc"] diff --git a/tests/virtualenv/virtualenv_python27.yaml 
b/tests/virtualenv/virtualenv_python27.yaml new file mode 100644 index 00000000..09b78480 --- /dev/null +++ b/tests/virtualenv/virtualenv_python27.yaml @@ -0,0 +1,47 @@ +schemaVersion: "1.0.0" + +globalEnvVars: + - key: "VIRTUAL_ENV" + value: "/env" + - key: "PATH" + value: "/env/bin:$PATH" + +commandTests: + - name: "virtualenv27 python installation" + setup: [["virtualenv", "-p", "python", "/env"]] + command: ["which", "python"] + expectedOutput: ["/env/bin/python\n"] + + - name: "virtualenv27 python2 installation" + setup: [["virtualenv", "-p", "python", "/env"]] + command: ["which", "python2"] + expectedOutput: ["/env/bin/python2\n"] + + - name: "virtualenv27python2.7 installation" + setup: [["virtualenv", "-p", "python", "/env"]] + command: ["which", "python2.7"] + expectedOutput: ["/env/bin/python2.7\n"] + + - name: "virtualenv27 python version" + setup: [["virtualenv", "-p", "python", "/env"]] + command: ["python", "--version"] + # we check stderr instead of stdout for Python versions < 3.4 + # https://bugs.python.org/issue18338 + expectedError: ["Python 2.7.(9|12)\n"] + + - name: "virtualenv27 pip installation" + setup: [["virtualenv", "-p", "python", "/env"]] + command: ["which", "pip"] + expectedOutput: ["/env/bin/pip\n"] + + - name: "virtualenv27 gunicorn installation" + setup: [["virtualenv", "-p", "python", "/env"], + ["pip", "install", "gunicorn"]] + command: ["which", "gunicorn"] + expectedOutput: ["/env/bin/gunicorn"] + + - name: "virtualenv27 flask installation" + setup: [["virtualenv", "-p", "python", "/env"], + ["pip", "install", "flask"]] + command: ["python", "-c", "import flask; print(flask.__file__)"] + expectedOutput: ["/env/lib/python2.7/site-packages/flask/__init__.pyc"] diff --git a/tests/virtualenv/virtualenv_python34.yaml b/tests/virtualenv/virtualenv_python34.yaml new file mode 100644 index 00000000..9b5b77d0 --- /dev/null +++ b/tests/virtualenv/virtualenv_python34.yaml @@ -0,0 +1,54 @@ +schemaVersion: "1.0.0" + +globalEnvVars: + - 
key: "VIRTUAL_ENV" + value: "/env" + - key: "PATH" + value: "/env/bin:$PATH" + +commandTests: + - name: "virtualenv34 python installation" + setup: [["virtualenv", "-p", "python3.4", "/env"]] + command: ["which", "python"] + expectedOutput: ["/env/bin/python\n"] + + - name: "virtualenv34 python3 installation" + setup: [["virtualenv", "-p", "python3.4", "/env"]] + command: ["which", "python3"] + expectedOutput: ["/env/bin/python3\n"] + + - name: "virtualenv34 python3.4 installation" + setup: [["virtualenv", "-p", "python3.4", "/env"]] + command: ["which", "python3.4"] + expectedOutput: ["/env/bin/python3.4\n"] + + - name: "virtualenv34 python version" + setup: [["virtualenv", "-p", "python3.4", "/env"]] + command: ["python", "--version"] + expectedOutput: ["Python 3.4.8\n"] + + - name: "virtualenv34 pip installation" + setup: [["virtualenv", "-p", "python3.4", "/env"]] + command: ["which", "pip"] + expectedOutput: ["/env/bin/pip\n"] + + - name: "virtualenv34 pip3 installation" + setup: [["virtualenv", "-p", "python3.4", "/env"]] + command: ["which", "pip3"] + expectedOutput: ["/env/bin/pip3\n"] + + - name: "virtualenv34 gunicorn installation" + setup: [["virtualenv", "-p", "python3.4", "/env"], + ["pip", "install", "gunicorn"]] + command: ["which", "gunicorn"] + expectedOutput: ["/env/bin/gunicorn"] + + - name: "virtualenv34 flask installation" + setup: [["virtualenv", "-p", "python3.4", "/env"], + ["pip", "install", "flask"]] + command: ["python", "-c", "import flask; print(flask.__file__)"] + expectedOutput: ["/env/lib/python3.4/site-packages/flask/__init__.py"] + + - name: "virtualenv34 test.support availability" + setup: [["virtualenv", "-p", "python3.4", "/env"]] + command: ["python", "-c", "\"from test import pystone, regrtest, support\""] diff --git a/tests/virtualenv/virtualenv_python35.yaml b/tests/virtualenv/virtualenv_python35.yaml new file mode 100644 index 00000000..5e4b394a --- /dev/null +++ b/tests/virtualenv/virtualenv_python35.yaml @@ -0,0 +1,54 @@ 
+schemaVersion: "1.0.0" + +globalEnvVars: + - key: "VIRTUAL_ENV" + value: "/env" + - key: "PATH" + value: "/env/bin:$PATH" + +commandTests: + - name: "virtualenv35 python installation" + setup: [["virtualenv", "-p", "python3.5", "/env"]] + command: ["which", "python"] + expectedOutput: ["/env/bin/python\n"] + + - name: "virtualenv35 python3 installation" + setup: [["virtualenv", "-p", "python3.5", "/env"]] + command: ["which", "python3"] + expectedOutput: ["/env/bin/python3\n"] + + - name: "virtualenv35 python3.5 installation" + setup: [["virtualenv", "-p", "python3.5", "/env"]] + command: ["which", "python3.5"] + expectedOutput: ["/env/bin/python3.5\n"] + + - name: "virtualenv35 python version" + setup: [["virtualenv", "-p", "python3.5", "/env"]] + command: ["python", "--version"] + expectedOutput: ["Python 3.5.9\n"] + + - name: "virtualenv35 pip installation" + setup: [["virtualenv", "-p", "python3.5", "/env"]] + command: ["which", "pip"] + expectedOutput: ["/env/bin/pip\n"] + + - name: "virtualenv35 pip3 installation" + setup: [["virtualenv", "-p", "python3.5", "/env"]] + command: ["which", "pip3"] + expectedOutput: ["/env/bin/pip3\n"] + + - name: "virtualenv35 gunicorn installation" + setup: [["virtualenv", "-p", "python3.5", "/env"], + ["pip", "install", "gunicorn"]] + command: ["which", "gunicorn"] + expectedOutput: ["/env/bin/gunicorn"] + + - name: "virtualenv35 flask installation" + setup: [["virtualenv", "-p", "python3.5", "/env"], + ["pip", "install", "flask"]] + command: ["python", "-c", "import flask; print(flask.__file__)"] + expectedOutput: ["/env/lib/python3.5/site-packages/flask/__init__.py"] + + - name: "virtualenv35 test.support availability" + setup: [["virtualenv", "-p", "python3.5", "/env"]] + command: ["python", "-c", "\"from test import pystone, regrtest, support\""] diff --git a/tests/virtualenv/virtualenv_python36.yaml b/tests/virtualenv/virtualenv_python36.yaml new file mode 100644 index 00000000..b3a9e68e --- /dev/null +++ 
b/tests/virtualenv/virtualenv_python36.yaml @@ -0,0 +1,54 @@ +schemaVersion: "1.0.0" + +globalEnvVars: + - key: "VIRTUAL_ENV" + value: "/env" + - key: "PATH" + value: "/env/bin:$PATH" + +commandTests: + - name: "virtualenv36 python installation" + setup: [["virtualenv", "-p", "python3.6", "/env"]] + command: ["which", "python"] + expectedOutput: ["/env/bin/python\n"] + + - name: "virtualenv36 python3 installation" + setup: [["virtualenv", "-p", "python3.6", "/env"]] + command: ["which", "python3"] + expectedOutput: ["/env/bin/python3\n"] + + - name: "virtualenv36 python3.6 installation" + setup: [["virtualenv", "-p", "python3.6", "/env"]] + command: ["which", "python3.6"] + expectedOutput: ["/env/bin/python3.6\n"] + + - name: "virtualenv36 python version" + setup: [["virtualenv", "-p", "python3.6", "/env"]] + command: ["python", "--version"] + expectedOutput: ["Python 3.6.10\n"] + + - name: "virtualenv36 pip installation" + setup: [["virtualenv", "-p", "python3.6", "/env"]] + command: ["which", "pip"] + expectedOutput: ["/env/bin/pip\n"] + + - name: "virtualenv36 pip3 installation" + setup: [["virtualenv", "-p", "python3.6", "/env"]] + command: ["which", "pip3"] + expectedOutput: ["/env/bin/pip3\n"] + + - name: "virtualenv36 gunicorn installation" + setup: [["virtualenv", "-p", "python3.6", "/env"], + ["pip", "install", "gunicorn"]] + command: ["which", "gunicorn"] + expectedOutput: ["/env/bin/gunicorn"] + + - name: "virtualenv36 flask installation" + setup: [["virtualenv", "-p", "python3.6", "/env"], + ["pip", "install", "flask"]] + command: ["python", "-c", "import flask; print(flask.__file__)"] + expectedOutput: ["/env/lib/python3.6/site-packages/flask/__init__.py"] + + - name: "virtualenv36 test.support availability" + setup: [["virtualenv", "-p", "python3.6", "/env"]] + command: ["python", "-c", "\"from test import pystone, regrtest, support\""] diff --git a/tests/virtualenv/virtualenv_python37.yaml b/tests/virtualenv/virtualenv_python37.yaml new file mode 
100644 index 00000000..9810c78e --- /dev/null +++ b/tests/virtualenv/virtualenv_python37.yaml @@ -0,0 +1,54 @@ +schemaVersion: "1.0.0" + +globalEnvVars: + - key: "VIRTUAL_ENV" + value: "/env" + - key: "PATH" + value: "/env/bin:$PATH" + +commandTests: + - name: "virtualenv37 python installation" + setup: [["virtualenv", "-p", "python3.7", "/env"]] + command: ["which", "python"] + expectedOutput: ["/env/bin/python\n"] + + - name: "virtualenv37 python3 installation" + setup: [["virtualenv", "-p", "python3.7", "/env"]] + command: ["which", "python3"] + expectedOutput: ["/env/bin/python3\n"] + + - name: "virtualenv37 python3.7 installation" + setup: [["virtualenv", "-p", "python3.7", "/env"]] + command: ["which", "python3.7"] + expectedOutput: ["/env/bin/python3.7\n"] + + - name: "virtualenv37 python version" + setup: [["virtualenv", "-p", "python3.7", "/env"]] + command: ["python", "--version"] + expectedOutput: ["Python 3.7.9\n"] + + - name: "virtualenv37 pip installation" + setup: [["virtualenv", "-p", "python3.7", "/env"]] + command: ["which", "pip"] + expectedOutput: ["/env/bin/pip\n"] + + - name: "virtualenv37 pip3 installation" + setup: [["virtualenv", "-p", "python3.7", "/env"]] + command: ["which", "pip3"] + expectedOutput: ["/env/bin/pip3\n"] + + - name: "virtualenv37 gunicorn installation" + setup: [["virtualenv", "-p", "python3.7", "/env"], + ["pip", "install", "gunicorn"]] + command: ["which", "gunicorn"] + expectedOutput: ["/env/bin/gunicorn"] + + - name: "virtualenv37 flask installation" + setup: [["virtualenv", "-p", "python3.7", "/env"], + ["pip", "install", "flask"]] + command: ["python", "-c", "import flask; print(flask.__file__)"] + expectedOutput: ["/env/lib/python3.7/site-packages/flask/__init__.py"] + + - name: "virtualenv37 test.support availability" + setup: [["virtualenv", "-p", "python3.7", "/env"]] + command: ["python", "-c", "\"from test import pystone, regrtest, support\""]