
Merge pull request #7155 from docker/bump-1.25.2-rc1

Bump 1.25.2-rc1
Ulysses Souza, 5 years ago
parent
commit
e9f9a1e9e1

+ 0 - 66
.circleci/config.yml

@@ -1,66 +0,0 @@
-version: 2
-jobs:
-  test:
-    macos:
-      xcode: "9.4.1"
-    steps:
-    - checkout
-    - run:
-        name: setup script
-        command: ./script/setup/osx
-    - run:
-        name: install tox
-        command: sudo pip install --upgrade tox==2.1.1 virtualenv==16.2.0
-    - run:
-        name: unit tests
-        command: tox -e py27,py37 -- tests/unit
-
-  build-osx-binary:
-    macos:
-      xcode: "9.4.1"
-    steps:
-      - checkout
-      - run:
-          name: upgrade python tools
-          command: sudo pip install --upgrade pip virtualenv==16.2.0
-      - run:
-         name: setup script
-         command: DEPLOYMENT_TARGET=10.11 ./script/setup/osx
-      - run:
-         name: build script
-         command: ./script/build/osx
-      - store_artifacts:
-          path: dist/docker-compose-Darwin-x86_64
-          destination: docker-compose-Darwin-x86_64
-      - store_artifacts:
-          path: dist/docker-compose-Darwin-x86_64.tgz
-          destination: docker-compose-Darwin-x86_64.tgz
-      - deploy:
-          name: Deploy binary to bintray
-          command: |
-            OS_NAME=Darwin PKG_NAME=osx ./script/circle/bintray-deploy.sh
-
-  build-linux-binary:
-    machine:
-      enabled: true
-    steps:
-      - checkout
-      - run:
-          name: build Linux binary
-          command: ./script/build/linux
-      - store_artifacts:
-          path: dist/docker-compose-Linux-x86_64
-          destination: docker-compose-Linux-x86_64
-      - deploy:
-          name: Deploy binary to bintray
-          command: |
-            OS_NAME=Linux PKG_NAME=linux ./script/circle/bintray-deploy.sh
-
-
-workflows:
-  version: 2
-  all:
-    jobs:
-      - test
-      - build-linux-binary
-      - build-osx-binary

+ 1 - 0
.dockerignore

@@ -11,3 +11,4 @@ docs/_site
 .tox
 **/__pycache__
 *.pyc
+Jenkinsfile

+ 35 - 0
CHANGELOG.md

@@ -1,6 +1,41 @@
 Change log
 ==========

+1.25.2 (2020-01-17)
+-------------------
+
+### Features
+
+- Allow compatibility option with `COMPOSE_COMPATIBILITY` environment variable
+
+- Bump PyInstaller from 3.5 to 3.6
+
+- Bump pysocks from 1.6.7 to 1.7.1
+
+- Bump websocket-client from 0.32.0 to 0.57.0
+
+- Bump urllib3 from 1.24.2 to 1.25.7
+
+- Bump jsonschema from 3.0.1 to 3.2.0
+
+- Bump PyYAML from 4.2b1 to 5.3
+
+- Bump certifi from 2017.4.17 to 2019.11.28
+
+- Bump coverage from 4.5.4 to 5.0.3
+
+- Bump paramiko from 2.6.0 to 2.7.1
+
+- Bump cached-property from 1.3.0 to 1.5.1
+
+- Bump minor Linux and MacOSX dependencies
+
+### Bugfixes
+
+- Validate version format on formats 2+
+
+- Assume infinite terminal width when not running in a terminal
+
 1.25.1 (2020-01-06)
 -------------------


+ 4 - 4
Dockerfile

@@ -1,9 +1,9 @@
-ARG DOCKER_VERSION=18.09.7
-ARG PYTHON_VERSION=3.7.4
+ARG DOCKER_VERSION=19.03.5
+ARG PYTHON_VERSION=3.7.5
 ARG BUILD_ALPINE_VERSION=3.10
 ARG BUILD_DEBIAN_VERSION=slim-stretch
-ARG RUNTIME_ALPINE_VERSION=3.10.1
-ARG RUNTIME_DEBIAN_VERSION=stretch-20190812-slim
+ARG RUNTIME_ALPINE_VERSION=3.10.3
+ARG RUNTIME_DEBIAN_VERSION=stretch-20191118-slim

 ARG BUILD_PLATFORM=alpine


+ 99 - 82
Jenkinsfile

@@ -1,95 +1,112 @@
 #!groovy

-def buildImage = { String baseImage ->
-  def image
-  wrappedNode(label: "ubuntu && amd64 && !zfs", cleanWorkspace: true) {
-    stage("build image for \"${baseImage}\"") {
-      checkout(scm)
-      def imageName = "dockerbuildbot/compose:${baseImage}-${gitCommit()}"
-      image = docker.image(imageName)
-      try {
-        image.pull()
-      } catch (Exception exc) {
-        sh """GIT_COMMIT=\$(script/build/write-git-sha) && \\
-            docker build -t ${imageName} \\
-            --target build \\
-            --build-arg BUILD_PLATFORM="${baseImage}" \\
-            --build-arg GIT_COMMIT="${GIT_COMMIT}" \\
-            .\\
-        """
-        sh "docker push ${imageName}"
-        echo "${imageName}"
-        return imageName
-      }
+def dockerVersions = ['19.03.5', '18.09.9']
+def baseImages = ['alpine', 'debian']
+def pythonVersions = ['py27', 'py37']
+
+pipeline {
+    agent none
+
+    options {
+        skipDefaultCheckout(true)
+        buildDiscarder(logRotator(daysToKeepStr: '30'))
+        timeout(time: 2, unit: 'HOURS')
+        timestamps()
    }
-  }
-  echo "image.id: ${image.id}"
-  return image.id
-}
 
-def get_versions = { String imageId, int number ->
-  def docker_versions
-  wrappedNode(label: "ubuntu && amd64 && !zfs") {
-    def result = sh(script: """docker run --rm \\
-        --entrypoint=/code/.tox/py27/bin/python \\
-        ${imageId} \\
-        /code/script/test/versions.py -n ${number} docker/docker-ce recent
-      """, returnStdout: true
-    )
-    docker_versions = result.split()
-  }
-  return docker_versions
+    stages {
+        stage('Build test images') {
+            // TODO use declarative 1.5.0 `matrix` once available on CI
+            parallel {
+                stage('alpine') {
+                    agent {
+                        label 'ubuntu && amd64 && !zfs'
+                    }
+                    steps {
+                        buildImage('alpine')
+                    }
+                }
+                stage('debian') {
+                    agent {
+                        label 'ubuntu && amd64 && !zfs'
+                    }
+                    steps {
+                        buildImage('debian')
+                    }
+                }
+            }
+        }
+        stage('Test') {
+            steps {
+                // TODO use declarative 1.5.0 `matrix` once available on CI
+                script {
+                    def testMatrix = [:]
+                    baseImages.each { baseImage ->
+                      dockerVersions.each { dockerVersion ->
+                        pythonVersions.each { pythonVersion ->
+                          testMatrix["${baseImage}_${dockerVersion}_${pythonVersion}"] = runTests(dockerVersion, pythonVersion, baseImage)
+                        }
+                      }
+                    }
+
+                    parallel testMatrix
+                }
+            }
+        }
+    }
}

-def runTests = { Map settings ->
-  def dockerVersions = settings.get("dockerVersions", null)
-  def pythonVersions = settings.get("pythonVersions", null)
-  def baseImage = settings.get("baseImage", null)
-  def imageName = settings.get("image", null)
 
-  if (!pythonVersions) {
-    throw new Exception("Need Python versions to test. e.g.: `runTests(pythonVersions: 'py27,py37')`")
-  }
-  if (!dockerVersions) {
-    throw new Exception("Need Docker versions to test. e.g.: `runTests(dockerVersions: 'all')`")
-  }
+def buildImage(baseImage) {
+    def scmvar = checkout(scm)
+    def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}"
+    image = docker.image(imageName)
 
-  { ->
-    wrappedNode(label: "ubuntu && amd64 && !zfs", cleanWorkspace: true) {
-      stage("test python=${pythonVersions} / docker=${dockerVersions} / baseImage=${baseImage}") {
-        checkout(scm)
-        def storageDriver = sh(script: 'docker info | awk -F \': \' \'$1 == "Storage Driver" { print $2; exit }\'', returnStdout: true).trim()
-        echo "Using local system's storage driver: ${storageDriver}"
-        sh """docker run \\
-          -t \\
-          --rm \\
-          --privileged \\
-          --volume="\$(pwd)/.git:/code/.git" \\
-          --volume="/var/run/docker.sock:/var/run/docker.sock" \\
-          -e "TAG=${imageName}" \\
-          -e "STORAGE_DRIVER=${storageDriver}" \\
-          -e "DOCKER_VERSIONS=${dockerVersions}" \\
-          -e "BUILD_NUMBER=\$BUILD_TAG" \\
-          -e "PY_TEST_VERSIONS=${pythonVersions}" \\
-          --entrypoint="script/test/ci" \\
-          ${imageName} \\
-          --verbose
-        """
-      }
+    withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
+        try {
+            image.pull()
+        } catch (Exception exc) {
+            ansiColor('xterm') {
+                sh """docker build -t ${imageName} \\
+                    --target build \\
+                    --build-arg BUILD_PLATFORM="${baseImage}" \\
+                    --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT}" \\
+                    .\\
+                """
+                sh "docker push ${imageName}"
+            }
+            echo "${imageName}"
+            return imageName
+        }
    }
-  }
}

-def testMatrix = [failFast: true]
-def baseImages = ['alpine', 'debian']
-def pythonVersions = ['py27', 'py37']
-baseImages.each { baseImage ->
-  def imageName = buildImage(baseImage)
-  get_versions(imageName, 2).each { dockerVersion ->
-    pythonVersions.each { pyVersion ->
-      testMatrix["${baseImage}_${dockerVersion}_${pyVersion}"] = runTests([baseImage: baseImage, image: imageName, dockerVersions: dockerVersion, pythonVersions: pyVersion])
+def runTests(dockerVersion, pythonVersion, baseImage) {
+    return {
+        stage("python=${pythonVersion} docker=${dockerVersion} ${baseImage}") {
+            node("ubuntu && amd64 && !zfs") {
+                def scmvar = checkout(scm)
+                def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}"
+                def storageDriver = sh(script: "docker info -f \'{{.Driver}}\'", returnStdout: true).trim()
+                echo "Using local system's storage driver: ${storageDriver}"
+                withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
+                    sh """docker run \\
+                      -t \\
+                      --rm \\
+                      --privileged \\
+                      --volume="\$(pwd)/.git:/code/.git" \\
+                      --volume="/var/run/docker.sock:/var/run/docker.sock" \\
+                      -e "TAG=${imageName}" \\
+                      -e "STORAGE_DRIVER=${storageDriver}" \\
+                      -e "DOCKER_VERSIONS=${dockerVersion}" \\
+                      -e "BUILD_NUMBER=${env.BUILD_NUMBER}" \\
+                      -e "PY_TEST_VERSIONS=${pythonVersion}" \\
+                      --entrypoint="script/test/ci" \\
+                      ${imageName} \\
+                      --verbose
+                    """
+                }
+            }
+        }
    }
-  }
}
-
-parallel(testMatrix)
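
Both the scripted pipeline removed above and the declarative one replacing it fan the tests out over the same matrix: every combination of base image, Docker version, and Python version (the new file hardcodes the two Docker versions instead of querying script/test/versions.py). A hedged Python sketch of the map the nested each loops build:

from itertools import product

base_images = ['alpine', 'debian']
docker_versions = ['19.03.5', '18.09.9']
python_versions = ['py27', 'py37']

# Same "<base>_<docker>_<python>" keys as the Groovy testMatrix map.
matrix = {
    '{}_{}_{}'.format(base, docker, py): (base, docker, py)
    for base, docker, py in product(base_images, docker_versions, python_versions)
}
assert len(matrix) == 8  # 2 x 2 x 2 parallel test runs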

+ 315 - 0
Release.Jenkinsfile

@@ -0,0 +1,315 @@
+#!groovy
+
+def dockerVersions = ['19.03.5', '18.09.9']
+def baseImages = ['alpine', 'debian']
+def pythonVersions = ['py27', 'py37']
+
+pipeline {
+    agent none
+
+    options {
+        skipDefaultCheckout(true)
+        buildDiscarder(logRotator(daysToKeepStr: '30'))
+        timeout(time: 2, unit: 'HOURS')
+        timestamps()
+    }
+
+    stages {
+        stage('Build test images') {
+            // TODO use declarative 1.5.0 `matrix` once available on CI
+            parallel {
+                stage('alpine') {
+                    agent {
+                        label 'linux'
+                    }
+                    steps {
+                        buildImage('alpine')
+                    }
+                }
+                stage('debian') {
+                    agent {
+                        label 'linux'
+                    }
+                    steps {
+                        buildImage('debian')
+                    }
+                }
+            }
+        }
+        stage('Test') {
+            steps {
+                // TODO use declarative 1.5.0 `matrix` once available on CI
+                script {
+                    def testMatrix = [:]
+                    baseImages.each { baseImage ->
+                      dockerVersions.each { dockerVersion ->
+                        pythonVersions.each { pythonVersion ->
+                          testMatrix["${baseImage}_${dockerVersion}_${pythonVersion}"] = runTests(dockerVersion, pythonVersion, baseImage)
+                        }
+                      }
+                    }
+
+                    parallel testMatrix
+                }
+            }
+        }
+        stage('Generate Changelog') {
+            agent {
+                label 'linux'
+            }
+            steps {
+                checkout scm
+                withCredentials([string(credentialsId: 'github-compose-release-test-token', variable: 'GITHUB_TOKEN')]) {
+                    sh "./script/release/generate_changelog.sh"
+                }
+                archiveArtifacts artifacts: 'CHANGELOG.md'
+                stash( name: "changelog", includes: 'CHANGELOG.md' )
+            }
+        }
+        stage('Package') {
+            parallel {
+                stage('macosx binary') {
+                    agent {
+                        label 'mac-python'
+                    }
+                    steps {
+                        checkout scm
+                        sh './script/setup/osx'
+                        sh 'tox -e py27,py37 -- tests/unit'
+                        sh './script/build/osx'
+                        dir ('dist') {
+                          checksum('docker-compose-Darwin-x86_64')
+                          checksum('docker-compose-Darwin-x86_64.tgz')
+                        }
+                        archiveArtifacts artifacts: 'dist/*', fingerprint: true
+                        dir("dist") {
+                            stash name: "bin-darwin"
+                        }
+                    }
+                }
+                stage('linux binary') {
+                    agent {
+                        label 'linux'
+                    }
+                    steps {
+                        checkout scm
+                        sh './script/build/linux'
+                        dir ('dist') {
+                          checksum('docker-compose-Linux-x86_64')
+                        }
+                        archiveArtifacts artifacts: 'dist/*', fingerprint: true
+                        dir("dist") {
+                            stash name: "bin-linux"
+                        }
+                    }
+                }
+                stage('windows binary') {
+                    agent {
+                        label 'windows-python'
+                    }
+                    environment {
+                        PATH = "$PATH;C:\\Python37;C:\\Python37\\Scripts"
+                    }
+                    steps {
+                        checkout scm
+                        bat 'tox.exe -e py27,py37 -- tests/unit'
+                        powershell '.\\script\\build\\windows.ps1'
+                        dir ('dist') {
+                            checksum('docker-compose-Windows-x86_64.exe')
+                        }
+                        archiveArtifacts artifacts: 'dist/*', fingerprint: true
+                        dir("dist") {
+                            stash name: "bin-win"
+                        }
+                    }
+                }
+                stage('alpine image') {
+                    agent {
+                        label 'linux'
+                    }
+                    steps {
+                        buildRuntimeImage('alpine')
+                    }
+                }
+                stage('debian image') {
+                    agent {
+                        label 'linux'
+                    }
+                    steps {
+                        buildRuntimeImage('debian')
+                    }
+                }
+            }
+        }
+        stage('Release') {
+            when {
+                buildingTag()
+            }
+            parallel {
+                stage('Pushing images') {
+                    agent {
+                        label 'linux'
+                    }
+                    steps {
+                        pushRuntimeImage('alpine')
+                        pushRuntimeImage('debian')
+                    }
+                }
+                stage('Creating Github Release') {
+                    agent {
+                        label 'linux'
+                    }
+                    steps {
+                        checkout scm
+                        sh 'mkdir -p dist'
+                        dir("dist") {
+                            unstash "bin-darwin"
+                            unstash "bin-linux"
+                            unstash "bin-win"
+                            unstash "changelog"
+                            githubRelease()
+                        }
+                    }
+                }
+                stage('Publishing Python packages') {
+                    agent {
+                        label 'linux'
+                    }
+                    steps {
+                        checkout scm
+                        withCredentials([[$class: "FileBinding", credentialsId: 'pypirc-docker-dsg-cibot', variable: 'PYPIRC']]) {
+                            sh """
+                                virtualenv venv-publish
+                                source venv-publish/bin/activate
+                                python setup.py sdist bdist_wheel
+                                pip install twine
+                                twine upload --config-file ${PYPIRC} ./dist/docker-compose-${env.TAG_NAME}.tar.gz ./dist/docker_compose-${env.TAG_NAME}-py2.py3-none-any.whl
+                            """
+                        }
+                    }
+                    post {
+                        always {
+                            sh 'deactivate; rm -rf venv-publish'
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
+
+
+def buildImage(baseImage) {
+    def scmvar = checkout(scm)
+    def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}"
+    image = docker.image(imageName)
+
+    withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
+        try {
+            image.pull()
+        } catch (Exception exc) {
+            ansiColor('xterm') {
+                sh """docker build -t ${imageName} \\
+                    --target build \\
+                    --build-arg BUILD_PLATFORM="${baseImage}" \\
+                    --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT}" \\
+                    .\\
+                """
+                sh "docker push ${imageName}"
+            }
+            echo "${imageName}"
+            return imageName
+        }
+    }
+}
+
+def runTests(dockerVersion, pythonVersion, baseImage) {
+    return {
+        stage("python=${pythonVersion} docker=${dockerVersion} ${baseImage}") {
+            node("linux") {
+                def scmvar = checkout(scm)
+                def imageName = "dockerbuildbot/compose:${baseImage}-${scmvar.GIT_COMMIT}"
+                def storageDriver = sh(script: "docker info -f \'{{.Driver}}\'", returnStdout: true).trim()
+                echo "Using local system's storage driver: ${storageDriver}"
+                withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') {
+                    sh """docker run \\
+                      -t \\
+                      --rm \\
+                      --privileged \\
+                      --volume="\$(pwd)/.git:/code/.git" \\
+                      --volume="/var/run/docker.sock:/var/run/docker.sock" \\
+                      -e "TAG=${imageName}" \\
+                      -e "STORAGE_DRIVER=${storageDriver}" \\
+                      -e "DOCKER_VERSIONS=${dockerVersion}" \\
+                      -e "BUILD_NUMBER=${env.BUILD_NUMBER}" \\
+                      -e "PY_TEST_VERSIONS=${pythonVersion}" \\
+                      --entrypoint="script/test/ci" \\
+                      ${imageName} \\
+                      --verbose
+                    """
+                }
+            }
+        }
+    }
+}
+
+def buildRuntimeImage(baseImage) {
+    scmvar = checkout scm
+    def imageName = "docker/compose:${baseImage}-${env.BRANCH_NAME}"
+    ansiColor('xterm') {
+        sh """docker build -t ${imageName} \\
+            --build-arg BUILD_PLATFORM="${baseImage}" \\
+            --build-arg GIT_COMMIT="${scmvar.GIT_COMMIT.take(7)}" \\
+            .
+        """
+    }
+    sh "mkdir -p dist"
+    sh "docker save ${imageName} -o dist/docker-compose-${baseImage}.tar"
+    stash name: "compose-${baseImage}", includes: "dist/docker-compose-${baseImage}.tar"
+}
+
+def pushRuntimeImage(baseImage) {
+    unstash "compose-${baseImage}"
+    sh 'echo -n "${DOCKERHUB_CREDS_PSW}" | docker login --username "${DOCKERHUB_CREDS_USR}" --password-stdin'
+    sh "docker load -i dist/docker-compose-${baseImage}.tar"
+    withDockerRegistry(credentialsId: 'dockerbuildbot-hub.docker.com') {
+        sh "docker push docker/compose:${baseImage}-${env.TAG_NAME}"
+        if (baseImage == "alpine" && env.TAG_NAME != null) {
+            sh "docker tag docker/compose:alpine-${env.TAG_NAME} docker/compose:${env.TAG_NAME}"
+            sh "docker push docker/compose:${env.TAG_NAME}"
+        }
+    }
+}
+
+def githubRelease() {
+    withCredentials([string(credentialsId: 'github-compose-release-test-token', variable: 'GITHUB_TOKEN')]) {
+        def prerelease = !( env.TAG_NAME ==~ /v[0-9\.]+/ )
+        changelog = readFile "CHANGELOG.md"
+        def data = """{
+            \"tag_name\": \"${env.TAG_NAME}\",
+            \"name\": \"${env.TAG_NAME}\",
+            \"draft\": true,
+            \"prerelease\": ${prerelease},
+            \"body\" : \"${changelog}\"
+        }"""
+        echo data
+
+        def url = "https://api.github.com/repos/docker/compose/releases"
+        def upload_url = sh(returnStdout: true, script: """
+            curl -sSf -H 'Authorization: token ${GITHUB_TOKEN}' -H 'Accept: application/json' -H 'Content-type: application/json' -X POST -d '$data' $url \\
+            | jq '.upload_url | .[:rindex("{")]'
+        """)
+        sh("""
+            for f in * ; do
+                curl -sf -H 'Authorization: token ${GITHUB_TOKEN}' -H 'Accept: application/json' -H 'Content-type: application/octet-stream' \\
+                -X POST --data-binary @\$f ${upload_url}?name=\$f;
+            done
+        """)
+    }
+}
+
+def checksum(filepath) {
+    if (isUnix()) {
+        sh "openssl sha256 -r -out ${filepath}.sha256 ${filepath}"
+    } else {
+        powershell "(Get-FileHash -Path ${filepath} -Algorithm SHA256 | % hash) + ' *${filepath}' > ${filepath}.sha256"
+    }
+}
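
checksum() emits "<hex digest> *<filename>" companion files: openssl sha256 -r on Unix agents, Get-FileHash on Windows. For illustration, a rough Python equivalent of the Unix branch (the function name and chunk size below are assumptions, not part of the commit):

import hashlib

def write_checksum(filepath):
    digest = hashlib.sha256()
    with open(filepath, 'rb') as f:
        # Hash in chunks so large release binaries are not read into memory at once.
        for chunk in iter(lambda: f.read(1 << 16), b''):
            digest.update(chunk)
    with open(filepath + '.sha256', 'w') as out:
        out.write('{} *{}\n'.format(digest.hexdigest(), filepath))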

+ 0 - 24
appveyor.yml

@@ -1,24 +0,0 @@
-
-version: '{branch}-{build}'
-
-install:
-  - "SET PATH=C:\\Python37-x64;C:\\Python37-x64\\Scripts;%PATH%"
-  - "python --version"
-  - "pip install tox==2.9.1 virtualenv==16.2.0"
-
-# Build the binary after tests
-build: false
-
-test_script:
-  - "tox -e py27,py37 -- tests/unit"
-  - ps: ".\\script\\build\\windows.ps1"
-
-artifacts:
-  - path: .\dist\docker-compose-Windows-x86_64.exe
-    name: "Compose Windows binary"
-
-deploy:
-  - provider: Environment
-    name: master-builds
-    on:
-      branch: master

+ 1 - 1
compose/__init__.py

@@ -1,4 +1,4 @@
 from __future__ import absolute_import
 from __future__ import unicode_literals

-__version__ = '1.25.1'
+__version__ = '1.25.2-rc1'

+ 0 - 275
compose/bundle.py

@@ -1,275 +0,0 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import json
-import logging
-
-import six
-from docker.utils import split_command
-from docker.utils.ports import split_port
-
-from .cli.errors import UserError
-from .config.serialize import denormalize_config
-from .network import get_network_defs_for_service
-from .service import format_environment
-from .service import NoSuchImageError
-from .service import parse_repository_tag
-
-
-log = logging.getLogger(__name__)
-
-
-SERVICE_KEYS = {
-    'working_dir': 'WorkingDir',
-    'user': 'User',
-    'labels': 'Labels',
-}
-
-IGNORED_KEYS = {'build'}
-
-SUPPORTED_KEYS = {
-    'image',
-    'ports',
-    'expose',
-    'networks',
-    'command',
-    'environment',
-    'entrypoint',
-} | set(SERVICE_KEYS)
-
-VERSION = '0.1'
-
-
-class NeedsPush(Exception):
-    def __init__(self, image_name):
-        self.image_name = image_name
-
-
-class NeedsPull(Exception):
-    def __init__(self, image_name, service_name):
-        self.image_name = image_name
-        self.service_name = service_name
-
-
-class MissingDigests(Exception):
-    def __init__(self, needs_push, needs_pull):
-        self.needs_push = needs_push
-        self.needs_pull = needs_pull
-
-
-def serialize_bundle(config, image_digests):
-    return json.dumps(to_bundle(config, image_digests), indent=2, sort_keys=True)
-
-
-def get_image_digests(project, allow_push=False):
-    digests = {}
-    needs_push = set()
-    needs_pull = set()
-
-    for service in project.services:
-        try:
-            digests[service.name] = get_image_digest(
-                service,
-                allow_push=allow_push,
-            )
-        except NeedsPush as e:
-            needs_push.add(e.image_name)
-        except NeedsPull as e:
-            needs_pull.add(e.service_name)
-
-    if needs_push or needs_pull:
-        raise MissingDigests(needs_push, needs_pull)
-
-    return digests
-
-
-def get_image_digest(service, allow_push=False):
-    if 'image' not in service.options:
-        raise UserError(
-            "Service '{s.name}' doesn't define an image tag. An image name is "
-            "required to generate a proper image digest for the bundle. Specify "
-            "an image repo and tag with the 'image' option.".format(s=service))
-
-    _, _, separator = parse_repository_tag(service.options['image'])
-    # Compose file already uses a digest, no lookup required
-    if separator == '@':
-        return service.options['image']
-
-    digest = get_digest(service)
-
-    if digest:
-        return digest
-
-    if 'build' not in service.options:
-        raise NeedsPull(service.image_name, service.name)
-
-    if not allow_push:
-        raise NeedsPush(service.image_name)
-
-    return push_image(service)
-
-
-def get_digest(service):
-    digest = None
-    try:
-        image = service.image()
-        # TODO: pick a digest based on the image tag if there are multiple
-        # digests
-        if image['RepoDigests']:
-            digest = image['RepoDigests'][0]
-    except NoSuchImageError:
-        try:
-            # Fetch the image digest from the registry
-            distribution = service.get_image_registry_data()
-
-            if distribution['Descriptor']['digest']:
-                digest = '{image_name}@{digest}'.format(
-                    image_name=service.image_name,
-                    digest=distribution['Descriptor']['digest']
-                )
-        except NoSuchImageError:
-            raise UserError(
-                "Digest not found for service '{service}'. "
-                "Repository does not exist or may require 'docker login'"
-                .format(service=service.name))
-    return digest
-
-
-def push_image(service):
-    try:
-        digest = service.push()
-    except Exception:
-        log.error(
-            "Failed to push image for service '{s.name}'. Please use an "
-            "image tag that can be pushed to a Docker "
-            "registry.".format(s=service))
-        raise
-
-    if not digest:
-        raise ValueError("Failed to get digest for %s" % service.name)
-
-    repo, _, _ = parse_repository_tag(service.options['image'])
-    identifier = '{repo}@{digest}'.format(repo=repo, digest=digest)
-
-    # only do this if RepoDigests isn't already populated
-    image = service.image()
-    if not image['RepoDigests']:
-        # Pull by digest so that image['RepoDigests'] is populated for next time
-        # and we don't have to pull/push again
-        service.client.pull(identifier)
-        log.info("Stored digest for {}".format(service.image_name))
-
-    return identifier
-
-
-def to_bundle(config, image_digests):
-    if config.networks:
-        log.warning("Unsupported top level key 'networks' - ignoring")
-
-    if config.volumes:
-        log.warning("Unsupported top level key 'volumes' - ignoring")
-
-    config = denormalize_config(config)
-
-    return {
-        'Version': VERSION,
-        'Services': {
-            name: convert_service_to_bundle(
-                name,
-                service_dict,
-                image_digests[name],
-            )
-            for name, service_dict in config['services'].items()
-        },
-    }
-
-
-def convert_service_to_bundle(name, service_dict, image_digest):
-    container_config = {'Image': image_digest}
-
-    for key, value in service_dict.items():
-        if key in IGNORED_KEYS:
-            continue
-
-        if key not in SUPPORTED_KEYS:
-            log.warning("Unsupported key '{}' in services.{} - ignoring".format(key, name))
-            continue
-
-        if key == 'environment':
-            container_config['Env'] = format_environment({
-                envkey: envvalue for envkey, envvalue in value.items()
-                if envvalue
-            })
-            continue
-
-        if key in SERVICE_KEYS:
-            container_config[SERVICE_KEYS[key]] = value
-            continue
-
-    set_command_and_args(
-        container_config,
-        service_dict.get('entrypoint', []),
-        service_dict.get('command', []))
-    container_config['Networks'] = make_service_networks(name, service_dict)
-
-    ports = make_port_specs(service_dict)
-    if ports:
-        container_config['Ports'] = ports
-
-    return container_config
-
-
-# See https://github.com/docker/swarmkit/blob/agent/exec/container/container.go#L95
-def set_command_and_args(config, entrypoint, command):
-    if isinstance(entrypoint, six.string_types):
-        entrypoint = split_command(entrypoint)
-    if isinstance(command, six.string_types):
-        command = split_command(command)
-
-    if entrypoint:
-        config['Command'] = entrypoint + command
-        return
-
-    if command:
-        config['Args'] = command
-
-
-def make_service_networks(name, service_dict):
-    networks = []
-
-    for network_name, network_def in get_network_defs_for_service(service_dict).items():
-        for key in network_def.keys():
-            log.warning(
-                "Unsupported key '{}' in services.{}.networks.{} - ignoring"
-                .format(key, name, network_name))
-
-        networks.append(network_name)
-
-    return networks
-
-
-def make_port_specs(service_dict):
-    ports = []
-
-    internal_ports = [
-        internal_port
-        for port_def in service_dict.get('ports', [])
-        for internal_port in split_port(port_def)[0]
-    ]
-
-    internal_ports += service_dict.get('expose', [])
-
-    for internal_port in internal_ports:
-        spec = make_port_spec(internal_port)
-        if spec not in ports:
-            ports.append(spec)
-
-    return ports
-
-
-def make_port_spec(value):
-    components = six.text_type(value).partition('/')
-    return {
-        'Protocol': components[2] or 'tcp',
-        'Port': int(components[0]),
-    }

+ 16 - 4
compose/cli/command.py

@@ -40,7 +40,8 @@ SILENT_COMMANDS = {
 }


-def project_from_options(project_dir, options, additional_options={}):
+def project_from_options(project_dir, options, additional_options=None):
+    additional_options = additional_options or {}
     override_dir = options.get('--project-directory')
     environment_file = options.get('--env-file')
     environment = Environment.from_env_file(override_dir or project_dir, environment_file)
@@ -59,7 +60,7 @@ def project_from_options(project_dir, options, additional_options={}):
         tls_config=tls_config_from_options(options, environment),
         environment=environment,
         override_dir=override_dir,
-        compatibility=options.get('--compatibility'),
+        compatibility=compatibility_from_options(project_dir, options, environment),
         interpolate=(not additional_options.get('--no-interpolate')),
         environment_file=environment_file
     )
@@ -81,7 +82,8 @@ def set_parallel_limit(environment):
         parallel.GlobalLimit.set_global_limit(parallel_limit)


-def get_config_from_options(base_dir, options, additional_options={}):
+def get_config_from_options(base_dir, options, additional_options=None):
+    additional_options = additional_options or {}
     override_dir = options.get('--project-directory')
     environment_file = options.get('--env-file')
     environment = Environment.from_env_file(override_dir or base_dir, environment_file)
@@ -90,7 +92,7 @@ def get_config_from_options(base_dir, options, additional_options={}):
     )
     return config.load(
         config.find(base_dir, config_path, environment, override_dir),
-        options.get('--compatibility'),
+        compatibility_from_options(config_path, options, environment),
         not additional_options.get('--no-interpolate')
     )

@@ -198,3 +200,13 @@ def get_project_name(working_dir, project_name=None, environment=None):
         return normalize_name(project)

     return 'default'
+
+
+def compatibility_from_options(working_dir, options=None, environment=None):
+    """Get compose v3 compatibility from --compatibility option
+       or from COMPOSE_COMPATIBILITY environment variable."""
+
+    compatibility_option = options.get('--compatibility')
+    compatibility_environment = environment.get_boolean('COMPOSE_COMPATIBILITY')
+
+    return compatibility_option or compatibility_environment
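
The helper simply ORs the two sources together, so the --compatibility flag is honored when passed and COMPOSE_COMPATIBILITY decides otherwise. A minimal illustration of that precedence, where FakeEnvironment is a stand-in for compose's Environment class rather than the real implementation:

class FakeEnvironment(dict):
    def get_boolean(self, key):
        # Assumed truthy spellings; compose's real parsing may differ.
        return self.get(key, '').lower() in ('1', 'true', 'yes')

environment = FakeEnvironment({'COMPOSE_COMPATIBILITY': 'true'})
options = {'--compatibility': False}

# Flag not passed, env var set: compatibility mode is still enabled.
assert (options.get('--compatibility')
        or environment.get_boolean('COMPOSE_COMPATIBILITY')) is True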

+ 6 - 1
compose/cli/formatter.py

@@ -17,7 +17,12 @@ else:
 
 def get_tty_width():
     try:
-        width, _ = get_terminal_size()
+        # get_terminal_size can't determine the size if compose is piped
+        # to another command. But in that case it doesn't make sense to
+        # format the output to the terminal width anyway, since it is
+        # consumed by another command. So pretend we have a huge terminal
+        # and keep each line of output unwrapped.
+        width, _ = get_terminal_size(fallback=(999, 0))
         return int(width)
     except OSError:
         return 0
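
The fix relies on the fallback parameter: when stdout is not attached to a terminal (for example docker-compose ps | grep web), the size query fails and the fallback is returned instead. A standalone demonstration with the stdlib version of the same call:

import shutil

# In an interactive terminal this prints the real width; when piped
# (python demo.py | cat) the tty query fails and 999 comes back, so
# table rows are never wrapped mid-line. Note that a COLUMNS environment
# variable, if set, still takes precedence over both.
width, _ = shutil.get_terminal_size(fallback=(999, 0))
print(width)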

+ 19 - 48
compose/cli/main.py

@@ -15,14 +15,12 @@ from distutils.spawn import find_executable
 from inspect import getdoc
 from operator import attrgetter

-import docker
+import docker.errors
+import docker.utils
 
 from . import errors
 from . import signals
 from .. import __version__
-from ..bundle import get_image_digests
-from ..bundle import MissingDigests
-from ..bundle import serialize_bundle
 from ..config import ConfigurationError
 from ..config import parse_environment
 from ..config import parse_labels
@@ -34,6 +32,8 @@ from ..const import COMPOSEFILE_V2_2 as V2_2
 from ..const import IS_WINDOWS_PLATFORM
 from ..errors import StreamParseError
 from ..progress_stream import StreamOutputError
+from ..project import get_image_digests
+from ..project import MissingDigests
 from ..project import NoSuchService
 from ..project import OneOffFilter
 from ..project import ProjectError
@@ -213,7 +213,6 @@ class TopLevelCommand(object):
 
     Commands:
       build              Build or rebuild services
-      bundle             Generate a Docker bundle from the Compose file
       config             Validate and view the Compose file
       create             Create services
       down               Stop and remove containers, networks, images, and volumes
@@ -304,38 +303,6 @@ class TopLevelCommand(object):
             progress=options.get('--progress'),
         )

-    def bundle(self, options):
-        """
-        Generate a Distributed Application Bundle (DAB) from the Compose file.
-
-        Images must have digests stored, which requires interaction with a
-        Docker registry. If digests aren't stored for all images, you can fetch
-        them with `docker-compose pull` or `docker-compose push`. To push images
-        automatically when bundling, pass `--push-images`. Only services with
-        a `build` option specified will have their images pushed.
-
-        Usage: bundle [options]
-
-        Options:
-            --push-images              Automatically push images for any services
-                                       which have a `build` option specified.
-
-            -o, --output PATH          Path to write the bundle file to.
-                                       Defaults to "<project name>.dab".
-        """
-        compose_config = get_config_from_options('.', self.toplevel_options)
-
-        output = options["--output"]
-        if not output:
-            output = "{}.dab".format(self.project.name)
-
-        image_digests = image_digests_for_project(self.project, options['--push-images'])
-
-        with open(output, 'w') as f:
-            f.write(serialize_bundle(compose_config, image_digests))
-
-        log.info("Wrote bundle to {}".format(output))
-
     def config(self, options):
         """
         Validate and view the Compose file.
@@ -1045,6 +1012,7 @@ class TopLevelCommand(object):
             --build                    Build images before starting containers.
             --abort-on-container-exit  Stops all containers if any container was
                                        stopped. Incompatible with -d.
+            --attach-dependencies      Attach to dependent containers
             -t, --timeout TIMEOUT      Use this timeout in seconds for container
                                        shutdown when attached or when containers are
                                        already running. (default: 10)
@@ -1066,16 +1034,18 @@ class TopLevelCommand(object):
         remove_orphans = options['--remove-orphans']
         detached = options.get('--detach')
         no_start = options.get('--no-start')
+        attach_dependencies = options.get('--attach-dependencies')
 
-        if detached and (cascade_stop or exit_value_from):
-            raise UserError("--abort-on-container-exit and -d cannot be combined.")
+        if detached and (cascade_stop or exit_value_from or attach_dependencies):
+            raise UserError(
+                "-d cannot be combined with --abort-on-container-exit or --attach-dependencies.")
 
         ignore_orphans = self.toplevel_environment.get_boolean('COMPOSE_IGNORE_ORPHANS')
 
         if ignore_orphans and remove_orphans:
             raise UserError("COMPOSE_IGNORE_ORPHANS and --remove-orphans cannot be combined.")
 
-        opts = ['--detach', '--abort-on-container-exit', '--exit-code-from']
+        opts = ['--detach', '--abort-on-container-exit', '--exit-code-from', '--attach-dependencies']
         for excluded in [x for x in opts if options.get(x) and no_start]:
             raise UserError('--no-start and {} cannot be combined.'.format(excluded))

@@ -1120,7 +1090,10 @@ class TopLevelCommand(object):
             if detached or no_start:
                 return

-            attached_containers = filter_containers_to_service_names(to_attach, service_names)
+            attached_containers = filter_attached_containers(
+                to_attach,
+                service_names,
+                attach_dependencies)
 
             log_printer = log_printer_from_project(
                 self.project,
@@ -1216,12 +1189,10 @@ def timeout_from_opts(options):
     return None if timeout is None else int(timeout)


-def image_digests_for_project(project, allow_push=False):
+def image_digests_for_project(project):
     try:
     try:
-        return get_image_digests(
-            project,
-            allow_push=allow_push
-        )
+        return get_image_digests(project)
+
     except MissingDigests as e:
     except MissingDigests as e:
         def list_images(images):
             return "\n".join("    {}".format(name) for name in sorted(images))
         log_args=log_args)
         log_args=log_args)


-def filter_containers_to_service_names(containers, service_names):
-    if not service_names:
+def filter_attached_containers(containers, service_names, attach_dependencies=False):
+    if attach_dependencies or not service_names:
         return containers

     return [

+ 7 - 0
compose/config/config.py

@@ -5,6 +5,7 @@ import functools
 import io
 import logging
 import os
+import re
 import string
 import sys
 from collections import namedtuple
@@ -214,6 +215,12 @@ class ConfigFile(namedtuple('_ConfigFile', 'filename config')):
                 .format(self.filename, VERSION_EXPLANATION)
             )

+        version_pattern = re.compile(r"^[2-9]+(\.\d+)?$")
+        if not version_pattern.match(version):
+            raise ConfigurationError(
+                'Version "{}" in "{}" is invalid.'
+                .format(version, self.filename))
+
         if version == '2':
             return const.COMPOSEFILE_V2_0


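The added pattern accepts a major version of 2-9 with an optional numeric minor part and rejects anything else before the per-version dispatch below it runs. Its behaviour on a few representative inputs:

import re

version_pattern = re.compile(r"^[2-9]+(\.\d+)?$")

assert version_pattern.match("2")         # mapped to COMPOSEFILE_V2_0 below
assert version_pattern.match("3.7")
assert not version_pattern.match("1")     # v1 files are caught by the earlier check
assert not version_pattern.match("3.x")   # malformed versions now fail fast
assert not version_pattern.match("latest")
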
+ 90 - 2
compose/project.py

@@ -16,6 +16,7 @@ from docker.errors import NotFound
 from docker.utils import version_lt

 from . import parallel
+from .cli.errors import UserError
 from .config import ConfigurationError
 from .config.config import V1
 from .config.sort_services import get_container_name_from_network_mode
@@ -33,6 +34,7 @@ from .service import ContainerNetworkMode
 from .service import ContainerPidMode
 from .service import ConvergenceStrategy
 from .service import NetworkMode
+from .service import NoSuchImageError
 from .service import parse_repository_tag
 from .service import PidMode
 from .service import Service
@@ -42,7 +44,6 @@ from .utils import microseconds_from_time_nano
 from .utils import truncate_string
 from .volume import ProjectVolumes

-
 log = logging.getLogger(__name__)


@@ -86,10 +87,11 @@ class Project(object):
         return labels

     @classmethod
-    def from_config(cls, name, config_data, client, default_platform=None, extra_labels=[]):
+    def from_config(cls, name, config_data, client, default_platform=None, extra_labels=None):
         """
         Construct a Project from a config.Config object.
         """
+        extra_labels = extra_labels or []
         use_networking = (config_data.version and config_data.version != V1)
         networks = build_networks(name, config_data, client)
         project_networks = ProjectNetworks.from_services(
@@ -381,6 +383,7 @@ class Project(object):
 
         def build_service(service):
             service.build(no_cache, pull, force_rm, memory, build_args, gzip, rm, silent, cli, progress)
+
         if parallel_build:
             _, errors = parallel.parallel_execute(
                 services,
@@ -844,6 +847,91 @@ def get_secrets(service, service_secrets, secret_defs):
     return secrets


+def get_image_digests(project):
+    digests = {}
+    needs_push = set()
+    needs_pull = set()
+
+    for service in project.services:
+        try:
+            digests[service.name] = get_image_digest(service)
+        except NeedsPush as e:
+            needs_push.add(e.image_name)
+        except NeedsPull as e:
+            needs_pull.add(e.service_name)
+
+    if needs_push or needs_pull:
+        raise MissingDigests(needs_push, needs_pull)
+
+    return digests
+
+
+def get_image_digest(service):
+    if 'image' not in service.options:
+        raise UserError(
+            "Service '{s.name}' doesn't define an image tag. An image name is "
+            "required to generate a proper image digest. Specify an image repo "
+            "and tag with the 'image' option.".format(s=service))
+
+    _, _, separator = parse_repository_tag(service.options['image'])
+    # Compose file already uses a digest, no lookup required
+    if separator == '@':
+        return service.options['image']
+
+    digest = get_digest(service)
+
+    if digest:
+        return digest
+
+    if 'build' not in service.options:
+        raise NeedsPull(service.image_name, service.name)
+
+    raise NeedsPush(service.image_name)
+
+
+def get_digest(service):
+    digest = None
+    try:
+        image = service.image()
+        # TODO: pick a digest based on the image tag if there are multiple
+        # digests
+        if image['RepoDigests']:
+            digest = image['RepoDigests'][0]
+    except NoSuchImageError:
+        try:
+            # Fetch the image digest from the registry
+            distribution = service.get_image_registry_data()
+
+            if distribution['Descriptor']['digest']:
+                digest = '{image_name}@{digest}'.format(
+                    image_name=service.image_name,
+                    digest=distribution['Descriptor']['digest']
+                )
+        except NoSuchImageError:
+            raise UserError(
+                "Digest not found for service '{service}'. "
+                "Repository does not exist or may require 'docker login'"
+                .format(service=service.name))
+    return digest
+
+
+class MissingDigests(Exception):
+    def __init__(self, needs_push, needs_pull):
+        self.needs_push = needs_push
+        self.needs_pull = needs_pull
+
+
+class NeedsPush(Exception):
+    def __init__(self, image_name):
+        self.image_name = image_name
+
+
+class NeedsPull(Exception):
+    def __init__(self, image_name, service_name):
+        self.image_name = image_name
+        self.service_name = service_name
+
+
 class NoSuchService(Exception):
     def __init__(self, name):
         if isinstance(name, six.binary_type):
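
get_image_digest() above short-circuits when the image reference is already pinned by digest, which it detects via the separator returned from parse_repository_tag (imported here from compose/service.py). split_ref below is a simplified, illustrative stand-in for that helper, not compose's actual implementation, showing the distinction the code relies on:

def split_ref(image):
    if '@' in image:
        repo, _, digest = image.partition('@')
        return repo, digest, '@'
    repo, sep, tag = image.rpartition(':')
    if sep and '/' not in tag:  # don't split on a registry port like host:5000/img
        return repo, tag, ':'
    return image, 'latest', ''

assert split_ref('redis@sha256:abc123')[2] == '@'  # pinned: no registry lookup needed
assert split_ref('redis:5.0')[2] == ':'            # tagged: digest must be resolved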

+ 2 - 2
compose/service.py

@@ -185,7 +185,7 @@ class Service(object):
             scale=1,
             pid_mode=None,
             default_platform=None,
-            extra_labels=[],
+            extra_labels=None,
             **options
     ):
         self.name = name
@@ -201,7 +201,7 @@ class Service(object):
         self.scale_num = scale
         self.default_platform = default_platform
         self.options = options
-        self.extra_labels = extra_labels
+        self.extra_labels = extra_labels or []
 
     def __repr__(self):
         return '<Service: {}>'.format(self.name)
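
This change, like the matching ones in compose/cli/command.py and compose/project.py above, removes mutable default arguments, a classic Python pitfall: the default list is created once when the function is defined and shared by every subsequent call. A minimal reproduction of the bug and the None idiom used to fix it:

def broken(labels=[]):
    labels.append('x')
    return labels

assert broken() == ['x']
assert broken() == ['x', 'x']   # state leaked from the first call

def fixed(labels=None):
    labels = labels or []
    labels.append('x')
    return labels

assert fixed() == ['x']
assert fixed() == ['x']         # fresh list on every call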

+ 1 - 14
contrib/completion/bash/docker-compose

@@ -126,18 +126,6 @@ _docker_compose_build() {
 }


-_docker_compose_bundle() {
-	case "$prev" in
-		--output|-o)
-			_filedir
-			return
-			;;
-	esac
-
-	COMPREPLY=( $( compgen -W "--push-images --help --output -o" -- "$cur" ) )
-}
-
-
 _docker_compose_config() {
 	case "$prev" in
 		--hash)
@@ -557,7 +545,7 @@ _docker_compose_up() {
 
 	case "$cur" in
 		-*)
-			COMPREPLY=( $( compgen -W "--abort-on-container-exit --always-recreate-deps --build -d --detach --exit-code-from --force-recreate --help --no-build --no-color --no-deps --no-recreate --no-start --renew-anon-volumes -V --remove-orphans --scale --timeout -t" -- "$cur" ) )
+			COMPREPLY=( $( compgen -W "--abort-on-container-exit --always-recreate-deps --attach-dependencies --build -d --detach --exit-code-from --force-recreate --help --no-build --no-color --no-deps --no-recreate --no-start --renew-anon-volumes -V --remove-orphans --scale --timeout -t" -- "$cur" ) )
 			;;
 		*)
 			__docker_compose_complete_services
@@ -581,7 +569,6 @@ _docker_compose() {
 
 	local commands=(
 		build
-		bundle
 		config
 		create
 		down

+ 2 - 7
contrib/completion/zsh/_docker-compose

@@ -121,12 +121,6 @@ __docker-compose_subcommand() {
                 '--parallel[Build images in parallel.]' \
                 '*:services:__docker-compose_services_from_build' && ret=0
             ;;
-        (bundle)
-            _arguments \
-                $opts_help \
-                '--push-images[Automatically push images for any services which have a `build` option specified.]' \
-                '(--output -o)'{--output,-o}'[Path to write the bundle file to. Defaults to "<project name>.dab".]:file:_files' && ret=0
-            ;;
         (config)
             _arguments \
                 $opts_help \
@@ -290,7 +284,7 @@ __docker-compose_subcommand() {
         (up)
             _arguments \
                 $opts_help \
-                '(--abort-on-container-exit)-d[Detached mode: Run containers in the background, print new container names. Incompatible with --abort-on-container-exit.]' \
+                '(--abort-on-container-exit)-d[Detached mode: Run containers in the background, print new container names. Incompatible with --abort-on-container-exit and --attach-dependencies.]' \
                 $opts_no_color \
                 $opts_no_deps \
                 $opts_force_recreate \
@@ -298,6 +292,7 @@ __docker-compose_subcommand() {
                 $opts_no_build \
                 "(--no-build)--build[Build images before starting containers.]" \
                 "(-d)--abort-on-container-exit[Stops all containers if any container was stopped. Incompatible with -d.]" \
+                "(-d)--attach-dependencies[Attach to dependent containers. Incompatible with -d.]" \
                 '(-t --timeout)'{-t,--timeout}"[Use this timeout in seconds for container shutdown when attached or when containers are already running. (default: 10)]:seconds: " \
                 '--scale[SERVICE=NUM Scale SERVICE to NUM instances. Overrides the `scale` setting in the Compose file if present.]:service scale SERVICE=NUM: ' \
                 '--exit-code-from=[Return the exit code of the selected service container. Implies --abort-on-container-exit]:service:__docker-compose_services' \

+ 1 - 1
requirements-build.txt

@@ -1 +1 @@
-pyinstaller==3.5
+pyinstaller==3.6

+ 4 - 3
requirements-dev.txt

@@ -1,6 +1,7 @@
-coverage==4.5.4
-ddt==1.2.0
+coverage==5.0.3
+ddt==1.2.2
 flake8==3.7.9
 mock==3.0.5
-pytest==3.6.3
+pytest==5.3.2; python_version >= '3.5'
+pytest==4.6.5; python_version < '3.5'
 pytest-cov==2.8.1

+ 12 - 11
requirements.txt

@@ -1,25 +1,26 @@
 backports.shutil_get_terminal_size==1.0.0
 backports.ssl-match-hostname==3.5.0.1; python_version < '3'
-cached-property==1.3.0
-certifi==2017.4.17
+cached-property==1.5.1
+certifi==2019.11.28
 chardet==3.0.4
 chardet==3.0.4
-colorama==0.4.0; sys_platform == 'win32'
+colorama==0.4.3; sys_platform == 'win32'
 docker==4.1.0
 docker==4.1.0
 docker-pycreds==0.4.0
 docker-pycreds==0.4.0
 dockerpty==0.4.1
 dockerpty==0.4.1
 docopt==0.6.2
 docopt==0.6.2
 enum34==1.1.6; python_version < '3.4'
 enum34==1.1.6; python_version < '3.4'
 functools32==3.2.3.post2; python_version < '3.2'
 functools32==3.2.3.post2; python_version < '3.2'
-idna==2.5
-ipaddress==1.0.18
-jsonschema==3.0.1
-paramiko==2.6.0
+idna==2.8
+ipaddress==1.0.23
+jsonschema==3.2.0
+paramiko==2.7.1
 pypiwin32==219; sys_platform == 'win32' and python_version < '3.6'
 pypiwin32==219; sys_platform == 'win32' and python_version < '3.6'
 pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6'
 pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6'
-PySocks==1.6.7
-PyYAML==4.2b1
+PySocks==1.7.1
+PyYAML==5.3
 requests==2.22.0
 requests==2.22.0
 six==1.12.0
 six==1.12.0
+subprocess32==3.5.4; python_version < '3.2'
 texttable==1.6.2
 texttable==1.6.2
-urllib3==1.24.2; python_version == '3.3'
-websocket-client==0.32.0
+urllib3==1.25.7; python_version == '3.3'
+websocket-client==0.57.0

+ 0 - 20
script/Jenkinsfile.fossa

@@ -1,20 +0,0 @@
-pipeline {
-    agent any
-    stages {
-        stage("License Scan") {
-            agent {
-                label 'ubuntu-1604-aufs-edge'
-            }
-
-            steps {
-                withCredentials([
-                    string(credentialsId: 'fossa-api-key', variable: 'FOSSA_API_KEY')
-                ]) {
-                    checkout scm
-                    sh "FOSSA_API_KEY='${FOSSA_API_KEY}' BRANCH_NAME='${env.BRANCH_NAME}' make -f script/fossa.mk fossa-analyze"
-                    sh "FOSSA_API_KEY='${FOSSA_API_KEY}' make -f script/fossa.mk fossa-test"
-                }
-            }
-        }
-    }
-}

+ 1 - 1
script/build/linux-entrypoint

@@ -24,7 +24,7 @@ if [ ! -z "${BUILD_BOOTLOADER}" ]; then
     git clone --single-branch --branch develop https://github.com/pyinstaller/pyinstaller.git /tmp/pyinstaller
     cd /tmp/pyinstaller/bootloader
     # Checkout commit corresponding to version in requirements-build
-    git checkout v3.5
+    git checkout v3.6
     "${VENV}"/bin/python3 ./waf configure --no-lsb all
     "${VENV}"/bin/pip3 install ..
     cd "${CODE_PATH}"

+ 0 - 16
script/fossa.mk

@@ -1,16 +0,0 @@
-# Variables for Fossa
-BUILD_ANALYZER?=docker/fossa-analyzer
-FOSSA_OPTS?=--option all-tags:true --option allow-unresolved:true
-
-fossa-analyze:
-	docker run --rm -e FOSSA_API_KEY=$(FOSSA_API_KEY) \
-		-v $(CURDIR)/$*:/go/src/github.com/docker/compose \
-		-w /go/src/github.com/docker/compose \
-		$(BUILD_ANALYZER) analyze ${FOSSA_OPTS} --branch ${BRANCH_NAME}
-
- # This command is used to run the fossa test command
-fossa-test:
-	docker run -i -e FOSSA_API_KEY=$(FOSSA_API_KEY) \
-		-v $(CURDIR)/$*:/go/src/github.com/docker/compose \
-		-w /go/src/github.com/docker/compose \
-		$(BUILD_ANALYZER) test

+ 39 - 0
script/release/generate_changelog.sh

@@ -0,0 +1,39 @@
+#!/bin/bash
+
+set -e
+set -x
+
+## Usage :
+## changelog PREVIOUS_TAG..HEAD
+
+# configure refs so we get pull-requests metadata
+git config --add remote.origin.fetch +refs/pull/*/head:refs/remotes/origin/pull/*
+git fetch origin
+
+RANGE=${1:-"$(git describe --tags  --abbrev=0)..HEAD"}
+echo "Generate changelog for range ${RANGE}"
+echo
+
+pullrequests() {
+    for commit in $(git log ${RANGE} --format='format:%H'); do
+        # Get the oldest remotes/origin/pull/* branch to include this commit, i.e. the one to introduce it
+        git branch -a --sort=committerdate  --contains $commit --list 'origin/pull/*' | head -1 | cut -d'/' -f4
+    done
+}
+
+changes=$(pullrequests | uniq)
+
+echo "pull requests merged within range:"
+echo $changes
+
+echo '#Features' > CHANGELOG.md
+for pr in $changes; do
+    curl -fs -H "Authorization: token ${GITHUB_TOKEN}" https://api.github.com/repos/docker/compose/pulls/${pr} \
+    | jq -r ' select( .labels[].name | contains("kind/feature") ) | "* "+.title' >> CHANGELOG.md
+done
+
+echo '#Bugs' >> CHANGELOG.md
+for pr in $changes; do
+    curl -fs -H "Authorization: token ${GITHUB_TOKEN}" https://api.github.com/repos/docker/compose/pulls/${pr} \
+    | jq -r ' select( .labels[].name | contains("kind/bug") ) | "* "+.title' >> CHANGELOG.md
+done
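For readers who prefer Python to the curl + jq pipeline above, a hedged sketch of the same per-PR label query using `requests` (already pinned in requirements.txt); the PR number passed in is a placeholder, and the GitHub token is read from the same GITHUB_TOKEN variable the script uses:

import os

import requests

def titles_with_label(pr_numbers, label):
    headers = {'Authorization': 'token {}'.format(os.environ['GITHUB_TOKEN'])}
    for number in pr_numbers:
        pr = requests.get(
            'https://api.github.com/repos/docker/compose/pulls/{}'.format(number),
            headers=headers,
        ).json()
        # Mirrors jq's `select(.labels[].name | contains(...))`.
        if any(label in lbl['name'] for lbl in pr.get('labels', [])):
            yield '* ' + pr['title']

for line in titles_with_label([1234], 'kind/feature'):  # 1234 is a placeholder PR number
    print(line)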

+ 1 - 1
script/run/run.sh

@@ -15,7 +15,7 @@
 
 set -e
 
-VERSION="1.25.1"
+VERSION="1.25.2-rc1"
 IMAGE="docker/compose:$VERSION"
 
 

+ 4 - 4
script/setup/osx

@@ -13,13 +13,13 @@ if ! [ ${DEPLOYMENT_TARGET} == "$(macos_version)" ]; then
   SDK_SHA1=dd228a335194e3392f1904ce49aff1b1da26ca62
 fi
 
-OPENSSL_VERSION=1.1.1c
+OPENSSL_VERSION=1.1.1d
 OPENSSL_URL=https://www.openssl.org/source/openssl-${OPENSSL_VERSION}.tar.gz
-OPENSSL_SHA1=71b830a077276cbeccc994369538617a21bee808
+OPENSSL_SHA1=056057782325134b76d1931c48f2c7e6595d7ef4
 
-PYTHON_VERSION=3.7.4
+PYTHON_VERSION=3.7.5
 PYTHON_URL=https://www.python.org/ftp/python/${PYTHON_VERSION}/Python-${PYTHON_VERSION}.tgz
-PYTHON_SHA1=fb1d764be8a9dcd40f2f152a610a0ab04e0d0ed3
+PYTHON_SHA1=8b0311d4cca19f0ea9181731189fa33c9f5aedf9
 
 #
 # Install prerequisites.

+ 1 - 1
setup.py

@@ -32,7 +32,7 @@ def find_version(*file_paths):
 install_requires = [
     'cached-property >= 1.2.0, < 2',
     'docopt >= 0.6.1, < 1',
-    'PyYAML >= 3.10, < 5',
+    'PyYAML >= 3.10, < 6',
     'requests >= 2.20.0, < 3',
     'texttable >= 0.9.0, < 2',
     'websocket-client >= 0.32.0, < 1',
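Loosening the PyYAML bound to < 6 admits the PyYAML 5.x line (pinned to 5.3 in requirements.txt above), where calling yaml.load() without an explicit Loader emits a YAMLLoadWarning; that is likely why the test changes below migrate to yaml.safe_load. A short illustration, not part of the diff:

import yaml

document = "services:\n  web:\n    image: busybox\n"

data = yaml.safe_load(document)                      # warning-free, safe for plain data
same = yaml.load(document, Loader=yaml.SafeLoader)   # explicit-Loader equivalent
assert data == same == {'services': {'web': {'image': 'busybox'}}}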

+ 68 - 57
tests/acceptance/cli_test.py

@@ -43,6 +43,24 @@ ProcessResult = namedtuple('ProcessResult', 'stdout stderr')
 
 BUILD_CACHE_TEXT = 'Using cache'
 BUILD_PULL_TEXT = 'Status: Image is up to date for busybox:1.27.2'
+COMPOSE_COMPATIBILITY_DICT = {
+    'version': '2.3',
+    'volumes': {'foo': {'driver': 'default'}},
+    'networks': {'bar': {}},
+    'services': {
+        'foo': {
+            'command': '/bin/true',
+            'image': 'alpine:3.10.1',
+            'scale': 3,
+            'restart': 'always:7',
+            'mem_limit': '300M',
+            'mem_reservation': '100M',
+            'cpus': 0.7,
+            'volumes': ['foo:/bar:rw'],
+            'networks': {'bar': None},
+        }
+    },
+}
 
 
 def start_process(base_dir, options):
@@ -269,7 +287,7 @@ services:
         # assert there are no python objects encoded in the output
         assert '!!' not in result.stdout
 
-        output = yaml.load(result.stdout)
+        output = yaml.safe_load(result.stdout)
         expected = {
             'version': '2.0',
             'volumes': {'data': {'driver': 'local'}},
@@ -294,7 +312,7 @@ services:
     def test_config_restart(self):
         self.base_dir = 'tests/fixtures/restart'
         result = self.dispatch(['config'])
-        assert yaml.load(result.stdout) == {
+        assert yaml.safe_load(result.stdout) == {
             'version': '2.0',
             'services': {
                 'never': {
@@ -323,7 +341,7 @@ services:
     def test_config_external_network(self):
         self.base_dir = 'tests/fixtures/networks'
         result = self.dispatch(['-f', 'external-networks.yml', 'config'])
-        json_result = yaml.load(result.stdout)
+        json_result = yaml.safe_load(result.stdout)
         assert 'networks' in json_result
         assert json_result['networks'] == {
             'networks_foo': {
@@ -337,7 +355,7 @@ services:
     def test_config_with_dot_env(self):
         self.base_dir = 'tests/fixtures/default-env-file'
         result = self.dispatch(['config'])
-        json_result = yaml.load(result.stdout)
+        json_result = yaml.safe_load(result.stdout)
         assert json_result == {
             'services': {
                 'web': {
@@ -352,7 +370,7 @@ services:
     def test_config_with_env_file(self):
         self.base_dir = 'tests/fixtures/default-env-file'
         result = self.dispatch(['--env-file', '.env2', 'config'])
-        json_result = yaml.load(result.stdout)
+        json_result = yaml.safe_load(result.stdout)
         assert json_result == {
             'services': {
                 'web': {
@@ -367,7 +385,7 @@ services:
     def test_config_with_dot_env_and_override_dir(self):
         self.base_dir = 'tests/fixtures/default-env-file'
         result = self.dispatch(['--project-directory', 'alt/', 'config'])
-        json_result = yaml.load(result.stdout)
+        json_result = yaml.safe_load(result.stdout)
         assert json_result == {
             'services': {
                 'web': {
@@ -382,7 +400,7 @@ services:
     def test_config_external_volume_v2(self):
         self.base_dir = 'tests/fixtures/volumes'
         result = self.dispatch(['-f', 'external-volumes-v2.yml', 'config'])
-        json_result = yaml.load(result.stdout)
+        json_result = yaml.safe_load(result.stdout)
         assert 'volumes' in json_result
         assert json_result['volumes'] == {
             'foo': {
@@ -398,7 +416,7 @@ services:
     def test_config_external_volume_v2_x(self):
         self.base_dir = 'tests/fixtures/volumes'
         result = self.dispatch(['-f', 'external-volumes-v2-x.yml', 'config'])
-        json_result = yaml.load(result.stdout)
+        json_result = yaml.safe_load(result.stdout)
         assert 'volumes' in json_result
         assert json_result['volumes'] == {
             'foo': {
@@ -414,7 +432,7 @@ services:
     def test_config_external_volume_v3_x(self):
         self.base_dir = 'tests/fixtures/volumes'
         result = self.dispatch(['-f', 'external-volumes-v3-x.yml', 'config'])
-        json_result = yaml.load(result.stdout)
+        json_result = yaml.safe_load(result.stdout)
         assert 'volumes' in json_result
         assert json_result['volumes'] == {
             'foo': {
@@ -430,7 +448,7 @@ services:
     def test_config_external_volume_v3_4(self):
         self.base_dir = 'tests/fixtures/volumes'
         result = self.dispatch(['-f', 'external-volumes-v3-4.yml', 'config'])
-        json_result = yaml.load(result.stdout)
+        json_result = yaml.safe_load(result.stdout)
         assert 'volumes' in json_result
         assert json_result['volumes'] == {
             'foo': {
@@ -446,7 +464,7 @@ services:
     def test_config_external_network_v3_5(self):
         self.base_dir = 'tests/fixtures/networks'
         result = self.dispatch(['-f', 'external-networks-v3-5.yml', 'config'])
-        json_result = yaml.load(result.stdout)
+        json_result = yaml.safe_load(result.stdout)
         assert 'networks' in json_result
         assert json_result['networks'] == {
             'foo': {
@@ -462,7 +480,7 @@ services:
     def test_config_v1(self):
         self.base_dir = 'tests/fixtures/v1-config'
         result = self.dispatch(['config'])
-        assert yaml.load(result.stdout) == {
+        assert yaml.safe_load(result.stdout) == {
             'version': '2.1',
             'services': {
                 'net': {
@@ -487,7 +505,7 @@ services:
         self.base_dir = 'tests/fixtures/v3-full'
         result = self.dispatch(['config'])
 
-        assert yaml.load(result.stdout) == {
+        assert yaml.safe_load(result.stdout) == {
             'version': '3.5',
             'volumes': {
                 'foobar': {
@@ -564,24 +582,23 @@ services:
         self.base_dir = 'tests/fixtures/compatibility-mode'
         result = self.dispatch(['--compatibility', 'config'])
 
-        assert yaml.load(result.stdout) == {
-            'version': '2.3',
-            'volumes': {'foo': {'driver': 'default'}},
-            'networks': {'bar': {}},
-            'services': {
-                'foo': {
-                    'command': '/bin/true',
-                    'image': 'alpine:3.10.1',
-                    'scale': 3,
-                    'restart': 'always:7',
-                    'mem_limit': '300M',
-                    'mem_reservation': '100M',
-                    'cpus': 0.7,
-                    'volumes': ['foo:/bar:rw'],
-                    'networks': {'bar': None},
-                }
-            },
-        }
+        assert yaml.load(result.stdout) == COMPOSE_COMPATIBILITY_DICT
+
+    @mock.patch.dict(os.environ)
+    def test_config_compatibility_mode_from_env(self):
+        self.base_dir = 'tests/fixtures/compatibility-mode'
+        os.environ['COMPOSE_COMPATIBILITY'] = 'true'
+        result = self.dispatch(['config'])
+
+        assert yaml.load(result.stdout) == COMPOSE_COMPATIBILITY_DICT
+
+    @mock.patch.dict(os.environ)
+    def test_config_compatibility_mode_from_env_and_option_precedence(self):
+        self.base_dir = 'tests/fixtures/compatibility-mode'
+        os.environ['COMPOSE_COMPATIBILITY'] = 'false'
+        result = self.dispatch(['--compatibility', 'config'])
+
+        assert yaml.load(result.stdout) == COMPOSE_COMPATIBILITY_DICT
 
     def test_ps(self):
         self.project.get_service('simple').create_container()
@@ -855,32 +872,6 @@ services:
         )
         assert 'Favorite Touhou Character: hong.meiling' in result.stdout
 
-    def test_bundle_with_digests(self):
-        self.base_dir = 'tests/fixtures/bundle-with-digests/'
-        tmpdir = pytest.ensuretemp('cli_test_bundle')
-        self.addCleanup(tmpdir.remove)
-        filename = str(tmpdir.join('example.dab'))
-
-        self.dispatch(['bundle', '--output', filename])
-        with open(filename, 'r') as fh:
-            bundle = json.load(fh)
-
-        assert bundle == {
-            'Version': '0.1',
-            'Services': {
-                'web': {
-                    'Image': ('dockercloud/hello-world@sha256:fe79a2cfbd17eefc3'
-                              '44fb8419420808df95a1e22d93b7f621a7399fd1e9dca1d'),
-                    'Networks': ['default'],
-                },
-                'redis': {
-                    'Image': ('redis@sha256:a84cb8f53a70e19f61ff2e1d5e73fb7ae62d'
-                              '374b2b7392de1e7d77be26ef8f7b'),
-                    'Networks': ['default'],
-                }
-            },
-        }
-
     def test_build_override_dir(self):
         self.base_dir = 'tests/fixtures/build-path-override-dir'
         self.override_dir = os.path.abspath('tests/fixtures')
@@ -1580,6 +1571,26 @@ services:
         assert len(db.containers()) == 0
         assert len(console.containers()) == 0
 
+    def test_up_with_attach_dependencies(self):
+        self.base_dir = 'tests/fixtures/echo-services-dependencies'
+        result = self.dispatch(['up', '--attach-dependencies', '--no-color', 'simple'], None)
+        simple_name = self.project.get_service('simple').containers(stopped=True)[0].name_without_project
+        another_name = self.project.get_service('another').containers(
+            stopped=True
+        )[0].name_without_project
+
+        assert '{}   | simple'.format(simple_name) in result.stdout
+        assert '{}  | another'.format(another_name) in result.stdout
+
+    def test_up_handles_aborted_dependencies(self):
+        self.base_dir = 'tests/fixtures/abort-on-container-exit-dependencies'
+        proc = start_process(
+            self.base_dir,
+            ['up', 'simple', '--attach-dependencies', '--abort-on-container-exit'])
+        wait_on_condition(ContainerCountCondition(self.project, 0))
+        proc.wait()
+        assert proc.returncode == 1
+
     def test_up_with_force_recreate(self):
         self.dispatch(['up', '-d'], None)
         service = self.project.get_service('simple')
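The two compatibility tests added above pin down the new COMPOSE_COMPATIBILITY environment variable: setting it to a truthy value behaves like passing --compatibility, and the CLI flag still enables compatibility mode even when the variable is set to false. A hedged usage sketch, assuming a docker-compose binary on PATH and a compose file in the current directory:

import os
import subprocess

env = dict(os.environ, COMPOSE_COMPATIBILITY='true')
# Equivalent to: docker-compose --compatibility config
output = subprocess.check_output(['docker-compose', 'config'], env=env)
print(output.decode('utf-8'))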

+ 10 - 0
tests/fixtures/abort-on-container-exit-dependencies/docker-compose.yml

@@ -0,0 +1,10 @@
+version: "2.0"
+services:
+  simple:
+    image: busybox:1.31.0-uclibc
+    command: top
+    depends_on:
+      - another
+  another:
+    image: busybox:1.31.0-uclibc
+    command: ls /thecakeisalie

+ 0 - 9
tests/fixtures/bundle-with-digests/docker-compose.yml

@@ -1,9 +0,0 @@
-
-version: '2.0'
-
-services:
-  web:
-    image: dockercloud/hello-world@sha256:fe79a2cfbd17eefc344fb8419420808df95a1e22d93b7f621a7399fd1e9dca1d
-
-  redis:
-    image: redis@sha256:a84cb8f53a70e19f61ff2e1d5e73fb7ae62d374b2b7392de1e7d77be26ef8f7b

+ 10 - 0
tests/fixtures/echo-services-dependencies/docker-compose.yml

@@ -0,0 +1,10 @@
+version: "2.0"
+services:
+  simple:
+    image: busybox:1.31.0-uclibc
+    command: echo simple
+    depends_on:
+      - another
+  another:
+    image: busybox:1.31.0-uclibc
+    command: echo another
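This fixture backs test_up_with_attach_dependencies above: `simple` depends on `another`, and the new --attach-dependencies flag attaches log output for both services even though only `simple` is requested. A hedged sketch of the equivalent invocation, assuming a local engine and this fixture directory:

import subprocess

result = subprocess.run(
    ['docker-compose', 'up', '--attach-dependencies', '--no-color', 'simple'],
    cwd='tests/fixtures/echo-services-dependencies',
    capture_output=True,
    text=True,
)
# Output from both services is attached, not just simple's.
assert 'simple' in result.stdout and 'another' in result.stdout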

+ 15 - 0
tests/helpers.py

@@ -1,6 +1,7 @@
 from __future__ import absolute_import
 from __future__ import unicode_literals
 
+import contextlib
 import os
 
 from compose.config.config import ConfigDetails
@@ -55,3 +56,17 @@ def create_host_file(client, filename):
         content = fh.read()
 
     return create_custom_host_file(client, filename, content)
+
+
+@contextlib.contextmanager
+def cd(path):
+    """
+    A context manager which changes the working directory to the given
+    path, and then changes it back to its previous value on exit.
+    """
+    prev_cwd = os.getcwd()
+    os.chdir(path)
+    try:
+        yield
+    finally:
+        os.chdir(prev_cwd)
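The new cd() helper underpins the test rewrites later in this diff: a plain tempfile.mkdtemp() paired with shutil.rmtree for cleanup replaces pytest's deprecated ensuretemp()/as_cwd(). A minimal usage sketch (the import path is illustrative):

import shutil
import tempfile

from tests.helpers import cd  # illustrative import path

tmpdir = tempfile.mkdtemp('example')
try:
    with cd(tmpdir):
        pass  # code here runs with tmpdir as the working directory
finally:
    shutil.rmtree(tmpdir)  # the tests register self.addCleanup(shutil.rmtree, tmpdir) instead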

+ 4 - 4
tests/integration/project_test.py

@@ -8,7 +8,6 @@ import random
 import shutil
 import tempfile
 
-import py
 import pytest
 from docker.errors import APIError
 from docker.errors import NotFound
@@ -16,6 +15,7 @@ from docker.errors import NotFound
 from .. import mock
 from ..helpers import build_config as load_config
 from ..helpers import BUSYBOX_IMAGE_WITH_TAG
+from ..helpers import cd
 from ..helpers import create_host_file
 from .testcases import DockerClientTestCase
 from .testcases import SWARM_SKIP_CONTAINERS_ALL
@@ -1329,9 +1329,9 @@ class ProjectTest(DockerClientTestCase):
             })
         details = config.ConfigDetails('.', [base_file, override_file])
 
-        tmpdir = py.test.ensuretemp('logging_test')
-        self.addCleanup(tmpdir.remove)
-        with tmpdir.as_cwd():
+        tmpdir = tempfile.mkdtemp('logging_test')
+        self.addCleanup(shutil.rmtree, tmpdir)
+        with cd(tmpdir):
             config_data = config.load(details)
         project = Project.from_config(
             name='composetest', config_data=config_data, client=self.client

+ 17 - 12
tests/integration/state_test.py

@@ -6,8 +6,10 @@ from __future__ import absolute_import
 from __future__ import unicode_literals
 
 import copy
+import os
+import shutil
+import tempfile
 
-import py
 from docker.errors import ImageNotFound
 
 from ..helpers import BUSYBOX_IMAGE_WITH_TAG
@@ -426,29 +428,32 @@ class ServiceStateTest(DockerClientTestCase):
 
     @no_cluster('Can not guarantee the build will be run on the same node the service is deployed')
     def test_trigger_recreate_with_build(self):
-        context = py.test.ensuretemp('test_trigger_recreate_with_build')
-        self.addCleanup(context.remove)
+        context = tempfile.mkdtemp('test_trigger_recreate_with_build')
+        self.addCleanup(shutil.rmtree, context)
 
         base_image = "FROM busybox\nLABEL com.docker.compose.test_image=true\n"
-        dockerfile = context.join('Dockerfile')
-        dockerfile.write(base_image)
+        dockerfile = os.path.join(context, 'Dockerfile')
+        with open(dockerfile, mode="w") as dockerfile_fh:
+            dockerfile_fh.write(base_image)
 
         web = self.create_service('web', build={'context': str(context)})
         container = web.create_container()
 
-        dockerfile.write(base_image + 'CMD echo hello world\n')
+        with open(dockerfile, mode="w") as dockerfile_fh:
+            dockerfile_fh.write(base_image + 'CMD echo hello world\n')
         web.build()
 
         web = self.create_service('web', build={'context': str(context)})
         assert ('recreate', [container]) == web.convergence_plan()
 
     def test_image_changed_to_build(self):
-        context = py.test.ensuretemp('test_image_changed_to_build')
-        self.addCleanup(context.remove)
-        context.join('Dockerfile').write("""
-            FROM busybox
-            LABEL com.docker.compose.test_image=true
-        """)
+        context = tempfile.mkdtemp('test_image_changed_to_build')
+        self.addCleanup(shutil.rmtree, context)
+        with open(os.path.join(context, 'Dockerfile'), mode="w") as dockerfile:
+            dockerfile.write("""
+                FROM busybox
+                LABEL com.docker.compose.test_image=true
+            """)
 
         web = self.create_service('web', image='busybox')
         container = web.create_container()

+ 0 - 233
tests/unit/bundle_test.py

@@ -1,233 +0,0 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import docker
-import pytest
-
-from .. import mock
-from compose import bundle
-from compose import service
-from compose.cli.errors import UserError
-from compose.config.config import Config
-from compose.const import COMPOSEFILE_V2_0 as V2_0
-from compose.service import NoSuchImageError
-
-
-@pytest.fixture
-def mock_service():
-    return mock.create_autospec(
-        service.Service,
-        client=mock.create_autospec(docker.APIClient),
-        options={})
-
-
-def test_get_image_digest_exists(mock_service):
-    mock_service.options['image'] = 'abcd'
-    mock_service.image.return_value = {'RepoDigests': ['digest1']}
-    digest = bundle.get_image_digest(mock_service)
-    assert digest == 'digest1'
-
-
-def test_get_image_digest_image_uses_digest(mock_service):
-    mock_service.options['image'] = image_id = 'redis@sha256:digest'
-
-    digest = bundle.get_image_digest(mock_service)
-    assert digest == image_id
-    assert not mock_service.image.called
-
-
-def test_get_image_digest_from_repository(mock_service):
-    mock_service.options['image'] = 'abcd'
-    mock_service.image_name = 'abcd'
-    mock_service.image.side_effect = NoSuchImageError(None)
-    mock_service.get_image_registry_data.return_value = {'Descriptor': {'digest': 'digest'}}
-
-    digest = bundle.get_image_digest(mock_service)
-    assert digest == 'abcd@digest'
-
-
-def test_get_image_digest_no_image(mock_service):
-    with pytest.raises(UserError) as exc:
-        bundle.get_image_digest(service.Service(name='theservice'))
-
-    assert "doesn't define an image tag" in exc.exconly()
-
-
-def test_push_image_with_saved_digest(mock_service):
-    mock_service.options['build'] = '.'
-    mock_service.options['image'] = image_id = 'abcd'
-    mock_service.push.return_value = expected = 'sha256:thedigest'
-    mock_service.image.return_value = {'RepoDigests': ['digest1']}
-
-    digest = bundle.push_image(mock_service)
-    assert digest == image_id + '@' + expected
-
-    mock_service.push.assert_called_once_with()
-    assert not mock_service.client.push.called
-
-
-def test_push_image(mock_service):
-    mock_service.options['build'] = '.'
-    mock_service.options['image'] = image_id = 'abcd'
-    mock_service.push.return_value = expected = 'sha256:thedigest'
-    mock_service.image.return_value = {'RepoDigests': []}
-
-    digest = bundle.push_image(mock_service)
-    assert digest == image_id + '@' + expected
-
-    mock_service.push.assert_called_once_with()
-    mock_service.client.pull.assert_called_once_with(digest)
-
-
-def test_to_bundle():
-    image_digests = {'a': 'aaaa', 'b': 'bbbb'}
-    services = [
-        {'name': 'a', 'build': '.', },
-        {'name': 'b', 'build': './b'},
-    ]
-    config = Config(
-        version=V2_0,
-        services=services,
-        volumes={'special': {}},
-        networks={'extra': {}},
-        secrets={},
-        configs={}
-    )
-
-    with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log:
-        output = bundle.to_bundle(config, image_digests)
-
-    assert mock_log.mock_calls == [
-        mock.call("Unsupported top level key 'networks' - ignoring"),
-        mock.call("Unsupported top level key 'volumes' - ignoring"),
-    ]
-
-    assert output == {
-        'Version': '0.1',
-        'Services': {
-            'a': {'Image': 'aaaa', 'Networks': ['default']},
-            'b': {'Image': 'bbbb', 'Networks': ['default']},
-        }
-    }
-
-
-def test_convert_service_to_bundle():
-    name = 'theservice'
-    image_digest = 'thedigest'
-    service_dict = {
-        'ports': ['80'],
-        'expose': ['1234'],
-        'networks': {'extra': {}},
-        'command': 'foo',
-        'entrypoint': 'entry',
-        'environment': {'BAZ': 'ENV'},
-        'build': '.',
-        'working_dir': '/tmp',
-        'user': 'root',
-        'labels': {'FOO': 'LABEL'},
-        'privileged': True,
-    }
-
-    with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log:
-        config = bundle.convert_service_to_bundle(name, service_dict, image_digest)
-
-    mock_log.assert_called_once_with(
-        "Unsupported key 'privileged' in services.theservice - ignoring")
-
-    assert config == {
-        'Image': image_digest,
-        'Ports': [
-            {'Protocol': 'tcp', 'Port': 80},
-            {'Protocol': 'tcp', 'Port': 1234},
-        ],
-        'Networks': ['extra'],
-        'Command': ['entry', 'foo'],
-        'Env': ['BAZ=ENV'],
-        'WorkingDir': '/tmp',
-        'User': 'root',
-        'Labels': {'FOO': 'LABEL'},
-    }
-
-
-def test_set_command_and_args_none():
-    config = {}
-    bundle.set_command_and_args(config, [], [])
-    assert config == {}
-
-
-def test_set_command_and_args_from_command():
-    config = {}
-    bundle.set_command_and_args(config, [], "echo ok")
-    assert config == {'Args': ['echo', 'ok']}
-
-
-def test_set_command_and_args_from_entrypoint():
-    config = {}
-    bundle.set_command_and_args(config, "echo entry", [])
-    assert config == {'Command': ['echo', 'entry']}
-
-
-def test_set_command_and_args_from_both():
-    config = {}
-    bundle.set_command_and_args(config, "echo entry", ["extra", "arg"])
-    assert config == {'Command': ['echo', 'entry', "extra", "arg"]}
-
-
-def test_make_service_networks_default():
-    name = 'theservice'
-    service_dict = {}
-
-    with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log:
-        networks = bundle.make_service_networks(name, service_dict)
-
-    assert not mock_log.called
-    assert networks == ['default']
-
-
-def test_make_service_networks():
-    name = 'theservice'
-    service_dict = {
-        'networks': {
-            'foo': {
-                'aliases': ['one', 'two'],
-            },
-            'bar': {}
-        },
-    }
-
-    with mock.patch('compose.bundle.log.warning', autospec=True) as mock_log:
-        networks = bundle.make_service_networks(name, service_dict)
-
-    mock_log.assert_called_once_with(
-        "Unsupported key 'aliases' in services.theservice.networks.foo - ignoring")
-    assert sorted(networks) == sorted(service_dict['networks'])
-
-
-def test_make_port_specs():
-    service_dict = {
-        'expose': ['80', '500/udp'],
-        'ports': [
-            '400:80',
-            '222',
-            '127.0.0.1:8001:8001',
-            '127.0.0.1:5000-5001:3000-3001'],
-    }
-    port_specs = bundle.make_port_specs(service_dict)
-    assert port_specs == [
-        {'Protocol': 'tcp', 'Port': 80},
-        {'Protocol': 'tcp', 'Port': 222},
-        {'Protocol': 'tcp', 'Port': 8001},
-        {'Protocol': 'tcp', 'Port': 3000},
-        {'Protocol': 'tcp', 'Port': 3001},
-        {'Protocol': 'udp', 'Port': 500},
-    ]
-
-
-def test_make_port_spec_with_protocol():
-    port_spec = bundle.make_port_spec("5000/udp")
-    assert port_spec == {'Protocol': 'udp', 'Port': 5000}
-
-
-def test_make_port_spec_default_protocol():
-    port_spec = bundle.make_port_spec("50000")
-    assert port_spec == {'Protocol': 'tcp', 'Port': 50000}

+ 17 - 5
tests/unit/cli/main_test.py

@@ -12,7 +12,7 @@ from compose.cli.formatter import ConsoleWarningFormatter
 from compose.cli.main import build_one_off_container_options
 from compose.cli.main import call_docker
 from compose.cli.main import convergence_strategy_from_opts
-from compose.cli.main import filter_containers_to_service_names
+from compose.cli.main import filter_attached_containers
 from compose.cli.main import get_docker_start_call
 from compose.cli.main import setup_console_handler
 from compose.cli.main import warn_for_swarm_mode
@@ -37,7 +37,7 @@
 
 class TestCLIMainTestCase(object):
 
-    def test_filter_containers_to_service_names(self):
+    def test_filter_attached_containers(self):
         containers = [
             mock_container('web', 1),
             mock_container('web', 2),
@@ -46,17 +46,29 @@ class TestCLIMainTestCase(object):
             mock_container('another', 1),
         ]
         service_names = ['web', 'db']
-        actual = filter_containers_to_service_names(containers, service_names)
+        actual = filter_attached_containers(containers, service_names)
         assert actual == containers[:3]
 
-    def test_filter_containers_to_service_names_all(self):
+    def test_filter_attached_containers_with_dependencies(self):
+        containers = [
+            mock_container('web', 1),
+            mock_container('web', 2),
+            mock_container('db', 1),
+            mock_container('other', 1),
+            mock_container('another', 1),
+        ]
+        service_names = ['web', 'db']
+        actual = filter_attached_containers(containers, service_names, attach_dependencies=True)
+        assert actual == containers
+
+    def test_filter_attached_containers_all(self):
         containers = [
             mock_container('web', 1),
             mock_container('db', 1),
             mock_container('other', 1),
         ]
         service_names = []
-        actual = filter_containers_to_service_names(containers, service_names)
+        actual = filter_attached_containers(containers, service_names)
         assert actual == containers
 
     def test_warning_in_swarm_mode(self):
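From the three tests above one can reconstruct the contract of the renamed helper: with attach_dependencies set, or with no explicit service names, every container is attached; otherwise only containers of the named services are kept. A hedged sketch of that logic (the real compose.cli.main implementation may differ in detail):

def filter_attached_containers(containers, service_names, attach_dependencies=False):
    # Attach everything when dependencies are requested or no services were named.
    if attach_dependencies or not service_names:
        return containers
    # Otherwise keep only containers that belong to the requested services.
    return [container for container in containers
            if container.service in service_names]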

+ 202 - 164
tests/unit/config/config_test.py

@@ -10,12 +10,14 @@ import tempfile
 from operator import itemgetter
 from random import shuffle
 
-import py
 import pytest
 import yaml
+from ddt import data
+from ddt import ddt
 
 from ...helpers import build_config_details
 from ...helpers import BUSYBOX_IMAGE_WITH_TAG
+from ...helpers import cd
 from compose.config import config
 from compose.config import types
 from compose.config.config import ConfigFile
@@ -68,6 +70,7 @@ def secret_sort(secrets):
     return sorted(secrets, key=itemgetter('source'))
 
 
+@ddt
 class ConfigTest(unittest.TestCase):
 
     def test_load(self):
@@ -777,13 +780,14 @@ class ConfigTest(unittest.TestCase):
             })
         details = config.ConfigDetails('.', [base_file, override_file])
 
-        tmpdir = py.test.ensuretemp('config_test')
-        self.addCleanup(tmpdir.remove)
-        tmpdir.join('common.yml').write("""
-            base:
-              labels: ['label=one']
-        """)
-        with tmpdir.as_cwd():
+        tmpdir = tempfile.mkdtemp('config_test')
+        self.addCleanup(shutil.rmtree, tmpdir)
+        with open(os.path.join(tmpdir, 'common.yml'), mode="w") as common_fh:
+            common_fh.write("""
+                base:
+                  labels: ['label=one']
+            """)
+        with cd(tmpdir):
            service_dicts = config.load(details).services
 
         expected = [
@@ -812,19 +816,20 @@ class ConfigTest(unittest.TestCase):
             }
         )
 
-        tmpdir = pytest.ensuretemp('config_test')
-        self.addCleanup(tmpdir.remove)
-        tmpdir.join('base.yml').write("""
-            version: '2.2'
-            services:
-              base:
-                image: base
-              web:
-                extends: base
-        """)
+        tmpdir = tempfile.mkdtemp('config_test')
+        self.addCleanup(shutil.rmtree, tmpdir)
+        with open(os.path.join(tmpdir, 'base.yml'), mode="w") as base_fh:
+            base_fh.write("""
+                version: '2.2'
+                services:
+                  base:
+                    image: base
+                  web:
+                    extends: base
+            """)
 
         details = config.ConfigDetails('.', [main_file])
-        with tmpdir.as_cwd():
+        with cd(tmpdir):
             service_dicts = config.load(details).services
             assert service_dicts[0] == {
                 'name': 'prodweb',
@@ -1762,22 +1767,23 @@ class ConfigTest(unittest.TestCase):
         assert services[0]['environment']['SPRING_JPA_HIBERNATE_DDL-AUTO'] == 'none'
 
     def test_load_yaml_with_yaml_error(self):
-        tmpdir = py.test.ensuretemp('invalid_yaml_test')
-        self.addCleanup(tmpdir.remove)
-        invalid_yaml_file = tmpdir.join('docker-compose.yml')
-        invalid_yaml_file.write("""
-            web:
-              this is bogus: ok: what
-        """)
+        tmpdir = tempfile.mkdtemp('invalid_yaml_test')
+        self.addCleanup(shutil.rmtree, tmpdir)
+        invalid_yaml_file = os.path.join(tmpdir, 'docker-compose.yml')
+        with open(invalid_yaml_file, mode="w") as invalid_yaml_file_fh:
+            invalid_yaml_file_fh.write("""
+web:
+    this is bogus: ok: what
+            """)
         with pytest.raises(ConfigurationError) as exc:
             config.load_yaml(str(invalid_yaml_file))
 
-        assert 'line 3, column 32' in exc.exconly()
+        assert 'line 3, column 22' in exc.exconly()
 
     def test_load_yaml_with_bom(self):
-        tmpdir = py.test.ensuretemp('bom_yaml')
-        self.addCleanup(tmpdir.remove)
-        bom_yaml = tmpdir.join('docker-compose.yml')
+        tmpdir = tempfile.mkdtemp('bom_yaml')
+        self.addCleanup(shutil.rmtree, tmpdir)
+        bom_yaml = os.path.join(tmpdir, 'docker-compose.yml')
         with codecs.open(str(bom_yaml), 'w', encoding='utf-8') as f:
             f.write('''\ufeff
                 version: '2.3'
@@ -1885,6 +1891,26 @@ class ConfigTest(unittest.TestCase):
             }
         ]
 
+    @data(
+        '2 ',
+        '3.',
+        '3.0.0',
+        '3.0.a',
+        '3.a',
+        '3a')
+    def test_invalid_version_formats(self, version):
+        content = {
+            'version': version,
+            'services': {
+                'web': {
+                    'image': 'alpine',
+                }
+            }
+        }
+        with pytest.raises(ConfigurationError) as exc:
+            config.load(build_config_details(content))
+        assert 'Version "{}" in "filename.yml" is invalid.'.format(version) in exc.exconly()
+
     def test_group_add_option(self):
         actual = config.load(build_config_details({
             'version': '2',
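test_invalid_version_formats uses the ddt decorators imported above: @ddt on the class plus @data on the method expand into one generated test per sample value. A self-contained illustration of the pattern, with a stand-in version check rather than compose's real validator:

import re
import unittest

from ddt import data, ddt

@ddt
class VersionFormatExample(unittest.TestCase):
    @data('2 ', '3.', '3.0.0', '3.a', '3a')
    def test_rejects_malformed_version(self, version):
        # Stand-in for compose's validation: "2" or "3.7" style only.
        self.assertIsNone(re.match(r'^\d+(\.\d+)?$', version))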
@@ -4701,43 +4727,48 @@ class ExtendsTest(unittest.TestCase):
 
     @mock.patch.dict(os.environ)
     def test_extends_with_environment_and_env_files(self):
-        tmpdir = py.test.ensuretemp('test_extends_with_environment')
-        self.addCleanup(tmpdir.remove)
-        commondir = tmpdir.mkdir('common')
-        commondir.join('base.yml').write("""
-            app:
-                image: 'example/app'
-                env_file:
-                    - 'envs'
-                environment:
-                    - SECRET
-                    - TEST_ONE=common
-                    - TEST_TWO=common
-        """)
-        tmpdir.join('docker-compose.yml').write("""
-            ext:
-                extends:
-                    file: common/base.yml
-                    service: app
-                env_file:
-                    - 'envs'
-                environment:
-                    - THING
-                    - TEST_ONE=top
-        """)
-        commondir.join('envs').write("""
-            COMMON_ENV_FILE
-            TEST_ONE=common-env-file
-            TEST_TWO=common-env-file
-            TEST_THREE=common-env-file
-            TEST_FOUR=common-env-file
-        """)
-        tmpdir.join('envs').write("""
-            TOP_ENV_FILE
-            TEST_ONE=top-env-file
-            TEST_TWO=top-env-file
-            TEST_THREE=top-env-file
-        """)
+        tmpdir = tempfile.mkdtemp('test_extends_with_environment')
+        self.addCleanup(shutil.rmtree, tmpdir)
+        commondir = os.path.join(tmpdir, 'common')
+        os.mkdir(commondir)
+        with open(os.path.join(commondir, 'base.yml'), mode="w") as base_fh:
+            base_fh.write("""
+                app:
+                    image: 'example/app'
+                    env_file:
+                        - 'envs'
+                    environment:
+                        - SECRET
+                        - TEST_ONE=common
+                        - TEST_TWO=common
+            """)
+        with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
+            docker_compose_fh.write("""
+                ext:
+                    extends:
+                        file: common/base.yml
+                        service: app
+                    env_file:
+                        - 'envs'
+                    environment:
+                        - THING
+                        - TEST_ONE=top
+            """)
+        with open(os.path.join(commondir, 'envs'), mode="w") as envs_fh:
+            envs_fh.write("""
+                COMMON_ENV_FILE
+                TEST_ONE=common-env-file
+                TEST_TWO=common-env-file
+                TEST_THREE=common-env-file
+                TEST_FOUR=common-env-file
+            """)
+        with open(os.path.join(tmpdir, 'envs'), mode="w") as envs_fh:
+            envs_fh.write("""
+                TOP_ENV_FILE
+                TEST_ONE=top-env-file
+                TEST_TWO=top-env-file
+                TEST_THREE=top-env-file
+            """)
 
         expected = [
             {
@@ -4760,72 +4791,77 @@
         os.environ['THING'] = 'thing'
         os.environ['COMMON_ENV_FILE'] = 'secret'
         os.environ['TOP_ENV_FILE'] = 'secret'
-        config = load_from_filename(str(tmpdir.join('docker-compose.yml')))
+        config = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
 
         assert config == expected
 
     def test_extends_with_mixed_versions_is_error(self):
-        tmpdir = py.test.ensuretemp('test_extends_with_mixed_version')
-        self.addCleanup(tmpdir.remove)
-        tmpdir.join('docker-compose.yml').write("""
-            version: "2"
-            services:
-              web:
-                extends:
-                  file: base.yml
-                  service: base
-                image: busybox
-        """)
-        tmpdir.join('base.yml').write("""
-            base:
-              volumes: ['/foo']
-              ports: ['3000:3000']
-        """)
+        tmpdir = tempfile.mkdtemp('test_extends_with_mixed_version')
+        self.addCleanup(shutil.rmtree, tmpdir)
+        with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
+            docker_compose_fh.write("""
+                version: "2"
+                services:
+                  web:
+                    extends:
+                      file: base.yml
+                      service: base
+                    image: busybox
+            """)
+        with open(os.path.join(tmpdir, 'base.yml'), mode="w") as base_fh:
+            base_fh.write("""
+                base:
+                  volumes: ['/foo']
+                  ports: ['3000:3000']
+            """)
 
         with pytest.raises(ConfigurationError) as exc:
-            load_from_filename(str(tmpdir.join('docker-compose.yml')))
+            load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
         assert 'Version mismatch' in exc.exconly()
 
     def test_extends_with_defined_version_passes(self):
-        tmpdir = py.test.ensuretemp('test_extends_with_defined_version')
-        self.addCleanup(tmpdir.remove)
-        tmpdir.join('docker-compose.yml').write("""
-            version: "2"
-            services:
-              web:
-                extends:
-                  file: base.yml
-                  service: base
-                image: busybox
-        """)
-        tmpdir.join('base.yml').write("""
-            version: "2"
-            services:
-                base:
-                  volumes: ['/foo']
-                  ports: ['3000:3000']
-                  command: top
-        """)
-
-        service = load_from_filename(str(tmpdir.join('docker-compose.yml')))
+        tmpdir = tempfile.mkdtemp('test_extends_with_defined_version')
+        self.addCleanup(shutil.rmtree, tmpdir)
+        with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
+            docker_compose_fh.write("""
+                version: "2"
+                services:
+                  web:
+                    extends:
+                      file: base.yml
+                      service: base
+                    image: busybox
+            """)
+        with open(os.path.join(tmpdir, 'base.yml'), mode="w") as base_fh:
+            base_fh.write("""
+                version: "2"
+                services:
+                  base:
+                    volumes: ['/foo']
+                    ports: ['3000:3000']
+                    command: top
+            """)
+
+        service = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
         assert service[0]['command'] == "top"
 
     def test_extends_with_depends_on(self):
-        tmpdir = py.test.ensuretemp('test_extends_with_depends_on')
-        self.addCleanup(tmpdir.remove)
-        tmpdir.join('docker-compose.yml').write("""
-            version: "2"
-            services:
-              base:
-                image: example
-              web:
-                extends: base
-                image: busybox
-                depends_on: ['other']
-              other:
-                image: example
-        """)
-        services = load_from_filename(str(tmpdir.join('docker-compose.yml')))
+        tmpdir = tempfile.mkdtemp('test_extends_with_depends_on')
+        self.addCleanup(shutil.rmtree, tmpdir)
+        with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
+            docker_compose_fh.write("""
+                version: "2"
+                services:
+                  base:
+                    image: example
+                  web:
+                    extends: base
+                    image: busybox
+                    depends_on: ['other']
+                  other:
+                    image: example
+            """)
+        services = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
         assert service_sort(services)[2]['depends_on'] == {
             'other': {'condition': 'service_started'}
         }
@@ -4844,45 +4880,47 @@
         }]
 
     def test_extends_with_ports(self):
-        tmpdir = py.test.ensuretemp('test_extends_with_ports')
-        self.addCleanup(tmpdir.remove)
-        tmpdir.join('docker-compose.yml').write("""
-            version: '2'
-
-            services:
-              a:
-                image: nginx
-                ports:
-                  - 80
-
-              b:
-                extends:
-                  service: a
-        """)
-        services = load_from_filename(str(tmpdir.join('docker-compose.yml')))
+        tmpdir = tempfile.mkdtemp('test_extends_with_ports')
+        self.addCleanup(shutil.rmtree, tmpdir)
+        with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
+            docker_compose_fh.write("""
+                version: '2'
+
+                services:
+                  a:
+                    image: nginx
+                    ports:
+                      - 80
+
+                  b:
+                    extends:
+                      service: a
+            """)
+        services = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
 
         assert len(services) == 2
         for svc in services:
             assert svc['ports'] == [types.ServicePort('80', None, None, None, None)]
 
     def test_extends_with_security_opt(self):
-        tmpdir = py.test.ensuretemp('test_extends_with_ports')
-        self.addCleanup(tmpdir.remove)
-        tmpdir.join('docker-compose.yml').write("""
-            version: '2'
-
-            services:
-              a:
-                image: nginx
-                security_opt:
-                  - apparmor:unconfined
-                  - seccomp:unconfined
-
-              b:
-                extends:
-                  service: a
-        """)
-        services = load_from_filename(str(tmpdir.join('docker-compose.yml')))
+        tmpdir = tempfile.mkdtemp('test_extends_with_ports')
+        self.addCleanup(shutil.rmtree, tmpdir)
+        with open(os.path.join(tmpdir, 'docker-compose.yml'), mode="w") as docker_compose_fh:
+            docker_compose_fh.write("""
+                version: '2'
+
+                services:
+                  a:
+                    image: nginx
+                    security_opt:
+                    - apparmor:unconfined
+                    - seccomp:unconfined
+
+                  b:
+                    extends:
+                      service: a
+            """)
+        services = load_from_filename(str(os.path.join(tmpdir, 'docker-compose.yml')))
         assert len(services) == 2
         for svc in services:
             assert types.SecurityOpt.parse('apparmor:unconfined') in svc['security_opt']
@@ -5037,7 +5075,7 @@ class HealthcheckTest(unittest.TestCase):
             })
             })
         )
         )
 
 
-        serialized_config = yaml.load(serialize_config(config_dict))
+        serialized_config = yaml.safe_load(serialize_config(config_dict))
         serialized_service = serialized_config['services']['test']
         serialized_service = serialized_config['services']['test']
 
 
         assert serialized_service['healthcheck'] == {
         assert serialized_service['healthcheck'] == {
@@ -5064,7 +5102,7 @@ class HealthcheckTest(unittest.TestCase):
             })
             })
         )
         )
 
 
-        serialized_config = yaml.load(serialize_config(config_dict))
+        serialized_config = yaml.safe_load(serialize_config(config_dict))
         serialized_service = serialized_config['services']['test']
         serialized_service = serialized_config['services']['test']
 
 
         assert serialized_service['healthcheck'] == {
         assert serialized_service['healthcheck'] == {
@@ -5271,7 +5309,7 @@ class SerializeTest(unittest.TestCase):
             'secrets': secrets_dict
         }))
 
-        serialized_config = yaml.load(serialize_config(config_dict))
+        serialized_config = yaml.safe_load(serialize_config(config_dict))
         serialized_service = serialized_config['services']['web']
         assert secret_sort(serialized_service['secrets']) == secret_sort(service_dict['secrets'])
         assert 'secrets' in serialized_config
@@ -5286,7 +5324,7 @@ class SerializeTest(unittest.TestCase):
             }
         ], volumes={}, networks={}, secrets={}, configs={})
 
-        serialized_config = yaml.load(serialize_config(config_dict))
+        serialized_config = yaml.safe_load(serialize_config(config_dict))
         assert '8080:80/tcp' in serialized_config['services']['web']['ports']
 
     def test_serialize_ports_with_ext_ip(self):
@@ -5298,7 +5336,7 @@ class SerializeTest(unittest.TestCase):
             }
         ], volumes={}, networks={}, secrets={}, configs={})
 
-        serialized_config = yaml.load(serialize_config(config_dict))
+        serialized_config = yaml.safe_load(serialize_config(config_dict))
         assert '127.0.0.1:8080:80/tcp' in serialized_config['services']['web']['ports']
 
     def test_serialize_configs(self):
@@ -5326,7 +5364,7 @@ class SerializeTest(unittest.TestCase):
             'configs': configs_dict
         }))
 
-        serialized_config = yaml.load(serialize_config(config_dict))
+        serialized_config = yaml.safe_load(serialize_config(config_dict))
         serialized_service = serialized_config['services']['web']
         assert secret_sort(serialized_service['configs']) == secret_sort(service_dict['configs'])
         assert 'configs' in serialized_config
@@ -5366,7 +5404,7 @@ class SerializeTest(unittest.TestCase):
         }
         config_dict = config.load(build_config_details(cfg))
 
-        serialized_config = yaml.load(serialize_config(config_dict))
+        serialized_config = yaml.safe_load(serialize_config(config_dict))
         serialized_service = serialized_config['services']['web']
         assert serialized_service['environment']['CURRENCY'] == '$$'
         assert serialized_service['command'] == 'echo $$FOO'
@@ -5388,7 +5426,7 @@ class SerializeTest(unittest.TestCase):
         }
         config_dict = config.load(build_config_details(cfg), interpolate=False)
 
-        serialized_config = yaml.load(serialize_config(config_dict, escape_dollar=False))
+        serialized_config = yaml.safe_load(serialize_config(config_dict, escape_dollar=False))
         serialized_service = serialized_config['services']['web']
         assert serialized_service['environment']['CURRENCY'] == '$'
         assert serialized_service['command'] == 'echo $FOO'
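The two hunks above also pin down Compose's dollar-escaping contract: by default, `serialize_config` doubles a literal `$` to `$$` so the output survives another round of variable interpolation, while `escape_dollar=False` leaves it untouched (at the cost of `$FOO` being re-interpolated on the next load). A plain-Python illustration of the rule, independent of Compose internals (the helper name here is ours, not Compose's):

    def escape_dollar(value):
        # Doubling '$' is the standard escape in Compose files: '$$FOO'
        # round-trips to a literal '$FOO' instead of a variable lookup.
        return value.replace('$', '$$')

    assert escape_dollar('echo $FOO') == 'echo $$FOO'
    assert escape_dollar('100$') == '100$$'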
@@ -5407,7 +5445,7 @@ class SerializeTest(unittest.TestCase):
 
         config_dict = config.load(build_config_details(cfg))
 
-        serialized_config = yaml.load(serialize_config(config_dict))
+        serialized_config = yaml.safe_load(serialize_config(config_dict))
         serialized_service = serialized_config['services']['web']
         assert serialized_service['command'] == 'echo 十六夜 咲夜'
 
@@ -5423,6 +5461,6 @@ class SerializeTest(unittest.TestCase):
         }
 
         config_dict = config.load(build_config_details(cfg))
-        serialized_config = yaml.load(serialize_config(config_dict))
+        serialized_config = yaml.safe_load(serialize_config(config_dict))
         serialized_volume = serialized_config['volumes']['test']
         assert serialized_volume['external'] is False
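The one change repeated across every HealthcheckTest and SerializeTest hunk is `yaml.load` → `yaml.safe_load`. Bare `yaml.load` without an explicit `Loader` triggers a deprecation warning in PyYAML 5.x (which this release bumps to 5.3) and, on untrusted input, can construct arbitrary Python objects; `safe_load` restricts parsing to standard YAML tags, which is all these round-trip assertions need. A minimal illustration of the difference in intent:

    import yaml

    doc = 'services:\n  web:\n    image: nginx\n'

    # safe_load parses plain YAML exactly as load would, but refuses
    # Python-specific tags such as !!python/object/apply.
    data = yaml.safe_load(doc)
    assert data['services']['web']['image'] == 'nginx'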

+ 9 - 6
tests/unit/config/environment_test.py

@@ -4,6 +4,9 @@ from __future__ import print_function
 from __future__ import unicode_literals
 
 import codecs
+import os
+import shutil
+import tempfile
 
 import pytest
 
@@ -46,19 +49,19 @@ class EnvironmentTest(unittest.TestCase):
         assert env.get_boolean('UNDEFINED') is False
 
     def test_env_vars_from_file_bom(self):
-        tmpdir = pytest.ensuretemp('env_file')
-        self.addCleanup(tmpdir.remove)
+        tmpdir = tempfile.mkdtemp('env_file')
+        self.addCleanup(shutil.rmtree, tmpdir)
         with codecs.open('{}/bom.env'.format(str(tmpdir)), 'w', encoding='utf-8') as f:
             f.write('\ufeffPARK_BOM=박봄\n')
-        assert env_vars_from_file(str(tmpdir.join('bom.env'))) == {
+        assert env_vars_from_file(str(os.path.join(tmpdir, 'bom.env'))) == {
             'PARK_BOM': '박봄'
         }
 
     def test_env_vars_from_file_whitespace(self):
-        tmpdir = pytest.ensuretemp('env_file')
-        self.addCleanup(tmpdir.remove)
+        tmpdir = tempfile.mkdtemp('env_file')
+        self.addCleanup(shutil.rmtree, tmpdir)
         with codecs.open('{}/whitespace.env'.format(str(tmpdir)), 'w', encoding='utf-8') as f:
             f.write('WHITESPACE =yes\n')
         with pytest.raises(ConfigurationError) as exc:
-            env_vars_from_file(str(tmpdir.join('whitespace.env')))
+            env_vars_from_file(str(os.path.join(tmpdir, 'whitespace.env')))
         assert 'environment variable' in exc.exconly()
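Both environment tests exercise `env_vars_from_file`'s line-level rules: a leading UTF-8 BOM must be ignored, and whitespace before `=` must raise a `ConfigurationError`. A simplified stand-in for that parsing, reduced to exactly these two cases (this helper is illustrative, not Compose's actual implementation):

    def parse_env_line(line):
        # Drop a UTF-8 BOM if the file started with one, then split on
        # the first '=' and reject names with surrounding whitespace.
        line = line.lstrip('\ufeff').rstrip('\n')
        key, _, value = line.partition('=')
        if key != key.strip():
            raise ValueError('environment variable name is invalid: {}'.format(key))
        return key, value

    assert parse_env_line('\ufeffPARK_BOM=박봄\n') == ('PARK_BOM', '박봄')
    try:
        parse_env_line('WHITESPACE =yes\n')
    except ValueError:
        pass  # whitespace before '=' is rejected, as the test expects
    else:
        raise AssertionError('expected whitespace before "=" to be rejected')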