
Merge pull request #5914 from docker/bump-1.21.1

Bump 1.21.1
Joffrey F 7 years ago
parent commit 456ff4be0f

+ 26 - 0
CHANGELOG.md

@@ -1,6 +1,32 @@
 Change log
 ==========
 
+1.21.1 (2018-04-27)
+-------------------
+
+### Bugfixes
+
+- In 1.21.0, we introduced a change to how project names are sanitized for
+  internal use in resource names. This caused issues when manipulating an
+  existing, deployed application whose name had changed as a result.
+  This release properly detects resources using "legacy" naming conventions.
+
+- Fixed an issue where specifying an in-context Dockerfile using an absolute
+  path would fail despite being valid.
+
+- Fixed a bug where IPAM option changes were incorrectly detected, preventing
+  redeployments.
+
+- Validation of v2 files now properly checks the structure of IPAM configs.
+
+- Improved support for credentials stores on Windows to include binaries using
+  extensions other than `.exe`. The list of valid extensions is determined by
+  the contents of the `PATHEXT` environment variable.
+
+- Fixed a bug where Compose would generate invalid binds containing duplicate
+  elements with some v3.2 files, triggering errors at the Engine level during
+  deployment.
+
 1.21.0 (2018-04-10)
 -------------------
 

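One of the fixes above concerns credential stores on Windows: the set of acceptable binary extensions is now taken from the `PATHEXT` environment variable instead of being limited to `.exe`. The following is a minimal sketch of that kind of check, not Compose's actual lookup code; the fallback extension list is an assumption for illustration:

```python
import os

# Derive acceptable executable extensions from PATHEXT (Windows semantics);
# the fallback list here is illustrative, not Compose's.
pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
valid_exts = [ext.lower() for ext in pathext.split(';') if ext]

def is_valid_credstore_binary(filename):
    # A 'docker-credential-wincred.cmd' now qualifies, not just the .exe variant
    return os.path.splitext(filename)[1].lower() in valid_exts

print(is_valid_credstore_binary('docker-credential-wincred.cmd'))  # True
```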
+ 1 - 1
compose/__init__.py

@@ -1,4 +1,4 @@
 from __future__ import absolute_import
 from __future__ import unicode_literals
 
-__version__ = '1.21.0'
+__version__ = '1.21.1'

+ 20 - 1
compose/config/config_schema_v2.0.json

@@ -281,7 +281,8 @@
             "properties": {
                 "driver": {"type": "string"},
                 "config": {
-                    "type": "array"
+                    "type": "array",
+                    "items": {"$ref": "#/definitions/ipam_config"}
                 },
                 "options": {
                   "type": "object",
@@ -305,6 +306,24 @@
       "additionalProperties": false
     },
 
+    "ipam_config": {
+      "id": "#/definitions/ipam_config",
+      "type": "object",
+      "properties": {
+        "subnet": {"type": "string"},
+        "iprange": {"type": "string"},
+        "gateway": {"type": "string"},
+        "aux_addresses": {
+          "type": "object",
+          "patternProperties": {
+            "^.+$": {"type": "string"}
+          },
+          "additionalProperties": false
+        }
+      },
+      "additionalProperties": false
+    },
+
     "volume": {
       "id": "#/definitions/volume",
       "type": ["object", "null"],

+ 20 - 1
compose/config/config_schema_v2.1.json

@@ -332,7 +332,8 @@
             "properties": {
                 "driver": {"type": "string"},
                 "config": {
-                    "type": "array"
+                    "type": "array",
+                    "items": {"$ref": "#/definitions/ipam_config"}
                 },
                 "options": {
                   "type": "object",
@@ -359,6 +360,24 @@
       "additionalProperties": false
     },
 
+    "ipam_config": {
+      "id": "#/definitions/ipam_config",
+      "type": "object",
+      "properties": {
+        "subnet": {"type": "string"},
+        "iprange": {"type": "string"},
+        "gateway": {"type": "string"},
+        "aux_addresses": {
+          "type": "object",
+          "patternProperties": {
+            "^.+$": {"type": "string"}
+          },
+          "additionalProperties": false
+        }
+      },
+      "additionalProperties": false
+    },
+
     "volume": {
       "id": "#/definitions/volume",
       "type": ["object", "null"],

+ 20 - 1
compose/config/config_schema_v2.2.json

@@ -341,7 +341,8 @@
             "properties": {
                 "driver": {"type": "string"},
                 "config": {
-                    "type": "array"
+                    "type": "array",
+                    "items": {"$ref": "#/definitions/ipam_config"}
                 },
                 "options": {
                   "type": "object",
@@ -368,6 +369,24 @@
       "additionalProperties": false
     },
 
+    "ipam_config": {
+      "id": "#/definitions/ipam_config",
+      "type": "object",
+      "properties": {
+        "subnet": {"type": "string"},
+        "iprange": {"type": "string"},
+        "gateway": {"type": "string"},
+        "aux_addresses": {
+          "type": "object",
+          "patternProperties": {
+            "^.+$": {"type": "string"}
+          },
+          "additionalProperties": false
+        }
+      },
+      "additionalProperties": false
+    },
+
     "volume": {
       "id": "#/definitions/volume",
       "type": ["object", "null"],

+ 20 - 1
compose/config/config_schema_v2.3.json

@@ -385,7 +385,8 @@
             "properties": {
                 "driver": {"type": "string"},
                 "config": {
-                    "type": "array"
+                    "type": "array",
+                    "items": {"$ref": "#/definitions/ipam_config"}
                 },
                 "options": {
                   "type": "object",
@@ -412,6 +413,24 @@
       "additionalProperties": false
     },
 
+    "ipam_config": {
+      "id": "#/definitions/ipam_config",
+      "type": "object",
+      "properties": {
+        "subnet": {"type": "string"},
+        "iprange": {"type": "string"},
+        "gateway": {"type": "string"},
+        "aux_addresses": {
+          "type": "object",
+          "patternProperties": {
+            "^.+$": {"type": "string"}
+          },
+          "additionalProperties": false
+        }
+      },
+      "additionalProperties": false
+    },
+
     "volume": {
       "id": "#/definitions/volume",
       "type": ["object", "null"],

+ 20 - 1
compose/config/config_schema_v2.4.json

@@ -384,7 +384,8 @@
             "properties": {
                 "driver": {"type": "string"},
                 "config": {
-                    "type": "array"
+                    "type": "array",
+                    "items": {"$ref": "#/definitions/ipam_config"}
                 },
                 "options": {
                   "type": "object",
@@ -411,6 +412,24 @@
       "additionalProperties": false
     },
 
+    "ipam_config": {
+      "id": "#/definitions/ipam_config",
+      "type": "object",
+      "properties": {
+        "subnet": {"type": "string"},
+        "iprange": {"type": "string"},
+        "gateway": {"type": "string"},
+        "aux_addresses": {
+          "type": "object",
+          "patternProperties": {
+            "^.+$": {"type": "string"}
+          },
+          "additionalProperties": false
+        }
+      },
+      "additionalProperties": false
+    },
+
     "volume": {
       "id": "#/definitions/volume",
       "type": ["object", "null"],

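All five schema files above gain the same `ipam_config` definition, so malformed IPAM entries are now rejected at validation time instead of surfacing as Engine errors at deploy time. Below is a minimal sketch using the `jsonschema` library (which Compose uses for config validation); the inline schema is a trimmed copy of the new definition, not Compose's full validation path:

```python
from jsonschema import ValidationError, validate

ipam_config = {
    "type": "object",
    "properties": {
        "subnet": {"type": "string"},
        "iprange": {"type": "string"},
        "gateway": {"type": "string"},
        "aux_addresses": {
            "type": "object",
            "patternProperties": {"^.+$": {"type": "string"}},
            "additionalProperties": False,
        },
    },
    "additionalProperties": False,
}

validate({"subnet": "172.28.0.0/16", "gateway": "172.28.0.1"}, ipam_config)  # passes

try:
    validate({"ip_range": "172.28.5.0/24"}, ipam_config)  # schema expects "iprange"
except ValidationError as err:
    print(err.message)  # e.g. "Additional properties are not allowed ('ip_range' was unexpected)"
```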
+ 53 - 22
compose/network.py

@@ -2,6 +2,7 @@ from __future__ import absolute_import
 from __future__ import unicode_literals
 
 import logging
+import re
 from collections import OrderedDict
 
 from docker.errors import NotFound
@@ -10,9 +11,11 @@ from docker.types import IPAMPool
 from docker.utils import version_gte
 from docker.utils import version_lt
 
+from . import __version__
 from .config import ConfigurationError
 from .const import LABEL_NETWORK
 from .const import LABEL_PROJECT
+from .const import LABEL_VERSION
 
 
 log = logging.getLogger(__name__)
@@ -39,6 +42,7 @@ class Network(object):
         self.enable_ipv6 = enable_ipv6
         self.labels = labels
         self.custom_name = custom_name
+        self.legacy = None
 
     def ensure(self):
         if self.external:
@@ -64,8 +68,9 @@ class Network(object):
                 )
             return
 
+        self._set_legacy_flag()
         try:
-            data = self.inspect()
+            data = self.inspect(legacy=self.legacy)
             check_remote_network_config(data, self)
         except NotFound:
             driver_name = 'the default driver'
@@ -73,8 +78,7 @@ class Network(object):
                 driver_name = 'driver "{}"'.format(self.driver)
 
             log.info(
-                'Creating network "{}" with {}'
-                .format(self.full_name, driver_name)
+                'Creating network "{}" with {}'.format(self.full_name, driver_name)
             )
 
             self.client.create_network(
@@ -91,21 +95,38 @@ class Network(object):
 
     def remove(self):
         if self.external:
-            log.info("Network %s is external, skipping", self.full_name)
+            log.info("Network %s is external, skipping", self.true_name)
             return
 
-        log.info("Removing network {}".format(self.full_name))
-        self.client.remove_network(self.full_name)
+        log.info("Removing network {}".format(self.true_name))
+        self.client.remove_network(self.true_name)
 
-    def inspect(self):
+    def inspect(self, legacy=False):
+        if legacy:
+            return self.client.inspect_network(self.legacy_full_name)
         return self.client.inspect_network(self.full_name)
 
+    @property
+    def legacy_full_name(self):
+        if self.custom_name:
+            return self.name
+        return '{0}_{1}'.format(
+            re.sub(r'[_-]', '', self.project), self.name
+        )
+
     @property
     def full_name(self):
         if self.custom_name:
             return self.name
         return '{0}_{1}'.format(self.project, self.name)
 
+    @property
+    def true_name(self):
+        self._set_legacy_flag()
+        if self.legacy:
+            return self.legacy_full_name
+        return self.full_name
+
     @property
     def _labels(self):
         if version_lt(self.client._version, '1.23'):
@@ -114,9 +135,19 @@ class Network(object):
         labels.update({
             LABEL_PROJECT: self.project,
             LABEL_NETWORK: self.name,
+            LABEL_VERSION: __version__,
         })
         return labels
 
+    def _set_legacy_flag(self):
+        if self.legacy is not None:
+            return
+        try:
+            data = self.inspect(legacy=True)
+            self.legacy = data is not None
+        except NotFound:
+            self.legacy = False
+
 
 def create_ipam_config_from_dict(ipam_dict):
     if not ipam_dict:
@@ -150,49 +181,49 @@ def check_remote_ipam_config(remote, local):
     remote_ipam = remote.get('IPAM')
     ipam_dict = create_ipam_config_from_dict(local.ipam)
     if local.ipam.get('driver') and local.ipam.get('driver') != remote_ipam.get('Driver'):
-        raise NetworkConfigChangedError(local.full_name, 'IPAM driver')
+        raise NetworkConfigChangedError(local.true_name, 'IPAM driver')
     if len(ipam_dict['Config']) != 0:
         if len(ipam_dict['Config']) != len(remote_ipam['Config']):
-            raise NetworkConfigChangedError(local.full_name, 'IPAM configs')
+            raise NetworkConfigChangedError(local.true_name, 'IPAM configs')
         remote_configs = sorted(remote_ipam['Config'], key=lambda cfg: cfg.get('Subnet', ''))
         local_configs = sorted(ipam_dict['Config'], key=lambda cfg: cfg.get('Subnet', ''))
         while local_configs:
             lc = local_configs.pop()
             rc = remote_configs.pop()
             if lc.get('Subnet') != rc.get('Subnet'):
-                raise NetworkConfigChangedError(local.full_name, 'IPAM config subnet')
+                raise NetworkConfigChangedError(local.true_name, 'IPAM config subnet')
             if lc.get('Gateway') is not None and lc.get('Gateway') != rc.get('Gateway'):
-                raise NetworkConfigChangedError(local.full_name, 'IPAM config gateway')
+                raise NetworkConfigChangedError(local.true_name, 'IPAM config gateway')
             if lc.get('IPRange') != rc.get('IPRange'):
-                raise NetworkConfigChangedError(local.full_name, 'IPAM config ip_range')
+                raise NetworkConfigChangedError(local.true_name, 'IPAM config ip_range')
             if sorted(lc.get('AuxiliaryAddresses')) != sorted(rc.get('AuxiliaryAddresses')):
-                raise NetworkConfigChangedError(local.full_name, 'IPAM config aux_addresses')
+                raise NetworkConfigChangedError(local.true_name, 'IPAM config aux_addresses')
 
     remote_opts = remote_ipam.get('Options') or {}
-    local_opts = local.ipam.get('options') or {}
+    local_opts = local.ipam.get('Options') or {}
     for k in set.union(set(remote_opts.keys()), set(local_opts.keys())):
         if remote_opts.get(k) != local_opts.get(k):
-            raise NetworkConfigChangedError(local.full_name, 'IPAM option "{}"'.format(k))
+            raise NetworkConfigChangedError(local.true_name, 'IPAM option "{}"'.format(k))
 
 
 def check_remote_network_config(remote, local):
     if local.driver and remote.get('Driver') != local.driver:
-        raise NetworkConfigChangedError(local.full_name, 'driver')
+        raise NetworkConfigChangedError(local.true_name, 'driver')
     local_opts = local.driver_opts or {}
     remote_opts = remote.get('Options') or {}
     for k in set.union(set(remote_opts.keys()), set(local_opts.keys())):
         if k in OPTS_EXCEPTIONS:
             continue
         if remote_opts.get(k) != local_opts.get(k):
-            raise NetworkConfigChangedError(local.full_name, 'option "{}"'.format(k))
+            raise NetworkConfigChangedError(local.true_name, 'option "{}"'.format(k))
 
     if local.ipam is not None:
         check_remote_ipam_config(remote, local)
 
     if local.internal is not None and local.internal != remote.get('Internal', False):
-        raise NetworkConfigChangedError(local.full_name, 'internal')
+        raise NetworkConfigChangedError(local.true_name, 'internal')
     if local.enable_ipv6 is not None and local.enable_ipv6 != remote.get('EnableIPv6', False):
-        raise NetworkConfigChangedError(local.full_name, 'enable_ipv6')
+        raise NetworkConfigChangedError(local.true_name, 'enable_ipv6')
 
     local_labels = local.labels or {}
     remote_labels = remote.get('Labels', {})
@@ -202,7 +233,7 @@ def check_remote_network_config(remote, local):
         if remote_labels.get(k) != local_labels.get(k):
             log.warn(
                 'Network {}: label "{}" has changed. It may need to be'
-                ' recreated.'.format(local.full_name, k)
+                ' recreated.'.format(local.true_name, k)
             )
 
 
@@ -257,7 +288,7 @@ class ProjectNetworks(object):
             try:
                 network.remove()
             except NotFound:
-                log.warn("Network %s not found.", network.full_name)
+                log.warn("Network %s not found.", network.true_name)
 
     def initialize(self):
         if not self.use_networking:
@@ -286,7 +317,7 @@ def get_networks(service_dict, network_definitions):
     for name, netdef in get_network_defs_for_service(service_dict).items():
         network = network_definitions.get(name)
         if network:
-            networks[network.full_name] = netdef
+            networks[network.true_name] = netdef
         else:
             raise ConfigurationError(
                 'Service "{}" uses an undefined network "{}"'

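To see the `network.py` change in context: before 1.21.0, Compose stripped `-` and `_` from project names when building resource names; 1.21.0 stopped doing so, which orphaned resources created under the old scheme. The new `true_name` property probes the Engine for a legacy-named resource first and falls back to the current convention. A condensed, self-contained sketch of that logic, with the Engine lookup replaced by a plain set:

```python
import re

class NetworkSketch:
    """Condensed model of Network.full_name / legacy_full_name / true_name."""

    def __init__(self, project, name, existing):
        self.project, self.name = project, name
        self.existing = existing  # stand-in for client.inspect_network()
        self.legacy = None

    @property
    def full_name(self):
        return '{0}_{1}'.format(self.project, self.name)

    @property
    def legacy_full_name(self):
        # pre-1.21.0 behavior: dashes and underscores stripped from the project
        return '{0}_{1}'.format(re.sub(r'[_-]', '', self.project), self.name)

    @property
    def true_name(self):
        if self.legacy is None:  # mirrors _set_legacy_flag()
            self.legacy = self.legacy_full_name in self.existing
        return self.legacy_full_name if self.legacy else self.full_name

# An app deployed as "myapp_default" before the change keeps its old name:
print(NetworkSketch('my-app', 'default', {'myapp_default'}).true_name)  # myapp_default
print(NetworkSketch('my-app', 'default', set()).true_name)              # my-app_default
```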
+ 35 - 8
compose/service.py

@@ -51,6 +51,7 @@ from .progress_stream import StreamOutputError
 from .utils import json_hash
 from .utils import parse_bytes
 from .utils import parse_seconds_float
+from .version import ComposeVersion
 
 
 log = logging.getLogger(__name__)
@@ -192,11 +193,25 @@ class Service(object):
     def containers(self, stopped=False, one_off=False, filters={}):
         filters.update({'label': self.labels(one_off=one_off)})
 
-        return list(filter(None, [
+        result = list(filter(None, [
             Container.from_ps(self.client, container)
             for container in self.client.containers(
                 all=stopped,
-                filters=filters)]))
+                filters=filters)])
+        )
+        if result:
+            return result
+
+        filters.update({'label': self.labels(one_off=one_off, legacy=True)})
+        return list(
+            filter(
+                self.has_legacy_proj_name, filter(None, [
+                    Container.from_ps(self.client, container)
+                    for container in self.client.containers(
+                        all=stopped,
+                        filters=filters)])
+            )
+        )
 
     def get_container(self, number=1):
         """Return a :class:`compose.container.Container` for this service. The
@@ -380,6 +395,10 @@ class Service(object):
         has_diverged = False
 
         for c in containers:
+            if self.has_legacy_proj_name(c):
+                log.debug('%s has diverged: Legacy project name' % c.name)
+                has_diverged = True
+                continue
             container_config_hash = c.labels.get(LABEL_CONFIG_HASH, None)
             if container_config_hash != config_hash:
                 log.debug(
@@ -858,7 +877,6 @@ class Service(object):
             container_volumes, self.options.get('tmpfs') or [], previous_container,
             container_mounts
         )
-        override_options['binds'] = binds
         container_options['environment'].update(affinity)
 
         container_options['volumes'] = dict((v.internal, {}) for v in container_volumes or {})
@@ -871,13 +889,13 @@ class Service(object):
                 if m.is_tmpfs:
                     override_options['tmpfs'].append(m.target)
                 else:
-                    override_options['binds'].append(m.legacy_repr())
+                    binds.append(m.legacy_repr())
                     container_options['volumes'][m.target] = {}
 
         secret_volumes = self.get_secret_volumes()
         if secret_volumes:
             if version_lt(self.client.api_version, '1.30'):
-                override_options['binds'].extend(v.legacy_repr() for v in secret_volumes)
+                binds.extend(v.legacy_repr() for v in secret_volumes)
                 container_options['volumes'].update(
                     (v.target, {}) for v in secret_volumes
                 )
@@ -885,6 +903,8 @@ class Service(object):
                 override_options['mounts'] = override_options.get('mounts') or []
                 override_options['mounts'].extend([build_mount(v) for v in secret_volumes])
 
+        # Remove possible duplicates (see e.g. https://github.com/docker/compose/issues/5885)
+        override_options['binds'] = list(set(binds))
         return container_options, override_options
 
     def _get_container_host_config(self, override_options, one_off=False):
@@ -1053,11 +1073,12 @@ class Service(object):
     def can_be_built(self):
         return 'build' in self.options
 
-    def labels(self, one_off=False):
+    def labels(self, one_off=False, legacy=False):
+        proj_name = self.project if not legacy else re.sub(r'[_-]', '', self.project)
         return [
-            '{0}={1}'.format(LABEL_PROJECT, self.project),
+            '{0}={1}'.format(LABEL_PROJECT, proj_name),
             '{0}={1}'.format(LABEL_SERVICE, self.name),
-            '{0}={1}'.format(LABEL_ONE_OFF, "True" if one_off else "False")
+            '{0}={1}'.format(LABEL_ONE_OFF, "True" if one_off else "False"),
         ]
 
     @property
@@ -1214,6 +1235,12 @@ class Service(object):
 
         return result
 
+    def has_legacy_proj_name(self, ctnr):
+        return (
+            ComposeVersion(ctnr.labels.get(LABEL_VERSION)) < ComposeVersion('1.21.0') and
+            ctnr.project != self.project
+        )
+
 
 def short_id_alias_exists(container, network):
     aliases = container.get(

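The `list(set(binds))` line above is what resolves the duplicate-binds changelog entry: merging volume specifications from a previous container with the current config could yield the same bind string twice, which the Engine rejects. A small demonstration of the effect; note that `set()` does not preserve order, which is acceptable here since each bind entry is an independent mount:

```python
binds = [
    '/host/data:/data:rw',
    '/host/logs:/logs:ro',
    '/host/data:/data:rw',  # duplicate produced by merging old + new volume specs
]
deduped = list(set(binds))
print(sorted(deduped))
# ['/host/data:/data:rw', '/host/logs:/logs:ro'] -- one entry per bind
```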
+ 43 - 10
compose/volume.py

@@ -2,15 +2,19 @@ from __future__ import absolute_import
 from __future__ import unicode_literals
 
 import logging
+import re
 
 from docker.errors import NotFound
 from docker.utils import version_lt
 
+from . import __version__
 from .config import ConfigurationError
 from .config.types import VolumeSpec
 from .const import LABEL_PROJECT
+from .const import LABEL_VERSION
 from .const import LABEL_VOLUME
 
+
 log = logging.getLogger(__name__)
 
 
@@ -25,6 +29,7 @@ class Volume(object):
         self.external = external
         self.labels = labels
         self.custom_name = custom_name
+        self.legacy = None
 
     def create(self):
         return self.client.create_volume(
@@ -33,17 +38,20 @@ class Volume(object):
 
     def remove(self):
         if self.external:
-            log.info("Volume %s is external, skipping", self.full_name)
+            log.info("Volume %s is external, skipping", self.true_name)
             return
-        log.info("Removing volume %s", self.full_name)
-        return self.client.remove_volume(self.full_name)
+        log.info("Removing volume %s", self.true_name)
+        return self.client.remove_volume(self.true_name)
 
-    def inspect(self):
+    def inspect(self, legacy=None):
+        if legacy:
+            return self.client.inspect_volume(self.legacy_full_name)
         return self.client.inspect_volume(self.full_name)
 
     def exists(self):
+        self._set_legacy_flag()
         try:
-            self.inspect()
+            self.inspect(legacy=self.legacy)
         except NotFound:
             return False
         return True
@@ -54,6 +62,21 @@ class Volume(object):
             return self.name
         return '{0}_{1}'.format(self.project, self.name)
 
+    @property
+    def legacy_full_name(self):
+        if self.custom_name:
+            return self.name
+        return '{0}_{1}'.format(
+            re.sub(r'[_-]', '', self.project), self.name
+        )
+
+    @property
+    def true_name(self):
+        self._set_legacy_flag()
+        if self.legacy:
+            return self.legacy_full_name
+        return self.full_name
+
     @property
     def _labels(self):
         if version_lt(self.client._version, '1.23'):
@@ -62,9 +85,19 @@ class Volume(object):
         labels.update({
             LABEL_PROJECT: self.project,
             LABEL_VOLUME: self.name,
+            LABEL_VERSION: __version__,
         })
         return labels
 
+    def _set_legacy_flag(self):
+        if self.legacy is not None:
+            return
+        try:
+            data = self.inspect(legacy=True)
+            self.legacy = data is not None
+        except NotFound:
+            self.legacy = False
+
 
 class ProjectVolumes(object):
 
@@ -94,7 +127,7 @@ class ProjectVolumes(object):
             try:
                 volume.remove()
             except NotFound:
-                log.warn("Volume %s not found.", volume.full_name)
+                log.warn("Volume %s not found.", volume.true_name)
 
     def initialize(self):
         try:
@@ -124,7 +157,7 @@ class ProjectVolumes(object):
                     )
                     volume.create()
                 else:
-                    check_remote_volume_config(volume.inspect(), volume)
+                    check_remote_volume_config(volume.inspect(legacy=volume.legacy), volume)
         except NotFound:
             raise ConfigurationError(
                 'Volume %s specifies nonexistent driver %s' % (volume.name, volume.driver)
@@ -136,9 +169,9 @@ class ProjectVolumes(object):
 
         if isinstance(volume_spec, VolumeSpec):
             volume = self.volumes[volume_spec.external]
-            return volume_spec._replace(external=volume.full_name)
+            return volume_spec._replace(external=volume.true_name)
         else:
-            volume_spec.source = self.volumes[volume_spec.source].full_name
+            volume_spec.source = self.volumes[volume_spec.source].true_name
             return volume_spec
 
 
@@ -152,7 +185,7 @@ class VolumeConfigChangedError(ConfigurationError):
             'first:\n$ docker volume rm {full_name}'.format(
                 vol_name=local.name, property_name=property_name,
                 local_value=local_value, remote_value=remote_value,
-                full_name=local.full_name
+                full_name=local.true_name
             )
         )
 

+ 40 - 57
contrib/completion/bash/docker-compose

@@ -81,41 +81,24 @@ __docker_compose_nospace() {
 	type compopt &>/dev/null && compopt -o nospace
 }
 
-# Extracts all service names from the compose file.
-___docker_compose_all_services_in_compose_file() {
-	__docker_compose_q config --services
-}
-
-# All services, even those without an existing container
-__docker_compose_services_all() {
-	COMPREPLY=( $(compgen -W "$(___docker_compose_all_services_in_compose_file)" -- "$cur") )
-}
 
-# All services that are defined by a Dockerfile reference
-__docker_compose_services_from_build() {
-	COMPREPLY=( $(compgen -W "$(__docker_compose_q ps --services --filter "source=build")" -- "$cur") )
+# Outputs a list of all defined services, regardless of their running state.
+# Arguments for `docker-compose ps` may be passed in order to filter the service list,
+# e.g. `status=running`.
+__docker_compose_services() {
+	__docker_compose_q ps --services "$@"
 }
 
-# All services that are defined by an image
-__docker_compose_services_from_image() {
-	COMPREPLY=( $(compgen -W "$(__docker_compose_q ps --services --filter "source=image")" -- "$cur") )
-}
-
-# The services for which at least one paused container exists
-__docker_compose_services_paused() {
-	names=$(__docker_compose_q ps --services --filter "status=paused")
-	COMPREPLY=( $(compgen -W "$names" -- "$cur") )
+# Applies completion of services based on the current value of `$cur`.
+# Arguments for `docker-compose ps` may be passed in order to filter the service list,
+# see `__docker_compose_services`.
+__docker_compose_complete_services() {
+	COMPREPLY=( $(compgen -W "$(__docker_compose_services "$@")" -- "$cur") )
 }
 
 # The services for which at least one running container exists
-__docker_compose_services_running() {
-	names=$(__docker_compose_q ps --services --filter "status=running")
-	COMPREPLY=( $(compgen -W "$names" -- "$cur") )
-}
-
-# The services for which at least one stopped container exists
-__docker_compose_services_stopped() {
-	names=$(__docker_compose_q ps --services --filter "status=stopped")
+__docker_compose_complete_running_services() {
+	local names=$(__docker_compose_services --filter status=running)
 	COMPREPLY=( $(compgen -W "$names" -- "$cur") )
 }
 
@@ -131,10 +114,10 @@ _docker_compose_build() {
 
 	case "$cur" in
 		-*)
-			COMPREPLY=( $( compgen -W "--build-arg --force-rm --help --memory --no-cache --pull" -- "$cur" ) )
+			COMPREPLY=( $( compgen -W "--build-arg --compress --force-rm --help --memory --no-cache --pull" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_from_build
+			__docker_compose_complete_services --filter source=build
 			;;
 	esac
 }
@@ -163,7 +146,7 @@ _docker_compose_create() {
 			COMPREPLY=( $( compgen -W "--build --force-recreate --help --no-build --no-recreate" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_all
+			__docker_compose_complete_services
 			;;
 	esac
 }
@@ -234,7 +217,7 @@ _docker_compose_events() {
 			COMPREPLY=( $( compgen -W "--help --json" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_all
+			__docker_compose_complete_services
 			;;
 	esac
 }
@@ -242,17 +225,17 @@ _docker_compose_events() {
 
 _docker_compose_exec() {
 	case "$prev" in
-		--index|--user|-u)
+		--index|--user|-u|--workdir|-w)
 			return
 			;;
 	esac
 
 	case "$cur" in
 		-*)
-			COMPREPLY=( $( compgen -W "-d --detach --help --index --privileged -T --user -u" -- "$cur" ) )
+			COMPREPLY=( $( compgen -W "-d --detach --help --index --privileged -T --user -u --workdir -w" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_running
+			__docker_compose_complete_running_services
 			;;
 	esac
 }
@@ -268,7 +251,7 @@ _docker_compose_images() {
 			COMPREPLY=( $( compgen -W "--help --quiet -q" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_all
+			__docker_compose_complete_services
 			;;
 	esac
 }
@@ -286,7 +269,7 @@ _docker_compose_kill() {
 			COMPREPLY=( $( compgen -W "--help -s" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_running
+			__docker_compose_complete_running_services
 			;;
 	esac
 }
@@ -304,7 +287,7 @@ _docker_compose_logs() {
 			COMPREPLY=( $( compgen -W "--follow -f --help --no-color --tail --timestamps -t" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_all
+			__docker_compose_complete_services
 			;;
 	esac
 }
@@ -316,7 +299,7 @@ _docker_compose_pause() {
 			COMPREPLY=( $( compgen -W "--help" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_running
+			__docker_compose_complete_running_services
 			;;
 	esac
 }
@@ -338,7 +321,7 @@ _docker_compose_port() {
 			COMPREPLY=( $( compgen -W "--help --index --protocol" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_all
+			__docker_compose_complete_services
 			;;
 	esac
 }
@@ -370,7 +353,7 @@ _docker_compose_ps() {
 			COMPREPLY=( $( compgen -W "--help --quiet -q --services --filter" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_all
+			__docker_compose_complete_services
 			;;
 	esac
 }
@@ -379,10 +362,10 @@ _docker_compose_ps() {
 _docker_compose_pull() {
 	case "$cur" in
 		-*)
-			COMPREPLY=( $( compgen -W "--help --ignore-pull-failures --include-deps --parallel --quiet -q" -- "$cur" ) )
+			COMPREPLY=( $( compgen -W "--help --ignore-pull-failures --include-deps --no-parallel --quiet -q" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_from_image
+			__docker_compose_complete_services --filter source=image
 			;;
 	esac
 }
@@ -394,7 +377,7 @@ _docker_compose_push() {
 			COMPREPLY=( $( compgen -W "--help --ignore-push-failures" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_all
+			__docker_compose_complete_services
 			;;
 	esac
 }
@@ -412,7 +395,7 @@ _docker_compose_restart() {
 			COMPREPLY=( $( compgen -W "--help --timeout -t" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_running
+			__docker_compose_complete_running_services
 			;;
 	esac
 }
@@ -425,9 +408,9 @@ _docker_compose_rm() {
 			;;
 		*)
 			if __docker_compose_has_option "--stop|-s" ; then
-				__docker_compose_services_all
+				__docker_compose_complete_services
 			else
-				__docker_compose_services_stopped
+				__docker_compose_complete_services --filter status=stopped
 			fi
 			;;
 	esac
@@ -451,7 +434,7 @@ _docker_compose_run() {
 			COMPREPLY=( $( compgen -W "--detach -d --entrypoint -e --help --label -l --name --no-deps --publish -p --rm --service-ports -T --use-aliases --user -u --volume -v --workdir -w" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_all
+			__docker_compose_complete_services
 			;;
 	esac
 }
@@ -473,7 +456,7 @@ _docker_compose_scale() {
 			COMPREPLY=( $( compgen -W "--help --timeout -t" -- "$cur" ) )
 			;;
 		*)
-			COMPREPLY=( $(compgen -S "=" -W "$(___docker_compose_all_services_in_compose_file)" -- "$cur") )
+			COMPREPLY=( $(compgen -S "=" -W "$(__docker_compose_services)" -- "$cur") )
 			__docker_compose_nospace
 			;;
 	esac
@@ -486,7 +469,7 @@ _docker_compose_start() {
 			COMPREPLY=( $( compgen -W "--help" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_stopped
+			__docker_compose_complete_services --filter status=stopped
 			;;
 	esac
 }
@@ -504,7 +487,7 @@ _docker_compose_stop() {
 			COMPREPLY=( $( compgen -W "--help --timeout -t" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_running
+			__docker_compose_complete_running_services
 			;;
 	esac
 }
@@ -516,7 +499,7 @@ _docker_compose_top() {
 			COMPREPLY=( $( compgen -W "--help" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_running
+			__docker_compose_complete_running_services
 			;;
 	esac
 }
@@ -528,7 +511,7 @@ _docker_compose_unpause() {
 			COMPREPLY=( $( compgen -W "--help" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_paused
+			__docker_compose_complete_services --filter status=paused
 			;;
 	esac
 }
@@ -541,11 +524,11 @@ _docker_compose_up() {
 			return
 			;;
 		--exit-code-from)
-			__docker_compose_services_all
+			__docker_compose_complete_services
 			return
 			;;
 		--scale)
-			COMPREPLY=( $(compgen -S "=" -W "$(___docker_compose_all_services_in_compose_file)" -- "$cur") )
+			COMPREPLY=( $(compgen -S "=" -W "$(__docker_compose_services)" -- "$cur") )
 			__docker_compose_nospace
 			return
 			;;
@@ -559,7 +542,7 @@ _docker_compose_up() {
 			COMPREPLY=( $( compgen -W "--abort-on-container-exit --always-recreate-deps --build -d --detach --exit-code-from --force-recreate --help --no-build --no-color --no-deps --no-recreate --no-start --renew-anon-volumes -V --remove-orphans --scale --timeout -t" -- "$cur" ) )
 			;;
 		*)
-			__docker_compose_services_all
+			__docker_compose_complete_services
 			;;
 	esac
 }

+ 0 - 148
project/RELEASE-PROCESS.md

@@ -1,148 +0,0 @@
-Building a Compose release
-==========================
-
-## Prerequisites
-
-The release scripts require the following tools installed on the host:
-
-* https://hub.github.com/
-* https://stedolan.github.io/jq/
-* http://pandoc.org/
-
-## To get started with a new release
-
-Create a branch, update version, and add release notes by running `make-branch`
-
-        ./script/release/make-branch $VERSION [$BASE_VERSION]
-
-`$BASE_VERSION` will default to master. Use the last version tag for a bug fix
-release.
-
-As part of this script you'll be asked to:
-
-1.  Update the version in `compose/__init__.py` and `script/run/run.sh`.
-
-    If the next release will be an RC, append `-rcN`, e.g. `1.4.0-rc1`.
-
-2.  Write release notes in `CHANGELOG.md`.
-
-    Almost every feature enhancement should be mentioned, with the most
-    visible/exciting ones first. Use descriptive sentences and give context
-    where appropriate.
-
-    Bug fixes are worth mentioning if it's likely that they've affected lots
-    of people, or if they were regressions in the previous version.
-
-    Improvements to the code are not worth mentioning.
-
-3.  Create a new repository on [bintray](https://bintray.com/docker-compose).
-    The name has to match the name of the branch (e.g. `bump-1.9.0`) and the
-    type should be "Generic". Other fields can be left blank.
-
-4.  Check that the `vnext-compose` branch on
-    [the docs repo](https://github.com/docker/docker.github.io/) has
-    documentation for all the new additions in the upcoming release, and create
-    a PR there for what needs to be amended.
-
-
-## When a PR is merged into master that we want in the release
-
-1. Check out the bump branch and run the cherry pick script
-
-        git checkout bump-$VERSION
-        ./script/release/cherry-pick-pr $PR_NUMBER
-
-2. When you are done cherry-picking branches move the bump version commit to HEAD
-
-        ./script/release/rebase-bump-commit
-        git push --force $USERNAME bump-$VERSION
-
-
-## To release a version (whether RC or stable)
-
-Check out the bump branch and run the `build-binaries` script
-
-        git checkout bump-$VERSION
-        ./script/release/build-binaries
-
-When prompted build the non-linux binaries and test them.
-
-1.  Download the different platform binaries by running the following script:
-
-    `./script/release/download-binaries $VERSION`
-
-    The binaries for Linux, OSX and Windows will be downloaded in the `binaries-$VERSION` folder.
-
-3.  Draft a release from the tag on GitHub (the `build-binaries` script will open the window for
-    you)
-
-    The tag will only be present on Github when you run the `push-release`
-    script in step 7, but you can pre-fill it at that point.
-
-4.  Paste in installation instructions and release notes. Here's an example -
-    change the Compose version and Docker version as appropriate:
-
-        If you're a Mac or Windows user, the best way to install Compose and keep it up-to-date is **[Docker for Mac and Windows](https://www.docker.com/products/docker)**.
-
-        Docker for Mac and Windows will automatically install the latest version of Docker Engine for you.
-
-        Alternatively, you can use the usual commands to install or upgrade Compose:
-
-        ```
-        curl -L https://github.com/docker/compose/releases/download/1.16.0/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose
-        chmod +x /usr/local/bin/docker-compose
-        ```
-
-        See the [install docs](https://docs.docker.com/compose/install/) for more install options and instructions.
-
-        ## Compose file format compatibility matrix
-
-        | Compose file format | Docker Engine |
-        | --- | --- |
-        | 3.3 | 17.06.0+ |
-        | 3.0 &ndash; 3.2 | 1.13.0+ |
-        | 2.3 | 17.06.0+ |
-        | 2.2 | 1.13.0+ |
-        | 2.1 | 1.12.0+ |
-        | 2.0 | 1.10.0+ |
-        | 1.0 | 1.9.1+ |
-
-        ## Changes
-
-        ...release notes go here...
-
-5.  Attach the binaries and `script/run/run.sh`
-
-6.  Add "Thanks" with a list of contributors. The contributor list can be generated
-    by running `./script/release/contributors`.
-
-7.  If everything looks good, it's time to push the release.
-
-
-        ./script/release/push-release
-
-
-8.  Merge the bump PR.
-
-8.  Publish the release on GitHub.
-
-9.  Check that all the binaries download (following the install instructions) and run.
-
-10. Announce the release on the appropriate Slack channel(s).
-
-## If it’s a stable release (not an RC)
-
-1. Close the release’s milestone.
-
-## If it’s a minor release (1.x.0), rather than a patch release (1.x.y)
-
-1. Open a PR against `master` to:
-
-    - update `CHANGELOG.md` to bring it in line with `release`
-    - bump the version in `compose/__init__.py` to the *next* minor version number with `dev` appended. For example, if you just released `1.4.0`, update it to `1.5.0dev`.
-
-2. Get the PR merged.
-
-## Finally
-
-1. Celebrate, however you’d like.

+ 1 - 0
project/RELEASE-PROCESS.md

@@ -0,0 +1 @@
+../script/release/README.md

+ 2 - 2
requirements.txt

@@ -2,8 +2,8 @@ backports.ssl-match-hostname==3.5.0.1; python_version < '3'
 cached-property==1.3.0
 certifi==2017.4.17
 chardet==3.0.4
-docker==3.2.1
-docker-pycreds==0.2.1
+docker==3.3.0
+docker-pycreds==0.2.3
 dockerpty==0.4.1
 docopt==0.6.2
 enum34==1.1.6; python_version < '3.4'

+ 14 - 0
script/release/Dockerfile

@@ -0,0 +1,14 @@
+FROM python:3.6
+RUN mkdir -p /src && pip install -U Jinja2==2.10 \
+    PyGithub==1.39 \
+    pypandoc==1.4 \
+    GitPython==2.1.9 \
+    requests==2.18.4 && \
+    apt-get update && apt-get install -y pandoc
+
+VOLUME /src/script/release
+WORKDIR /src
+COPY . /src
+RUN python setup.py develop
+ENTRYPOINT ["python", "script/release/release.py"]
+CMD ["--help"]

+ 184 - 0
script/release/README.md

@@ -0,0 +1,184 @@
+# Release HOWTO
+
+This file describes the process of making a public release of `docker-compose`.
+Please read it carefully before proceeding!
+
+## Prerequisites
+
+The following things are required to bring a release to a successful conclusion:
+
+### Local Docker engine (Linux Containers)
+
+The release script runs inside a container and builds images that will be part
+of the release.
+
+### Docker Hub account
+
+You should be logged into a Docker Hub account that allows pushing to the
+following repositories:
+
+- docker/compose
+- docker/compose-tests
+
+### A Github account and Github API token
+
+Your Github account needs to have write access on the `docker/compose` repo.
+To generate a Github token, head over to the
+[Personal access tokens](https://github.com/settings/tokens) page in your
+Github settings and select "Generate new token". Your token should include
+(at minimum) the following scopes:
+
+- `repo:status`
+- `public_repo`
+
+This API token should be exposed to the release script through the
+`GITHUB_TOKEN` environment variable.
+
+### A Bintray account and Bintray API key
+
+Your Bintray account will need to be an admin member of the
+[docker-compose organization](https://bintray.com/docker-compose).
+Additionally, you should generate a personal API key. To do so, click your
+username in the top-right hand corner and select "Edit profile"; on the new
+page, select "API key" in the left-side menu.
+
+This API key should be exposed to the release script through the
+`BINTRAY_TOKEN` environment variable.
+
+### A PyPi account
+
+Said account needs to be a member of the maintainers group for the
+[`docker-compose` project](https://pypi.org/project/docker-compose/).
+
+Moreover, the `~/.pypirc` file should exist on your host and contain the
+relevant pypi credentials.
+
+## Start a feature release
+
+A feature release is a release that includes all changes present in the
+`master` branch when initiated. It's typically versioned `X.Y.0-rc1`, where
+Y is the minor version of the previous release incremented by one. A series
+of one or more Release Candidates (RCs) should be made available to the public
+to find and squash potential bugs.
+
+From the root of the Compose repository, run the following command:
+```
+./script/release/release.sh -b <BINTRAY_USERNAME> start X.Y.0-rc1
+```
+
+After a short initialization period, the script will invite you to edit the
+`CHANGELOG.md` file. Do so, taking care to respect the same format as
+previous releases. Once done, the script will display a `diff` of the staged
+changes for the bump commit. After you validate these, a bump commit will be
+created on the new release branch and pushed to the remote.
+
+The release tool then waits for the CI to conclude before proceeding.
+If failures are reported, the release will be aborted until these are fixed.
+Please refer to the "Resume a draft release" section below for more details.
+
+Once all resources have been prepared, the release script will exit with a
+message resembling this one:
+
+```
+You're almost done! Please verify that everything is in order and you are ready
+to make the release public, then run the following command:
+./script/release/release.sh -b user finalize X.Y.0-rc1
+```
+
+Once you are ready to finalize the release (making binaries and other versioned
+assets public), proceed to the "Finalize a release" section of this guide.
+
+## Start a patch release
+
+A patch release is a release that builds off a previous release with discrete
+additions. This can be an RC release after RC1 (`X.Y.0-rcZ`, `Z > 1`), a GA release
+based off the final RC (`X.Y.0`), or a bugfix release based off a previous
+GA release (`X.Y.Z`, `Z > 0`).
+
+From the root of the Compose repository, run the following command:
+```
+./script/release/release.sh -b <BINTRAY_USERNAME> start --patch=BASE_VERSION RELEASE_VERSION
+```
+
+The process of starting a patch release is identical to starting a feature
+release, except for one difference: at the beginning, the script will ask for
+PR numbers you wish to cherry-pick into the release. These numbers should
+correspond to existing PRs on the docker/compose repository. Multiple numbers
+should be separated by whitespace.
+
+Once you are ready to finalize the release (making binaries and other versioned
+assets public), proceed to the "Finalize a release" section of this guide.
+
+## Finalize a release
+
+Once you're ready to make your release public, you may execute the following
+command from the root of the Compose repository:
+```
+./script/release/release.sh -b <BINTRAY_USERNAME> finalize RELEASE_VERSION
+```
+
+Note that this command will create and publish versioned assets to the public.
+As a result, it cannot be reverted. The command will perform some basic
+sanity checks before doing so, but it is your responsibility to ensure
+everything is in order before pushing the button.
+
+After the command exits, you should make sure:
+
+- The `docker/compose:VERSION` image is available on Docker Hub and functional
+- The `pip install -U docker-compose==VERSION` command correctly installs the
+  specified version
+- The install command on the Github release page installs the new release
+
+## Resume a draft release
+
+"Resuming" a release lets you address the following situations occurring before
+a release is made final:
+
+- Cherry-pick additional PRs to include in the release
+- Resume a release that was aborted because of CI failures after they've been
+  addressed
+- Rebuild / redownload assets after manual changes have been made to the
+  release branch
+- etc.
+
+From the root of the Compose repository, run the following command:
+```
+./script/release/release.sh -b <BINTRAY_USERNAME> resume RELEASE_VERSION
+```
+
+The release tool will attempt to determine what steps it's already been through
+for the specified release and pick up where it left off. Some steps are
+executed again no matter what, as it's assumed they'll produce different
+results, like building images or downloading binaries.
+
+## Cancel a draft release
+
+If issues snuck into your release branch, it is sometimes easier to start from
+scratch. Before a release has been finalized, it is possible to cancel it using
+the following command:
+```
+./script/release/release.sh -b <BINTRAY_USERNAME> cancel RELEASE_VERSION
+```
+
+This will remove the release branch associated with this release (locally and
+close the associated PR, remove the release page draft on Github and delete
+the Bintray repository for it, allowing you to start fresh.
+
+## Manual operations
+
+Some common, release-related operations are not covered by this tool and should
+be handled manually by the operator:
+
+- After any release:
+    - Announce new release on Slack
+- After a GA release:
+    - Close the release milestone
+    - Merge back `CHANGELOG.md` changes from the `release` branch into `master`
+    - Bump the version in `compose/__init__.py` to the *next* minor version
+      number with `dev` appended. For example, if you just released `1.4.0`,
+      update it to `1.5.0dev`
+
+## Advanced options
+
+You can consult the full list of options for the release tool by executing
+`./script/release/release.sh --help`.

+ 0 - 40
script/release/build-binaries

@@ -1,40 +0,0 @@
-#!/bin/bash
-#
-# Build the release binaries
-#
-
-. "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
-
-function usage() {
-    >&2 cat << EOM
-Build binaries for the release.
-
-This script requires that 'git config branch.${BRANCH}.release' is set to the
-release version for the release branch.
-
-EOM
-    exit 1
-}
-
-BRANCH="$(git rev-parse --abbrev-ref HEAD)"
-VERSION="$(git config "branch.${BRANCH}.release")" || usage
-REPO=docker/compose
-
-# Build the binaries
-script/clean
-script/build/linux
-
-echo "Building the container distribution"
-script/build/image $VERSION
-
-echo "Building the compose-tests image"
-script/build/test-image $VERSION
-
-echo "Create a github release"
-# TODO: script more of this https://developer.github.com/v3/repos/releases/
-browser https://github.com/$REPO/releases/new
-
-echo "Don't forget to download the osx and windows binaries from appveyor/bintray\!"
-echo "https://dl.bintray.com/docker-compose/$BRANCH/"
-echo "https://ci.appveyor.com/project/docker/compose"
-echo

+ 0 - 30
script/release/contributors

@@ -1,30 +0,0 @@
-#!/bin/bash
-set -e
-
-
-function usage() {
-    >&2 cat << EOM
-Print the list of github contributors for the release
-
-Usage:
-
-    $0 <previous release tag>
-EOM
-    exit 1
-}
-
-[[ -n "$1" ]] || usage
-PREV_RELEASE=$1
-BRANCH="$(git rev-parse --abbrev-ref HEAD)"
-URL="https://api.github.com/repos/docker/compose/compare"
-
-contribs=$(curl -sf "$URL/$PREV_RELEASE...$BRANCH" | \
-    jq -r '.commits[].author.login' | \
-    sort | \
-    uniq -c | \
-    sort -nr)
-
-echo "Contributions by user: "
-echo "$contribs"
-echo
-echo "$contribs" | awk '{print "@"$2","}' | xargs

+ 0 - 39
script/release/download-binaries

@@ -1,39 +0,0 @@
-#!/bin/bash
-
-function usage() {
-    >&2 cat << EOM
-Download Linux, Mac OS and Windows binaries from remote endpoints
-
-Usage:
-
-    $0 <version>
-
-Options:
-
-    version        version string for the release (ex: 1.6.0)
-
-EOM
-    exit 1
-}
-
-
-[ -n "$1" ] || usage
-VERSION=$1
-BASE_BINTRAY_URL=https://dl.bintray.com/docker-compose/bump-$VERSION/
-DESTINATION=binaries-$VERSION
-APPVEYOR_URL=https://ci.appveyor.com/api/projects/docker/compose/\
-artifacts/dist%2Fdocker-compose-Windows-x86_64.exe?branch=bump-$VERSION
-
-mkdir $DESTINATION
-
-
-wget -O $DESTINATION/docker-compose-Darwin-x86_64 $BASE_BINTRAY_URL/docker-compose-Darwin-x86_64
-wget -O $DESTINATION/docker-compose-Linux-x86_64 $BASE_BINTRAY_URL/docker-compose-Linux-x86_64
-wget -O $DESTINATION/docker-compose-Windows-x86_64.exe $APPVEYOR_URL
-
-echo -e "\n\nCopy the following lines into the integrity check table in the release notes:\n\n"
-cd $DESTINATION
-rm -rf *.sha256
-ls | xargs sha256sum | sed 's/  / | /g' | sed -r 's/([^ |]+)/`\1`/g'
-ls | xargs -I@ bash -c "sha256sum @ | cut -d' ' -f1 > @.sha256"
-cd -

+ 0 - 86
script/release/make-branch

@@ -1,86 +0,0 @@
-#!/bin/bash
-#
-# Prepare a new release branch
-#
-
-. "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
-
-function usage() {
-    >&2 cat << EOM
-Create a new release branch 'release-<version>'
-
-Usage:
-
-    $0 <version> [<base_version>]
-
-Options:
-
-    version        version string for the release (ex: 1.6.0)
-    base_version   branch or tag to start from. Defaults to master. For
-                   bug-fix releases use the previous stage release tag.
-
-EOM
-    exit 1
-}
-
-
-[ -n "$1" ] || usage
-VERSION=$1
-BRANCH=bump-$VERSION
-REPO=docker/compose
-GITHUB_REPO=git@github.com:$REPO
-
-if [ -z "$2" ]; then
-    BASE_VERSION="master"
-else
-    BASE_VERSION=$2
-fi
-
-
-DEFAULT_REMOTE=release
-REMOTE="$(find_remote "$GITHUB_REPO")"
-# If we don't have a docker remote add one
-if [ -z "$REMOTE" ]; then
-    echo "Creating $DEFAULT_REMOTE remote"
-    git remote add ${DEFAULT_REMOTE} ${GITHUB_REPO}
-fi
-
-# handle the difference between a branch and a tag
-if [ -z "$(git name-rev --tags $BASE_VERSION | grep tags)" ]; then
-    BASE_VERSION=$REMOTE/$BASE_VERSION
-fi
-
-echo "Creating a release branch $VERSION from $BASE_VERSION"
-read -n1 -r -p "Continue? (ctrl+c to cancel)"
-git fetch $REMOTE -p
-git checkout -b $BRANCH $BASE_VERSION
-
-echo "Merging remote release branch into new release branch"
-git merge --strategy=ours --no-edit $REMOTE/release
-
-# Store the release version for this branch in git, so that other release
-# scripts can use it
-git config "branch.${BRANCH}.release" $VERSION
-
-
-editor=${EDITOR:-vim}
-
-echo "Update versions in compose/__init__.py, script/run/run.sh"
-$editor compose/__init__.py
-$editor script/run/run.sh
-
-
-echo "Write release notes in CHANGELOG.md"
-browser "https://github.com/docker/compose/issues?q=milestone%3A$VERSION+is%3Aclosed"
-$editor CHANGELOG.md
-
-
-git diff
-echo "Verify changes before commit. Exit the shell to commit changes"
-$SHELL || true
-git commit -a -m "Bump $VERSION" --signoff --no-verify
-
-
-echo "Push branch to docker remote"
-git push $REMOTE
-browser https://github.com/$REPO/compare/docker:release...$BRANCH?expand=1

+ 34 - 0
script/release/release.md.tmpl

@@ -0,0 +1,34 @@
+If you're a Mac or Windows user, the best way to install Compose and keep it up-to-date is **[Docker for Mac and Windows](https://www.docker.com/products/docker)**.
+
+Docker for Mac and Windows will automatically install the latest version of Docker Engine for you.
+
+Alternatively, you can use the usual commands to install or upgrade Compose:
+
+```
+curl -L https://github.com/docker/compose/releases/download/{{version}}/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose
+chmod +x /usr/local/bin/docker-compose
+```
+
+See the [install docs](https://docs.docker.com/compose/install/) for more install options and instructions.
+
+## Compose file format compatibility matrix
+
+| Compose file format | Docker Engine |
+| --- | --- |
+{% for engine, formats in compat_matrix.items() -%}
+| {% for format in formats %}{{format}}{% if not loop.last %}, {% endif %}{% endfor %} | {{engine}}+ |
+{% endfor -%}
+
+## Changes
+
+{{changelog}}
+
+Thanks to {% for name in contributors %}@{{name}}{% if not loop.last %}, {% endif %}{% endfor %} for contributing to this release!
+
+## Integrity check
+
+| Binary name | SHA-256 sum |
+| --- | --- |
+{% for filename, sha in integrity.items() -%}
+| `{{filename}}` | `{{sha[1]}}` |
+{% endfor -%}

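For reference, the template above is rendered by `release.py` (shown next) via `jinja2.Template`. Here is a hedged sketch of how the compatibility-matrix loop expands; the sample `compat_matrix` data is illustrative, not the authoritative mapping:

```python
from jinja2 import Template

# Reproduce the compat-matrix loop from release.md.tmpl in isolation.
tmpl = Template(
    "| Compose file format | Docker Engine |\n"
    "| --- | --- |\n"
    "{% for engine, formats in compat_matrix.items() -%}\n"
    "| {% for format in formats %}{{ format }}{% if not loop.last %}, {% endif %}{% endfor %} | {{ engine }}+ |\n"
    "{% endfor %}"
)
print(tmpl.render(compat_matrix={
    '17.06.0': ['3.3', '2.3'],          # assumed sample data
    '1.13.0': ['3.0', '3.1', '3.2', '2.2'],
}))
# | Compose file format | Docker Engine |
# | --- | --- |
# | 3.3, 2.3 | 17.06.0+ |
# | 3.0, 3.1, 3.2, 2.2 | 1.13.0+ |
```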
+ 329 - 0
script/release/release.py

@@ -0,0 +1,329 @@
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import argparse
+import os
+import sys
+import time
+from distutils.core import run_setup
+
+import pypandoc
+from jinja2 import Template
+from release.bintray import BintrayAPI
+from release.const import BINTRAY_ORG
+from release.const import NAME
+from release.const import REPO_ROOT
+from release.downloader import BinaryDownloader
+from release.images import ImageManager
+from release.repository import delete_assets
+from release.repository import get_contributors
+from release.repository import Repository
+from release.repository import upload_assets
+from release.utils import branch_name
+from release.utils import compatibility_matrix
+from release.utils import read_release_notes_from_changelog
+from release.utils import ScriptError
+from release.utils import update_init_py_version
+from release.utils import update_run_sh_version
+from release.utils import yesno
+
+
+def create_initial_branch(repository, args):
+    release_branch = repository.create_release_branch(args.release, args.base)
+    if args.base and args.cherries:
+        print('Detected patch version.')
+        cherries = input('Indicate (space-separated) PR numbers to cherry-pick then press Enter:\n')
+        repository.cherry_pick_prs(release_branch, cherries.split())
+
+    return create_bump_commit(repository, release_branch, args.bintray_user, args.bintray_org)
+
+
+def create_bump_commit(repository, release_branch, bintray_user, bintray_org):
+    with release_branch.config_reader() as cfg:
+        release = cfg.get('release')
+    print('Updating version info in __init__.py and run.sh')
+    update_run_sh_version(release)
+    update_init_py_version(release)
+
+    input('Please add the release notes to the CHANGELOG.md file, then press Enter to continue.')
+    proceed = None
+    while not proceed:
+        print(repository.diff())
+        proceed = yesno('Are these changes ok? y/N ', default=False)
+
+    if repository.diff():
+        repository.create_bump_commit(release_branch, release)
+    repository.push_branch_to_remote(release_branch)
+
+    bintray_api = BintrayAPI(os.environ['BINTRAY_TOKEN'], bintray_user)
+    print('Creating data repository {} on bintray'.format(release_branch.name))
+    bintray_api.create_repository(bintray_org, release_branch.name, 'generic')
+
+
+def monitor_pr_status(pr_data):
+    print('Waiting for CI to complete...')
+    last_commit = pr_data.get_commits().reversed[0]
+    while True:
+        status = last_commit.get_combined_status()
+        if status.state == 'pending' or status.state == 'failure':
+            summary = {
+                'pending': 0,
+                'success': 0,
+                'failure': 0,
+            }
+            for detail in status.statuses:
+                if detail.context == 'dco-signed':
+                    # dco-signed check breaks on merge remote-tracking ; ignore it
+                    continue
+                summary[detail.state] += 1
+            print('{pending} pending, {success} successes, {failure} failures'.format(**summary))
+            if status.total_count == 0:
+                # Mostly for testing purposes against repos with no CI setup
+                return True
+            elif summary['pending'] == 0 and summary['failure'] == 0:
+                return True
+            elif summary['failure'] > 0:
+                raise ScriptError('CI failures detected!')
+            time.sleep(30)
+        elif status.state == 'success':
+            print('{} successes: all clear!'.format(status.total_count))
+            return True
+
+
+def check_pr_mergeable(pr_data):
+    if not pr_data.mergeable:
+        print(
+            'WARNING!! PR #{} can not currently be merged. You will need to '
+            'resolve the conflicts manually before finalizing the release.'.format(pr_data.number)
+        )
+    return pr_data.mergeable
+
+
+def create_release_draft(repository, version, pr_data, files):
+    print('Creating Github release draft')
+    with open(os.path.join(os.path.dirname(__file__), 'release.md.tmpl'), 'r') as f:
+        template = Template(f.read())
+    print('Rendering release notes based on template')
+    release_notes = template.render(
+        version=version,
+        compat_matrix=compatibility_matrix(),
+        integrity=files,
+        contributors=get_contributors(pr_data),
+        changelog=read_release_notes_from_changelog(),
+    )
+    gh_release = repository.create_release(
+        version, release_notes, draft=True, prerelease='-rc' in version,
+        target_commitish='release'
+    )
+    print('Release draft initialized')
+    return gh_release
+
+
+def print_final_instructions(args):
+    print(
+        "You're almost done! Please verify that everything is in order and "
+        "you are ready to make the release public, then run the following "
+        "command:\n{exe} -b {user} finalize {version}".format(
+            exe=sys.argv[0], user=args.bintray_user, version=args.release
+        )
+    )
+
+
+def resume(args):
+    try:
+        repository = Repository(REPO_ROOT, args.repo)
+        br_name = branch_name(args.release)
+        if not repository.branch_exists(br_name):
+            raise ScriptError('No local branch exists for this release.')
+        gh_release = repository.find_release(args.release)
+        if gh_release and not gh_release.draft:
+            print('WARNING!! Found non-draft (public) release for this version!')
+            proceed = yesno(
+                'Are you sure you wish to proceed? Modifying an already '
+                'released version is dangerous! y/N ', default=False
+            )
+            # yesno() returns True, False or None; anything but an explicit yes aborts
+            if proceed is not True:
+                raise ScriptError('Aborting release')
+
+        release_branch = repository.checkout_branch(br_name)
+        if args.cherries:
+            cherries = input('Indicate (space-separated) PR numbers to cherry-pick, then press Enter:\n')
+            repository.cherry_pick_prs(release_branch, cherries.split())
+
+        create_bump_commit(repository, release_branch, args.bintray_user, args.bintray_org)
+        pr_data = repository.find_release_pr(args.release)
+        if not pr_data:
+            pr_data = repository.create_release_pull_request(args.release)
+        check_pr_mergeable(pr_data)
+        monitor_pr_status(pr_data)
+        downloader = BinaryDownloader(args.destination)
+        files = downloader.download_all(args.release)
+        if not gh_release:
+            gh_release = create_release_draft(repository, args.release, pr_data, files)
+        delete_assets(gh_release)
+        upload_assets(gh_release, files)
+        img_manager = ImageManager(args.release)
+        img_manager.build_images(repository, files)
+    except ScriptError as e:
+        print(e)
+        return 1
+
+    print_final_instructions(args)
+    return 0
+
+
+def cancel(args):
+    try:
+        repository = Repository(REPO_ROOT, args.repo)
+        repository.close_release_pr(args.release)
+        repository.remove_release(args.release)
+        repository.remove_bump_branch(args.release)
+        bintray_api = BintrayAPI(os.environ['BINTRAY_TOKEN'], args.bintray_user)
+        print('Removing Bintray data repository for {}'.format(args.release))
+        bintray_api.delete_repository(args.bintray_org, branch_name(args.release))
+    except ScriptError as e:
+        print(e)
+        return 1
+    print('Release cancellation complete.')
+    return 0
+
+
+def start(args):
+    try:
+        repository = Repository(REPO_ROOT, args.repo)
+        create_initial_branch(repository, args)
+        pr_data = repository.create_release_pull_request(args.release)
+        check_pr_mergeable(pr_data)
+        monitor_pr_status(pr_data)
+        downloader = BinaryDownloader(args.destination)
+        files = downloader.download_all(args.release)
+        gh_release = create_release_draft(repository, args.release, pr_data, files)
+        upload_assets(gh_release, files)
+        img_manager = ImageManager(args.release)
+        img_manager.build_images(repository, files)
+    except ScriptError as e:
+        print(e)
+        return 1
+
+    print_final_instructions(args)
+    return 0
+
+
+def finalize(args):
+    try:
+        repository = Repository(REPO_ROOT, args.repo)
+        img_manager = ImageManager(args.release)
+        pr_data = repository.find_release_pr(args.release)
+        if not pr_data:
+            raise ScriptError('No PR found for {}'.format(args.release))
+        if not check_pr_mergeable(pr_data):
+            raise ScriptError('Cannot finalize a release with an unmergeable PR')
+        if not img_manager.check_images():
+            raise ScriptError('Missing release image')
+        br_name = branch_name(args.release)
+        if not repository.branch_exists(br_name):
+            raise ScriptError('No local branch exists for this release.')
+        gh_release = repository.find_release(args.release)
+        if not gh_release:
+            raise ScriptError('No GitHub release draft for this version')
+
+        repository.checkout_branch(br_name)
+
+        pypandoc.convert_file(
+            os.path.join(REPO_ROOT, 'README.md'), 'rst', outputfile=os.path.join(REPO_ROOT, 'README.rst')
+        )
+        run_setup(os.path.join(REPO_ROOT, 'setup.py'), script_args=['sdist', 'bdist_wheel'])
+
+        merge_status = pr_data.merge()
+        if not merge_status.merged:
+            raise ScriptError('Unable to merge PR #{}: {}'.format(pr_data.number, merge_status.message))
+        print('Uploading to PyPI')
+        run_setup(os.path.join(REPO_ROOT, 'setup.py'), script_args=['upload'])
+        img_manager.push_images()
+        repository.publish_release(gh_release)
+    except ScriptError as e:
+        print(e)
+        return 1
+
+    return 0
+
+
+ACTIONS = [
+    'start',
+    'cancel',
+    'resume',
+    'finalize',
+]
+
+EPILOG = '''Example uses:
+    * Start a new feature release (includes all changes currently in master)
+        release.py -b user start 1.23.0
+    * Start a new patch release
+        release.py -b user --patch 1.21.0 start 1.21.1
+    * Cancel / rollback an existing release draft
+        release.py -b user cancel 1.23.0
+    * Restart a previously aborted patch release
+        release.py -b user -p 1.21.0 resume 1.21.1
+'''
+
+
+def main():
+    if 'GITHUB_TOKEN' not in os.environ:
+        print('GITHUB_TOKEN environment variable must be set')
+        return 1
+
+    if 'BINTRAY_TOKEN' not in os.environ:
+        print('BINTRAY_TOKEN environment variable must be set')
+        return 1
+
+    parser = argparse.ArgumentParser(
+        description='Orchestrate a new release of docker/compose. This tool assumes that you have '
+                    'obtained a GitHub API token and Bintray API key and set the GITHUB_TOKEN and '
+                    'BINTRAY_TOKEN environment variables accordingly.',
+        epilog=EPILOG, formatter_class=argparse.RawTextHelpFormatter)
+    parser.add_argument(
+        'action', choices=ACTIONS, help='The action to be performed for this release'
+    )
+    parser.add_argument('release', help='Release number, e.g. 1.9.0-rc1, 2.1.1')
+    parser.add_argument(
+        '--patch', '-p', dest='base',
+        help='Which version is being patched by this release'
+    )
+    parser.add_argument(
+        '--repo', '-r', dest='repo', default=NAME,
+        help='Start a release for the given repo (default: {})'.format(NAME)
+    )
+    parser.add_argument(
+        '-b', dest='bintray_user', required=True, metavar='USER',
+        help='Username associated with the Bintray API key'
+    )
+    parser.add_argument(
+        '--bintray-org', dest='bintray_org', metavar='ORG', default=BINTRAY_ORG,
+        help='Organization name on Bintray where the data repository will be created.'
+    )
+    parser.add_argument(
+        '--destination', '-o', metavar='DIR', default='binaries',
+        help='Directory, relative to the project root, where release binaries will be downloaded'
+    )
+    parser.add_argument(
+        '--no-cherries', '-C', dest='cherries', action='store_false',
+        help='If set, the program will not prompt the user for PR numbers to cherry-pick'
+    )
+    args = parser.parse_args()
+
+    if args.action == 'start':
+        return start(args)
+    elif args.action == 'resume':
+        return resume(args)
+    elif args.action == 'cancel':
+        return cancel(args)
+    elif args.action == 'finalize':
+        return finalize(args)
+
+    print('Unexpected action "{}"'.format(args.action), file=sys.stderr)
+    return 1
+
+
+if __name__ == '__main__':
+    sys.exit(main())

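The action dispatch at the bottom of main() walks an if/elif chain that has to be kept in step with ACTIONS by hand. Since argparse already restricts args.action via choices=ACTIONS, a lookup table is a compact equivalent; a minimal sketch using the handler functions defined above:

    handlers = {
        'start': start,
        'cancel': cancel,
        'resume': resume,
        'finalize': finalize,
    }
    # choices=ACTIONS guarantees args.action is a valid key here
    return handlers[args.action](args)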
+ 25 - 0
script/release/release.sh

@@ -0,0 +1,25 @@
+#!/bin/sh
+
+if ! docker image inspect compose/release-tool > /dev/null 2>&1; then
+    docker build -t compose/release-tool -f "$(pwd)"/script/release/Dockerfile "$(pwd)"
+fi
+
+if test -z "$GITHUB_TOKEN"; then
+    echo "GITHUB_TOKEN environment variable must be set"
+    exit 1
+fi
+
+if test -z "$BINTRAY_TOKEN"; then
+    echo "BINTRAY_TOKEN environment variable must be set"
+    exit 1
+fi
+
+docker run -e GITHUB_TOKEN="$GITHUB_TOKEN" -e BINTRAY_TOKEN="$BINTRAY_TOKEN" -it \
+    --mount type=bind,source="$(pwd)",target=/src \
+    --mount type=bind,source="$(pwd)"/.git,target=/src/.git \
+    --mount type=bind,source="$HOME"/.docker,target=/root/.docker \
+    --mount type=bind,source=/var/run/docker.sock,target=/var/run/docker.sock \
+    --mount type=bind,source="$HOME"/.ssh,target=/root/.ssh \
+    -v "$HOME"/.pypirc:/root/.pypirc \
+    compose/release-tool "$@"

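The wrapper above is intended to run from the repository root, for example `./script/release/release.sh -b user start 1.22.0`; it builds the compose/release-tool image on first use, mounts the working tree, Docker socket, and credentials into the container, and forwards all arguments to the release tool.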
+ 0 - 0
script/release/release/__init__.py


+ 40 - 0
script/release/release/bintray.py

@@ -0,0 +1,40 @@
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import json
+
+import requests
+
+from .const import NAME
+
+
+class BintrayAPI(requests.Session):
+    def __init__(self, api_key, user, *args, **kwargs):
+        super(BintrayAPI, self).__init__(*args, **kwargs)
+        self.auth = (user, api_key)
+        # no trailing slash: URLs below are built as '{base}/repos/...'
+        self.base_url = 'https://api.bintray.com'
+
+    def create_repository(self, subject, repo_name, repo_type='generic'):
+        url = '{base}/repos/{subject}/{repo_name}'.format(
+            base=self.base_url, subject=subject, repo_name=repo_name,
+        )
+        data = {
+            'name': repo_name,
+            'type': repo_type,
+            'private': False,
+            'desc': 'Automated release for {}: {}'.format(NAME, repo_name),
+            'labels': ['docker-compose', 'docker', 'release-bot'],
+        }
+        return self.post_json(url, data)
+
+    def delete_repository(self, subject, repo_name):
+        url = '{base}/repos/{subject}/{repo_name}'.format(
+            base=self.base_url, subject=subject, repo_name=repo_name,
+        )
+        return self.delete(url)
+
+    def post_json(self, url, data, **kwargs):
+        kwargs.setdefault('headers', {})['Content-Type'] = 'application/json'
+        return self.post(url, data=json.dumps(data), **kwargs)

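Because BintrayAPI subclasses requests.Session, the credentials set in __init__ apply to every request it makes. A minimal usage sketch, assuming BINTRAY_TOKEN is exported and the package imports as release.bintray (the user name 'myuser' is a placeholder):

    import os

    from release.bintray import BintrayAPI  # assumed import path

    api = BintrayAPI(os.environ['BINTRAY_TOKEN'], 'myuser')
    resp = api.create_repository('docker-compose', 'bump-1.21.1')
    resp.raise_for_status()  # both helpers return a requests.Response
    api.delete_repository('docker-compose', 'bump-1.21.1')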
+ 9 - 0
script/release/release/const.py

@@ -0,0 +1,9 @@
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import os
+
+
+REPO_ROOT = os.path.join(os.path.dirname(__file__), '..', '..', '..')
+NAME = 'docker/compose'
+BINTRAY_ORG = 'docker-compose'

+ 72 - 0
script/release/release/downloader.py

@@ -0,0 +1,72 @@
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import hashlib
+import os
+
+import requests
+
+from .const import BINTRAY_ORG
+from .const import NAME
+from .const import REPO_ROOT
+from .utils import branch_name
+
+
+class BinaryDownloader(requests.Session):
+    base_bintray_url = 'https://dl.bintray.com/{}'.format(BINTRAY_ORG)
+    base_appveyor_url = 'https://ci.appveyor.com/api/projects/{}/artifacts/'.format(NAME)
+
+    def __init__(self, destination, *args, **kwargs):
+        super(BinaryDownloader, self).__init__(*args, **kwargs)
+        self.destination = destination
+        # downloads are written under REPO_ROOT/<destination>, so create the
+        # directory there rather than relative to the current working directory
+        os.makedirs(os.path.join(REPO_ROOT, self.destination), exist_ok=True)
+
+    def download_from_bintray(self, repo_name, filename):
+        print('Downloading {} from Bintray'.format(filename))
+        url = '{base}/{repo_name}/{filename}'.format(
+            base=self.base_bintray_url, repo_name=repo_name, filename=filename
+        )
+        full_dest = os.path.join(REPO_ROOT, self.destination, filename)
+        return self._download(url, full_dest)
+
+    def download_from_appveyor(self, branch_name, filename):
+        print('Downloading {} from AppVeyor'.format(filename))
+        url = '{base}/dist%2F{filename}?branch={branch_name}'.format(
+            base=self.base_appveyor_url, filename=filename, branch_name=branch_name
+        )
+        full_dest = os.path.join(REPO_ROOT, self.destination, filename)
+        return self._download(url, full_dest)
+
+    def _download(self, url, full_dest):
+        m = hashlib.sha256()
+        with open(full_dest, 'wb') as f:
+            r = self.get(url, stream=True)
+            r.raise_for_status()  # fail fast rather than writing an error page to disk
+            for chunk in r.iter_content(chunk_size=1024 * 600, decode_unicode=False):
+                print('.', end='', flush=True)
+                m.update(chunk)
+                f.write(chunk)
+
+        print(' download complete')
+        hex_digest = m.hexdigest()
+        with open(full_dest + '.sha256', 'w') as f:
+            f.write('{}  {}\n'.format(hex_digest, os.path.basename(full_dest)))
+        return full_dest, hex_digest
+
+    def download_all(self, version):
+        files = {
+            'docker-compose-Darwin-x86_64': None,
+            'docker-compose-Linux-x86_64': None,
+            'docker-compose-Windows-x86_64.exe': None,
+        }
+
+        for filename in files.keys():
+            if 'Windows' in filename:
+                files[filename] = self.download_from_appveyor(
+                    branch_name(version), filename
+                )
+            else:
+                files[filename] = self.download_from_bintray(
+                    branch_name(version), filename
+                )
+        return files

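download_all returns a dict mapping each artifact name to a (path, sha256) tuple, and _download writes a matching .sha256 file next to each artifact. A sketch of re-verifying an artifact after the fact, assuming the same import path convention as above:

    import hashlib

    from release.downloader import BinaryDownloader  # assumed import path

    downloader = BinaryDownloader('binaries')
    files = downloader.download_all('1.21.1')
    path, digest = files['docker-compose-Linux-x86_64']
    with open(path, 'rb') as f:
        # recompute and compare against the digest recorded at download time
        assert hashlib.sha256(f.read()).hexdigest() == digest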
+ 82 - 0
script/release/release/images.py

@@ -0,0 +1,82 @@
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import os
+import shutil
+
+import docker
+
+from .const import REPO_ROOT
+from .utils import ScriptError
+
+
+class ImageManager(object):
+    def __init__(self, version):
+        self.docker_client = docker.APIClient(**docker.utils.kwargs_from_env())
+        self.version = version
+
+    def build_images(self, repository, files):
+        print("Building release images...")
+        repository.write_git_sha()
+        docker_client = self.docker_client  # reuse the client created in __init__
+        distdir = os.path.join(REPO_ROOT, 'dist')
+        os.makedirs(distdir, exist_ok=True)
+        shutil.copy(files['docker-compose-Linux-x86_64'][0], distdir)
+        print('Building docker/compose image')
+        logstream = docker_client.build(
+            REPO_ROOT, tag='docker/compose:{}'.format(self.version), dockerfile='Dockerfile.run',
+            decode=True
+        )
+        for chunk in logstream:
+            if 'error' in chunk:
+                raise ScriptError('Build error: {}'.format(chunk['error']))
+            if 'stream' in chunk:
+                print(chunk['stream'], end='')
+
+        print('Building test image (for UCP e2e)')
+        logstream = docker_client.build(
+            REPO_ROOT, tag='docker-compose-tests:tmp', decode=True
+        )
+        for chunk in logstream:
+            if 'error' in chunk:
+                raise ScriptError('Build error: {}'.format(chunk['error']))
+            if 'stream' in chunk:
+                print(chunk['stream'], end='')
+
+        container = docker_client.create_container(
+            'docker-compose-tests:tmp', entrypoint='tox'
+        )
+        docker_client.commit(container, 'docker/compose-tests:latest')
+        docker_client.tag('docker/compose-tests:latest', 'docker/compose-tests:{}'.format(self.version))
+        docker_client.remove_container(container, force=True)
+        docker_client.remove_image('docker-compose-tests:tmp', force=True)
+
+    @property
+    def image_names(self):
+        return [
+            'docker/compose-tests:latest',
+            'docker/compose-tests:{}'.format(self.version),
+            'docker/compose:{}'.format(self.version)
+        ]
+
+    def check_images(self):
+        # self.image_names already encodes the version; reuse the shared client
+        docker_client = self.docker_client
+
+        for name in self.image_names:
+            try:
+                docker_client.inspect_image(name)
+            except docker.errors.ImageNotFound:
+                print('Expected image {} was not found'.format(name))
+                return False
+        return True
+
+    def push_images(self):
+        docker_client = self.docker_client
+
+        for name in self.image_names:
+            print('Pushing {} to Docker Hub'.format(name))
+            logstream = docker_client.push(name, stream=True, decode=True)
+            for chunk in logstream:
+                if 'status' in chunk:
+                    print(chunk['status'])

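ImageManager captures the release version once at construction, so the typical check/build/push sequence needs no further version plumbing. A minimal sketch, where repository and files are assumed to come from the Repository and BinaryDownloader steps:

    from release.images import ImageManager  # assumed import path

    manager = ImageManager('1.21.1')
    if not manager.check_images():
        # builds docker/compose:<version> plus the docker/compose-tests images
        manager.build_images(repository, files)
    manager.push_images()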
+ 225 - 0
script/release/release/repository.py

@@ -0,0 +1,225 @@
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import os
+import tempfile
+
+import requests
+from git import GitCommandError
+from git import Repo
+from github import Github
+
+from .const import NAME
+from .const import REPO_ROOT
+from .utils import branch_name
+from .utils import read_release_notes_from_changelog
+from .utils import ScriptError
+
+
+class Repository(object):
+    def __init__(self, root=None, gh_name=None):
+        if root is None:
+            root = REPO_ROOT
+        if gh_name is None:
+            gh_name = NAME
+        self.git_repo = Repo(root)
+        self.gh_client = Github(os.environ['GITHUB_TOKEN'])
+        self.gh_repo = self.gh_client.get_repo(gh_name)
+
+    def create_release_branch(self, version, base=None):
+        print('Creating release branch {} based on {}...'.format(version, base or 'master'))
+        remote = self.find_remote(self.gh_repo.full_name)
+        br_name = branch_name(version)
+        remote.fetch()
+        if self.branch_exists(br_name):
+            raise ScriptError(
+                "Branch {} already exists locally. Please remove it before "
+                "running the release script, or use `resume` instead.".format(
+                    br_name
+                )
+            )
+        if base is not None:
+            base = self.git_repo.tag('refs/tags/{}'.format(base))
+        else:
+            base = 'refs/remotes/{}/master'.format(remote.name)
+        release_branch = self.git_repo.create_head(br_name, commit=base)
+        release_branch.checkout()
+        self.git_repo.git.merge('--strategy=ours', '--no-edit', '{}/release'.format(remote.name))
+        with release_branch.config_writer() as cfg:
+            cfg.set_value('release', version)
+        return release_branch
+
+    def find_remote(self, remote_name=None):
+        if not remote_name:
+            remote_name = self.gh_repo.full_name
+        for remote in self.git_repo.remotes:
+            for url in remote.urls:
+                if remote_name in url:
+                    return remote
+        return None
+
+    def create_bump_commit(self, bump_branch, version):
+        print('Creating bump commit...')
+        bump_branch.checkout()
+        # pass -m and the message as separate arguments, otherwise the literal
+        # quotes end up inside the commit message
+        self.git_repo.git.commit('-a', '-s', '-m', 'Bump {}'.format(version), '--no-verify')
+
+    def diff(self):
+        return self.git_repo.git.diff()
+
+    def checkout_branch(self, name):
+        return self.git_repo.branches[name].checkout()
+
+    def push_branch_to_remote(self, branch, remote_name=None):
+        print('Pushing branch {} to remote...'.format(branch.name))
+        remote = self.find_remote(remote_name)
+        remote.push(refspec=branch, force=True)
+
+    def branch_exists(self, name):
+        return name in [h.name for h in self.git_repo.heads]
+
+    def create_release_pull_request(self, version):
+        return self.gh_repo.create_pull(
+            title='Bump {}'.format(version),
+            body='Automated release for docker-compose {}\n\n{}'.format(
+                version, read_release_notes_from_changelog()
+            ),
+            base='release',
+            head=branch_name(version),
+        )
+
+    def create_release(self, version, release_notes, **kwargs):
+        return self.gh_repo.create_git_release(
+            tag=version, name=version, message=release_notes, **kwargs
+        )
+
+    def find_release(self, version):
+        print('Retrieving release draft for {}'.format(version))
+        releases = self.gh_repo.get_releases()
+        for release in releases:
+            if release.tag_name == version and release.title == version:
+                return release
+        return None
+
+    def publish_release(self, release):
+        release.update_release(
+            name=release.title,
+            message=release.body,
+            draft=False,
+            prerelease=release.prerelease
+        )
+
+    def remove_release(self, version):
+        print('Removing release draft for {}'.format(version))
+        releases = self.gh_repo.get_releases()
+        for release in releases:
+            if release.tag_name == version and release.title == version:
+                if not release.draft:
+                    print(
+                        'The release at {} is no longer a draft. If you TRULY intend '
+                        'to remove it, please do so manually.'.format(release.url)
+                    )
+                    continue
+                release.delete_release()
+
+    def remove_bump_branch(self, version, remote_name=None):
+        name = branch_name(version)
+        if not self.branch_exists(name):
+            return False
+        print('Removing local branch "{}"'.format(name))
+        if self.git_repo.active_branch.name == name:
+            print('Active branch is about to be deleted. Switching to master...')
+            try:
+                self.checkout_branch('master')
+            except GitCommandError:
+                raise ScriptError(
+                    'Unable to checkout master. Try stashing local changes before proceeding.'
+                )
+        self.git_repo.branches[name].delete(self.git_repo, name, force=True)
+        print('Removing remote branch "{}"'.format(name))
+        remote = self.find_remote(remote_name)
+        try:
+            remote.push(name, delete=True)
+        except GitCommandError as e:
+            if 'remote ref does not exist' in str(e):
+                return False
+            raise ScriptError(
+                'Error trying to remove remote branch: {}'.format(e)
+            )
+        return True
+
+    def find_release_pr(self, version):
+        print('Retrieving release PR for {}'.format(version))
+        name = branch_name(version)
+        open_prs = self.gh_repo.get_pulls(state='open')
+        for pr in open_prs:
+            if pr.head.ref == name:
+                print('Found matching PR #{}'.format(pr.number))
+                return pr
+        print('No open PR for this release branch.')
+        return None
+
+    def close_release_pr(self, version):
+        print('Retrieving and closing release PR for {}'.format(version))
+        name = branch_name(version)
+        open_prs = self.gh_repo.get_pulls(state='open')
+        count = 0
+        for pr in open_prs:
+            if pr.head.ref == name:
+                print('Found matching PR #{}'.format(pr.number))
+                pr.edit(state='closed')
+                count += 1
+        if count == 0:
+            print('No open PR for this release branch.')
+        return count
+
+    def write_git_sha(self):
+        with open(os.path.join(REPO_ROOT, 'compose', 'GITSHA'), 'w') as f:
+            f.write(self.git_repo.head.commit.hexsha[:7])
+
+    def cherry_pick_prs(self, release_branch, ids):
+        if not ids:
+            return
+        release_branch.checkout()
+        for i in ids:
+            try:
+                i = int(i)
+            except ValueError as e:
+                raise ScriptError('Invalid PR id: {}'.format(e))
+            print('Retrieving PR#{}'.format(i))
+            pr = self.gh_repo.get_pull(i)
+            patch_data = requests.get(pr.patch_url).text
+            self.apply_patch(patch_data)
+
+    def apply_patch(self, patch_data):
+        with tempfile.NamedTemporaryFile(mode='w', prefix='_compose_cherry', encoding='utf-8') as f:
+            f.write(patch_data)
+            f.flush()
+            self.git_repo.git.am('--3way', f.name)
+
+
+def get_contributors(pr_data):
+    commits = pr_data.get_commits()
+    authors = {}
+    for commit in commits:
+        if commit.author is None:
+            # commits whose email is not linked to a GitHub account have no author
+            continue
+        author = commit.author.login
+        authors[author] = authors.get(author, 0) + 1
+    return [x[0] for x in sorted(list(authors.items()), key=lambda x: x[1])]
+
+
+def upload_assets(gh_release, files):
+    print('Uploading binaries and hash sums')
+    for filename, filedata in files.items():
+        print('Uploading {}...'.format(filename))
+        gh_release.upload_asset(filedata[0], content_type='application/octet-stream')
+        gh_release.upload_asset('{}.sha256'.format(filedata[0]), content_type='text/plain')
+    print('Uploading run.sh...')
+    gh_release.upload_asset(
+        os.path.join(REPO_ROOT, 'script', 'run', 'run.sh'), content_type='text/plain'
+    )
+
+
+def delete_assets(gh_release):
+    print('Removing previously uploaded assets')
+    for asset in gh_release.get_assets():
+        print('Deleting asset {}'.format(asset.name))
+        asset.delete_asset()

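Repository wraps both the local GitPython checkout and the PyGithub client, so a single object drives the whole branch-and-PR flow. A minimal sketch, assuming GITHUB_TOKEN is exported and the process runs inside a compose checkout:

    from release.repository import Repository  # assumed import path

    repo = Repository()  # defaults: local repo at REPO_ROOT, GitHub repo docker/compose
    branch = repo.create_release_branch('1.21.1', base='1.21.0')
    repo.push_branch_to_remote(branch)
    pr = repo.create_release_pull_request('1.21.1')
    print('Opened PR #{}'.format(pr.number))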
+ 85 - 0
script/release/release/utils.py

@@ -0,0 +1,85 @@
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import os
+import re
+
+from .const import REPO_ROOT
+from compose import const as compose_const
+
+section_header_re = re.compile(r'^[0-9]+\.[0-9]+\.[0-9]+ \([0-9]{4}-[01][0-9]-[0-3][0-9]\)$')
+
+
+class ScriptError(Exception):
+    pass
+
+
+def branch_name(version):
+    return 'bump-{}'.format(version)
+
+
+def read_release_notes_from_changelog():
+    with open(os.path.join(REPO_ROOT, 'CHANGELOG.md'), 'r') as f:
+        lines = f.readlines()
+    i = 0
+    while i < len(lines):
+        if section_header_re.match(lines[i]):
+            break
+        i += 1
+
+    j = i + 1
+    while j < len(lines):
+        if section_header_re.match(lines[j]):
+            break
+        j += 1
+
+    # i is the section header and i + 1 its underline; j - 1 is the blank
+    # line preceding the next section header
+    return ''.join(lines[i + 2:j - 1])
+
+
+def update_init_py_version(version):
+    path = os.path.join(REPO_ROOT, 'compose', '__init__.py')
+    with open(path, 'r') as f:
+        contents = f.read()
+    contents = re.sub(r"__version__ = '[0-9a-z.-]+'", "__version__ = '{}'".format(version), contents)
+    with open(path, 'w') as f:
+        f.write(contents)
+
+
+def update_run_sh_version(version):
+    path = os.path.join(REPO_ROOT, 'script', 'run', 'run.sh')
+    with open(path, 'r') as f:
+        contents = f.read()
+    contents = re.sub(r'VERSION="[0-9a-z.-]+"', 'VERSION="{}"'.format(version), contents)
+    with open(path, 'w') as f:
+        f.write(contents)
+
+
+def compatibility_matrix():
+    result = {}
+    for engine_version in compose_const.API_VERSION_TO_ENGINE_VERSION.values():
+        result[engine_version] = []
+    for fmt, api_version in compose_const.API_VERSIONS.items():
+        result[compose_const.API_VERSION_TO_ENGINE_VERSION[api_version]].append(fmt.vstring)
+    return result
+
+
+def yesno(prompt, default=None):
+    """
+    Prompt the user for a yes or no.
+
+    Can optionally specify a default value, which will only be
+    used if they enter a blank line.
+
+    Unrecognised input (anything other than "y", "n", "yes",
+    "no" or "") will return None.
+    """
+    answer = input(prompt).strip().lower()
+
+    if answer == "y" or answer == "yes":
+        return True
+    elif answer == "n" or answer == "no":
+        return False
+    elif answer == "":
+        return default
+    else:
+        return None

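Note that yesno is three-valued: True, False, or None for unrecognised input. Callers that need to distinguish an explicit "no" from a typo should re-prompt on None; a small sketch of that pattern:

    answer = None
    while answer is None:
        # None means the input was unrecognised; True/False are definitive
        answer = yesno('Publish the release now? y/N ', default=False)
    if not answer:
        raise ScriptError('Aborted by user')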
+ 1 - 1
script/run/run.sh

@@ -15,7 +15,7 @@
 
 set -e
 
-VERSION="1.21.0"
+VERSION="1.21.1"
 IMAGE="docker/compose:$VERSION"
 
 

+ 1 - 1
setup.py

@@ -36,7 +36,7 @@ install_requires = [
     'requests >= 2.6.1, != 2.11.0, != 2.12.2, != 2.18.0, < 2.19',
     'texttable >= 0.9.0, < 0.10',
     'websocket-client >= 0.32.0, < 1.0',
-    'docker >= 3.2.1, < 4.0',
+    'docker >= 3.3.0, < 4.0',
     'dockerpty >= 0.4.1, < 0.5',
     'six >= 1.3.0, < 2',
     'jsonschema >= 2.5.1, < 3',

+ 22 - 0
tests/unit/config/config_test.py

@@ -1322,6 +1322,28 @@ class ConfigTest(unittest.TestCase):
         assert mount.type == 'bind'
         assert mount.source == expected_source
 
+    def test_config_invalid_ipam_config(self):
+        with pytest.raises(ConfigurationError) as excinfo:
+            config.load(
+                build_config_details(
+                    {
+                        'version': str(V2_1),
+                        'networks': {
+                            'foo': {
+                                'driver': 'default',
+                                'ipam': {
+                                    'driver': 'default',
+                                    'config': ['172.18.0.0/16'],
+                                }
+                            }
+                        }
+                    },
+                    filename='filename.yml',
+                )
+            )
+        assert ('networks.foo.ipam.config contains an invalid type,'
+                ' it should be an object') in excinfo.exconly()
+
     def test_config_valid_service_names(self):
         for valid_name in ['_', '-', '.__.', '_what-up.', 'what_.up----', 'whatup']:
             services = config.load(

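For contrast with the invalid case above, each ipam config entry must be an object rather than a bare CIDR string. A sketch of the same fixture in a form that passes validation, using the helpers already imported in this test module:

    config.load(
        build_config_details(
            {
                'version': str(V2_1),
                'networks': {
                    'foo': {
                        'driver': 'default',
                        'ipam': {
                            'driver': 'default',
                            # objects with string fields, not bare strings
                            'config': [{'subnet': '172.18.0.0/16', 'gateway': '172.18.0.1'}],
                        }
                    }
                }
            },
            filename='filename.yml',
        )
    )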
+ 10 - 1
tests/unit/network_test.py

@@ -23,7 +23,10 @@ class NetworkTest(unittest.TestCase):
                     'aux_addresses': ['11.0.0.1', '24.25.26.27'],
                     'ip_range': '156.0.0.1-254'
                 }
-            ]
+            ],
+            'options': {
+                'iface': 'eth0',
+            }
         }
         labels = {
             'com.project.tests.istest': 'true',
@@ -57,6 +60,9 @@ class NetworkTest(unittest.TestCase):
                         'Subnet': '172.0.0.1/16',
                         'Gateway': '172.0.0.1'
                     }],
+                    'Options': {
+                        'iface': 'eth0',
+                    },
                 },
                 'Labels': remote_labels
             },
@@ -78,6 +84,7 @@ class NetworkTest(unittest.TestCase):
             {'Driver': 'overlay', 'Options': remote_options}, net
         )
 
+    @mock.patch('compose.network.Network.true_name', lambda n: n.full_name)
     def test_check_remote_network_config_driver_mismatch(self):
         net = Network(None, 'compose_test', 'net1', 'overlay')
         with pytest.raises(NetworkConfigChangedError) as e:
@@ -87,6 +94,7 @@ class NetworkTest(unittest.TestCase):
 
         assert 'driver has changed' in str(e.value)
 
+    @mock.patch('compose.network.Network.true_name', lambda n: n.full_name)
     def test_check_remote_network_config_options_mismatch(self):
         net = Network(None, 'compose_test', 'net1', 'overlay')
         with pytest.raises(NetworkConfigChangedError) as e:
@@ -140,6 +148,7 @@ class NetworkTest(unittest.TestCase):
             net
         )
 
+    @mock.patch('compose.network.Network.true_name', lambda n: n.full_name)
     def test_check_remote_network_labels_mismatch(self):
         net = Network(None, 'compose_test', 'net1', 'overlay', labels={
             'com.project.touhou.character': 'sakuya.izayoi'

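The true_name patches added throughout these tests appear to short-circuit the legacy resource-name detection introduced in the 1.21 series, so the unit tests can run without a live Docker API to inspect existing networks.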
+ 2 - 0
tests/unit/project_test.py

@@ -60,6 +60,7 @@ class ProjectTest(unittest.TestCase):
         assert project.get_service('db').options['image'] == 'busybox:latest'
         assert not project.networks.use_networking
 
+    @mock.patch('compose.network.Network.true_name', lambda n: n.full_name)
     def test_from_config_v2(self):
         config = Config(
             version=V2_0,
@@ -217,6 +218,7 @@ class ProjectTest(unittest.TestCase):
         )
         assert project.get_service('test')._get_volumes_from() == [container_name + ":rw"]
 
+    @mock.patch('compose.network.Network.true_name', lambda n: n.full_name)
     def test_use_volumes_from_service_container(self):
         container_ids = ['aabbccddee', '12345']
 

+ 19 - 0
tests/unit/service_test.py

@@ -10,6 +10,7 @@ from docker.errors import NotFound
 from .. import mock
 from .. import unittest
 from compose.config.errors import DependencyError
+from compose.config.types import MountSpec
 from compose.config.types import ServicePort
 from compose.config.types import ServiceSecret
 from compose.config.types import VolumeFromSpec
@@ -955,6 +956,24 @@ class ServiceTest(unittest.TestCase):
 
         assert service.create_container().id == 'new_cont_id'
 
+    def test_build_volume_options_duplicate_binds(self):
+        self.mock_client.api_version = '1.29'  # Trigger 3.2 format workaround
+        service = Service('foo', client=self.mock_client)
+        ctnr_opts, override_opts = service._build_container_volume_options(
+            previous_container=None,
+            container_options={
+                'volumes': [
+                    MountSpec.parse({'source': 'vol', 'target': '/data', 'type': 'volume'}),
+                    VolumeSpec.parse('vol:/data:rw'),
+                ],
+                'environment': {},
+            },
+            override_options={},
+        )
+        assert 'binds' in override_opts
+        assert len(override_opts['binds']) == 1
+        assert override_opts['binds'][0] == 'vol:/data:rw'
+
 
 class TestServiceNetwork(unittest.TestCase):
     def setUp(self):