
Merge pull request #5844 from docker/bump-1.21.0-rc1

Bump 1.21.0 RC1
Joffrey F 7 years ago
Parent
Commit
9cc30ad0e9

+ 3 - 3
.circleci/config.yml

@@ -6,11 +6,11 @@ jobs:
     steps:
     - checkout
     - run:
-        name: install python3
-        command: brew update > /dev/null && brew upgrade python
+        name: setup script
+        command: ./script/setup/osx
     - run:
         name: install tox
-        command: sudo pip3 install --upgrade tox==2.1.1
+        command: sudo pip install --upgrade tox==2.1.1
     - run:
         name: unit tests
         command: tox -e py27,py36 -- tests/unit

+ 65 - 1
CHANGELOG.md

@@ -1,6 +1,70 @@
 Change log
 ==========
 
+1.21.0 (2018-04-11)
+-------------------
+
+### New features
+
+#### Compose file version 2.4
+
+- Introduced version 2.4 of the `docker-compose.yml` specification.
+  This version requires Docker Engine 17.12.0 or above.
+
+- Added support for the `platform` parameter in service definitions.
+  If supplied, the parameter is also used when performing a build for
+  the service.
+
+#### Compose file version 2.2 and up
+
+- Added support for the `cpu_rt_period` and `cpu_rt_runtime` parameters
+  in service definitions (2.x only).
+
+#### Compose file version 2.1 and up
+
+- Added support for the `cpu_period` parameter in service definitions
+  (2.x only).
+
+- Added support for the `isolation` parameter in service build configurations.
+  Additionally, the service-level `isolation` parameter is used for builds as
+  well if no `build.isolation` parameter is defined. (2.x only)
+
+#### All formats
+
+- Added support for the `--workdir` flag in `docker-compose exec`.
+
+- Added support for the `--compress` flag in `docker-compose build`.
+
+- `docker-compose pull` is now performed in parallel by default. You can
+  opt out using the `--no-parallel` flag. The `--parallel` flag is now
+  deprecated and will be removed in a future version.
+
+- Dashes and underscores in project names are no longer stripped out.
+
+- `docker-compose build` now supports using a Dockerfile located outside
+  the build context.
+
+### Bugfixes
+
+- Compose now checks that the volume's configuration matches the remote
+  volume, and errors out if a mismatch is detected.
+
+- Fixed a bug that caused Compose to raise unexpected errors when attempting
+  to create several one-off containers in parallel.
+
+- Fixed a bug with argument parsing when using `docker-machine config` to
+  generate TLS flags for `exec` and `run` commands.
+
+- Fixed a bug where variable substitution with an empty default value
+  (e.g. `${VAR:-}`) would print an incorrect warning.
+
+- Improved resilience when the encoding of the Compose file doesn't match the
+  system's. Users are encouraged to use UTF-8 when possible.
+
+- Fixed a bug where external overlay networks in Swarm would be incorrectly
+  recognized as nonexistent by Compose, interrupting otherwise valid
+  operations.
+
 1.20.1 (2018-03-21)
 -------------------
 
@@ -17,7 +81,7 @@ Change log
 #### Compose file version 3.6
 
 - Introduced version 3.6 of the `docker-compose.yml` specification.
-  This version requires to be used with Docker Engine 18.02.0 or above.
+  This version requires Docker Engine 18.02.0 or above.
 
 - Added support for the `tmpfs.size` property in volume mappings
 

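To illustrate the `${VAR:-}` bugfix listed above (the underlying change lives in `compose/config/interpolation.py` further down in this diff), here is a minimal, standalone sketch of default-value substitution semantics. It is not Compose's implementation; the regex and the `substitute` helper are illustrative only.

```python
import re

# Illustrative-only handling of ${VAR}, ${VAR-default} and ${VAR:-default}.
# ":-" falls back to the default when the variable is unset OR empty,
# "-" only when it is unset; "${VAR:-}" is a legitimate empty default.
_PATTERN = re.compile(r'\$\{(?P<name>[^}:-]+)(?:(?P<sep>:?-)(?P<default>[^}]*))?\}')


def substitute(template, mapping):
    def replace(match):
        name, sep, default = match.group('name', 'sep', 'default')
        value = mapping.get(name)
        if sep == ':-':
            return value if value else (default or '')
        if sep == '-':
            return value if value is not None else (default or '')
        return value if value is not None else ''
    return _PATTERN.sub(replace, template)


print(substitute('repo/app:${TAG:-latest}', {}))   # repo/app:latest
print(substitute('args: "${EXTRA_ARGS:-}"', {}))   # args: "" -- empty default, no warning expected
```
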
+ 2 - 1
Dockerfile.run

@@ -3,12 +3,13 @@ FROM alpine:3.6
 ENV GLIBC 2.27-r0
 ENV DOCKERBINS_SHA 1270dce1bd7e1838d62ae21d2505d87f16efc1d9074645571daaefdfd0c14054
 
-RUN apk update && apk add --no-cache openssl ca-certificates curl && \
+RUN apk update && apk add --no-cache openssl ca-certificates curl libgcc && \
     curl -fsSL -o /etc/apk/keys/sgerrand.rsa.pub https://raw.githubusercontent.com/sgerrand/alpine-pkg-glibc/master/sgerrand.rsa.pub && \
     curl -fsSL -o glibc-$GLIBC.apk https://github.com/sgerrand/alpine-pkg-glibc/releases/download/$GLIBC/glibc-$GLIBC.apk && \
     apk add --no-cache glibc-$GLIBC.apk && \
     ln -s /lib/libz.so.1 /usr/glibc-compat/lib/ && \
     ln -s /lib/libc.musl-x86_64.so.1 /usr/glibc-compat/lib && \
+    ln -s /usr/lib/libgcc_s.so.1 /usr/glibc-compat/lib && \
     curl -fsSL -o dockerbins.tgz "https://download.docker.com/linux/static/stable/x86_64/docker-17.12.1-ce.tgz" && \
     echo "${DOCKERBINS_SHA}  dockerbins.tgz" | sha256sum -c - && \
     tar xvf dockerbins.tgz docker/docker --strip-components 1 && \

+ 30 - 2
MAINTAINERS

@@ -11,11 +11,29 @@
 [Org]
 	[Org."Core maintainers"]
 		people = [
+			"mefyl",
+			"mnottale",
+			"shin-",
+		]
+	[Org.Alumni]
+		people = [
+			# Aanand Prasad is one of the two creators of the fig project
+			# which later went on to become docker-compose, and a longtime
+			# maintainer responsible for several keystone features
 			"aanand",
+			# Ben Firshman is also one of the fig creators and contributed
+			# heavily to the project's design and UX as well as the
+			# day-to-day maintenance
 			"bfirsh",
-			"dnephin",
+			# Mazz Mosley made significant contributions to the project
+			# in 2015 with solid bugfixes and improved error handling
+			# among them
 			"mnowster",
-			"shin-",
+			# Daniel Nephin is one of the longest-running maintainers on
+			# the Compose project, and has contributed several major features
+			# including multi-file support, variable interpolation, secrets
+			# emulation and many more
+			"dnephin",
 		]
 
 [people]
@@ -41,6 +59,16 @@
 	Email = "[email protected]"
 	GitHub = "dnephin"
 
+	[people.mefyl]
+	Name = "Quentin Hocquet"
+	Email = "[email protected]"
+	GitHub = "mefyl"
+
+	[people.mnottale]
+	Name = "Matthieu Nottale"
+	Email = "[email protected]"
+	GitHub = "mnottale"
+
 	[people.mnowster]
 	Name = "Mazz Mosley"
 	Email = "[email protected]"

+ 1 - 1
compose/__init__.py

@@ -1,4 +1,4 @@
 from __future__ import absolute_import
 from __future__ import unicode_literals
 
-__version__ = '1.20.1'
+__version__ = '1.21.0-rc1'

+ 4 - 2
compose/cli/command.py

@@ -122,12 +122,14 @@ def get_project(project_dir, config_path=None, project_name=None, verbose=False,
     )
 
     with errors.handle_connection_errors(client):
-        return Project.from_config(project_name, config_data, client)
+        return Project.from_config(
+            project_name, config_data, client, environment.get('DOCKER_DEFAULT_PLATFORM')
+        )
 
 
 def get_project_name(working_dir, project_name=None, environment=None):
     def normalize_name(name):
-        return re.sub(r'[^a-z0-9]', '', name.lower())
+        return re.sub(r'[^-_a-z0-9]', '', name.lower())
 
     if not environment:
         environment = Environment.from_env_file(working_dir)

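The `normalize_name` change above implements the changelog entry about dashes and underscores no longer being stripped from project names; the two regexes from the hunk compare as follows:

```python
import re


def normalize_name_old(name):
    # 1.20.x behaviour: anything outside [a-z0-9] is dropped.
    return re.sub(r'[^a-z0-9]', '', name.lower())


def normalize_name_new(name):
    # 1.21.0 behaviour: dashes and underscores survive normalization.
    return re.sub(r'[^-_a-z0-9]', '', name.lower())


print(normalize_name_old('My-Cool_Project'))  # mycoolproject
print(normalize_name_new('My-Cool_Project'))  # my-cool_project
```
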
+ 21 - 5
compose/cli/main.py

@@ -254,6 +254,7 @@ class TopLevelCommand(object):
         Usage: build [options] [--build-arg key=val...] [SERVICE...]
 
         Options:
+            --compress              Compress the build context using gzip.
             --force-rm              Always remove intermediate containers.
             --no-cache              Do not use cache when building the image.
             --pull                  Always attempt to pull a newer version of the image.
@@ -277,7 +278,9 @@ class TopLevelCommand(object):
             pull=bool(options.get('--pull', False)),
             force_rm=bool(options.get('--force-rm', False)),
             memory=options.get('--memory'),
-            build_args=build_args)
+            build_args=build_args,
+            gzip=options.get('--compress', False),
+        )
 
     def bundle(self, options):
         """
@@ -459,6 +462,7 @@ class TopLevelCommand(object):
                               instances of a service [default: 1]
             -e, --env KEY=VAL Set environment variables (can be used multiple times,
                               not supported in API < 1.25)
+            -w, --workdir DIR Working directory for this command.
         """
         environment = Environment.from_env_file(self.project_dir)
         use_cli = not environment.get_boolean('COMPOSE_INTERACTIVE_NO_CLI')
@@ -467,7 +471,12 @@ class TopLevelCommand(object):
         detach = options.get('--detach')
 
         if options['--env'] and docker.utils.version_lt(self.project.client.api_version, '1.25'):
-            raise UserError("Setting environment for exec is not supported in API < 1.25'")
+            raise UserError("Setting environment for exec is not supported in API < 1.25 (%s)"
+                            % self.project.client.api_version)
+
+        if options['--workdir'] and docker.utils.version_lt(self.project.client.api_version, '1.35'):
+            raise UserError("Setting workdir for exec is not supported in API < 1.35 (%s)"
+                            % self.project.client.api_version)
 
         try:
             container = service.get_container(number=index)
@@ -487,6 +496,7 @@ class TopLevelCommand(object):
             "user": options["--user"],
             "tty": tty,
             "stdin": True,
+            "workdir": options["--workdir"],
         }
 
         if docker.utils.version_gte(self.project.client.api_version, '1.25'):
@@ -704,14 +714,17 @@ class TopLevelCommand(object):
 
         Options:
             --ignore-pull-failures  Pull what it can and ignores images with pull failures.
-            --parallel              Pull multiple images in parallel.
+            --parallel              Deprecated, pull multiple images in parallel (enabled by default).
+            --no-parallel           Disable parallel pulling.
             -q, --quiet             Pull without printing progress information
             --include-deps          Also pull services declared as dependencies
         """
+        if options.get('--parallel'):
+            log.warn('--parallel option is deprecated and will be removed in future versions.')
         self.project.pull(
             service_names=options['SERVICE'],
             ignore_pull_failures=options.get('--ignore-pull-failures'),
-            parallel_pull=options.get('--parallel'),
+            parallel_pull=not options.get('--no-parallel'),
             silent=options.get('--quiet'),
             include_deps=options.get('--include-deps'),
         )
@@ -1408,7 +1421,7 @@ def call_docker(args, dockeropts):
     if verify:
         tls_options.append('--tlsverify')
     if host:
-        tls_options.extend(['--host', host])
+        tls_options.extend(['--host', host.lstrip('=')])
 
     args = [executable_path] + tls_options + args
     log.debug(" ".join(map(pipes.quote, args)))
@@ -1453,6 +1466,9 @@ def build_exec_command(options, container_id, command):
         for env_variable in options["--env"]:
             args += ["--env", env_variable]
 
+    if options["--workdir"]:
+        args += ["--workdir", options["--workdir"]]
+
     args += [container_id]
     args += command
     return args

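The `host.lstrip('=')` change (exercised by the new `test_shorthand_host_opt_interactive` acceptance test near the end of this diff) deals with the `-H=tcp://...` shorthand, where the parsed option value can retain a leading `=` that would otherwise be passed verbatim to `docker --host`. A minimal sketch of the normalization:

```python
def normalize_host_value(host):
    # "-H=tcp://1.2.3.4:2376" can be parsed as "=tcp://1.2.3.4:2376";
    # strip any leading "=" before handing the value to `docker --host`.
    return host.lstrip('=') if host else host


assert normalize_host_value('=tcp://1.2.3.4:2376') == 'tcp://1.2.3.4:2376'
assert normalize_host_value('tcp://1.2.3.4:2376') == 'tcp://1.2.3.4:2376'
print('host values normalized')
```
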
+ 15 - 4
compose/config/config.py

@@ -2,6 +2,7 @@ from __future__ import absolute_import
 from __future__ import unicode_literals
 
 import functools
+import io
 import logging
 import os
 import string
@@ -67,7 +68,10 @@ DOCKER_CONFIG_KEYS = [
     'command',
     'cpu_count',
     'cpu_percent',
+    'cpu_period',
     'cpu_quota',
+    'cpu_rt_period',
+    'cpu_rt_runtime',
     'cpu_shares',
     'cpus',
     'cpuset',
@@ -125,11 +129,12 @@ ALLOWED_KEYS = DOCKER_CONFIG_KEYS + [
     'container_name',
     'credential_spec',
     'dockerfile',
+    'init',
     'log_driver',
     'log_opt',
     'logging',
     'network_mode',
-    'init',
+    'platform',
     'scale',
     'stop_grace_period',
 ]
@@ -1115,6 +1120,7 @@ def merge_build(output, base, override):
     md.merge_scalar('network')
     md.merge_scalar('target')
     md.merge_scalar('shm_size')
+    md.merge_scalar('isolation')
     md.merge_mapping('args', parse_build_arguments)
     md.merge_field('cache_from', merge_unique_items_lists, default=[])
     md.merge_mapping('labels', parse_labels)
@@ -1428,10 +1434,15 @@ def has_uppercase(name):
     return any(char in string.ascii_uppercase for char in name)
 
 
-def load_yaml(filename):
+def load_yaml(filename, encoding=None):
     try:
-        with open(filename, 'r') as fh:
+        with io.open(filename, 'r', encoding=encoding) as fh:
             return yaml.safe_load(fh)
-    except (IOError, yaml.YAMLError) as e:
+    except (IOError, yaml.YAMLError, UnicodeDecodeError) as e:
+        if encoding is None:
+            # Sometimes the user's locale sets an encoding that doesn't match
+            # the YAML files. In such cases, retry once with the "default"
+            # UTF-8 encoding.
+            return load_yaml(filename, encoding='utf-8')
         error_name = getattr(e, '__module__', '') + '.' + e.__class__.__name__
         raise ConfigurationError(u"{}: {}".format(error_name, e))

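The `load_yaml` hunk above retries once with UTF-8 when the locale's default encoding cannot read the file. The sketch below mirrors that retry pattern as a standalone function, with a plain `ValueError` standing in for Compose's `ConfigurationError`:

```python
import io

import yaml  # PyYAML, as used by Compose


def load_yaml(filename, encoding=None):
    try:
        with io.open(filename, 'r', encoding=encoding) as fh:
            return yaml.safe_load(fh)
    except (IOError, yaml.YAMLError, UnicodeDecodeError) as e:
        if encoding is None:
            # The locale's default encoding did not match the file;
            # retry once with UTF-8 before reporting an error.
            return load_yaml(filename, encoding='utf-8')
        raise ValueError('{}: {}'.format(e.__class__.__name__, e))
```
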
+ 3 - 1
compose/config/config_schema_v2.1.json

@@ -88,7 +88,8 @@
                 "context": {"type": "string"},
                 "dockerfile": {"type": "string"},
                 "args": {"$ref": "#/definitions/list_or_dict"},
-                "labels": {"$ref": "#/definitions/labels"}
+                "labels": {"$ref": "#/definitions/labels"},
+                "isolation": {"type": "string"}
               },
               "additionalProperties": false
             }
@@ -106,6 +107,7 @@
         "container_name": {"type": "string"},
         "cpu_shares": {"type": ["number", "string"]},
         "cpu_quota": {"type": ["number", "string"]},
+        "cpu_period": {"type": ["number", "string"]},
         "cpuset": {"type": "string"},
         "depends_on": {
           "oneOf": [

+ 5 - 1
compose/config/config_schema_v2.2.json

@@ -90,7 +90,8 @@
                 "args": {"$ref": "#/definitions/list_or_dict"},
                 "labels": {"$ref": "#/definitions/labels"},
                 "cache_from": {"$ref": "#/definitions/list_of_strings"},
-                "network": {"type": "string"}
+                "network": {"type": "string"},
+                "isolation": {"type": "string"}
               },
               "additionalProperties": false
             }
@@ -110,6 +111,9 @@
         "cpu_percent": {"type": "integer", "minimum": 0, "maximum": 100},
         "cpu_shares": {"type": ["number", "string"]},
         "cpu_quota": {"type": ["number", "string"]},
+        "cpu_period": {"type": ["number", "string"]},
+        "cpu_rt_period": {"type": ["number", "string"]},
+        "cpu_rt_runtime": {"type": ["number", "string"]},
         "cpus": {"type": "number", "minimum": 0},
         "cpuset": {"type": "string"},
         "depends_on": {

+ 5 - 1
compose/config/config_schema_v2.3.json

@@ -93,7 +93,8 @@
                 "network": {"type": "string"},
                 "target": {"type": "string"},
                 "shm_size": {"type": ["integer", "string"]},
-                "extra_hosts": {"$ref": "#/definitions/list_or_dict"}
+                "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
+                "isolation": {"type": "string"}
               },
               "additionalProperties": false
             }
@@ -113,6 +114,9 @@
         "cpu_percent": {"type": "integer", "minimum": 0, "maximum": 100},
         "cpu_shares": {"type": ["number", "string"]},
         "cpu_quota": {"type": ["number", "string"]},
+        "cpu_period": {"type": ["number", "string"]},
+        "cpu_rt_period": {"type": ["number", "string"]},
+        "cpu_rt_runtime": {"type": ["number", "string"]},
         "cpus": {"type": "number", "minimum": 0},
         "cpuset": {"type": "string"},
         "depends_on": {

+ 513 - 0
compose/config/config_schema_v2.4.json

@@ -0,0 +1,513 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "id": "config_schema_v2.4.json",
+  "type": "object",
+
+  "properties": {
+    "version": {
+      "type": "string"
+    },
+
+    "services": {
+      "id": "#/properties/services",
+      "type": "object",
+      "patternProperties": {
+        "^[a-zA-Z0-9._-]+$": {
+          "$ref": "#/definitions/service"
+        }
+      },
+      "additionalProperties": false
+    },
+
+    "networks": {
+      "id": "#/properties/networks",
+      "type": "object",
+      "patternProperties": {
+        "^[a-zA-Z0-9._-]+$": {
+          "$ref": "#/definitions/network"
+        }
+      }
+    },
+
+    "volumes": {
+      "id": "#/properties/volumes",
+      "type": "object",
+      "patternProperties": {
+        "^[a-zA-Z0-9._-]+$": {
+          "$ref": "#/definitions/volume"
+        }
+      },
+      "additionalProperties": false
+    }
+  },
+
+  "patternProperties": {"^x-": {}},
+  "additionalProperties": false,
+
+  "definitions": {
+
+    "service": {
+      "id": "#/definitions/service",
+      "type": "object",
+
+      "properties": {
+        "blkio_config": {
+          "type": "object",
+          "properties": {
+            "device_read_bps": {
+              "type": "array",
+              "items": {"$ref": "#/definitions/blkio_limit"}
+            },
+            "device_read_iops": {
+              "type": "array",
+              "items": {"$ref": "#/definitions/blkio_limit"}
+            },
+            "device_write_bps": {
+              "type": "array",
+              "items": {"$ref": "#/definitions/blkio_limit"}
+            },
+            "device_write_iops": {
+              "type": "array",
+              "items": {"$ref": "#/definitions/blkio_limit"}
+            },
+            "weight": {"type": "integer"},
+            "weight_device": {
+              "type": "array",
+              "items": {"$ref": "#/definitions/blkio_weight"}
+            }
+          },
+          "additionalProperties": false
+        },
+
+        "build": {
+          "oneOf": [
+            {"type": "string"},
+            {
+              "type": "object",
+              "properties": {
+                "context": {"type": "string"},
+                "dockerfile": {"type": "string"},
+                "args": {"$ref": "#/definitions/list_or_dict"},
+                "labels": {"$ref": "#/definitions/labels"},
+                "cache_from": {"$ref": "#/definitions/list_of_strings"},
+                "network": {"type": "string"},
+                "target": {"type": "string"},
+                "shm_size": {"type": ["integer", "string"]},
+                "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
+                "isolation": {"type": "string"}
+              },
+              "additionalProperties": false
+            }
+          ]
+        },
+        "cap_add": {"$ref": "#/definitions/list_of_strings"},
+        "cap_drop": {"$ref": "#/definitions/list_of_strings"},
+        "cgroup_parent": {"type": "string"},
+        "command": {
+          "oneOf": [
+            {"type": "string"},
+            {"type": "array", "items": {"type": "string"}}
+          ]
+        },
+        "container_name": {"type": "string"},
+        "cpu_count": {"type": "integer", "minimum": 0},
+        "cpu_percent": {"type": "integer", "minimum": 0, "maximum": 100},
+        "cpu_shares": {"type": ["number", "string"]},
+        "cpu_quota": {"type": ["number", "string"]},
+        "cpu_period": {"type": ["number", "string"]},
+        "cpu_rt_period": {"type": ["number", "string"]},
+        "cpu_rt_runtime": {"type": ["number", "string"]},
+        "cpus": {"type": "number", "minimum": 0},
+        "cpuset": {"type": "string"},
+        "depends_on": {
+          "oneOf": [
+            {"$ref": "#/definitions/list_of_strings"},
+            {
+              "type": "object",
+              "additionalProperties": false,
+              "patternProperties": {
+                "^[a-zA-Z0-9._-]+$": {
+                  "type": "object",
+                  "additionalProperties": false,
+                  "properties": {
+                    "condition": {
+                      "type": "string",
+                      "enum": ["service_started", "service_healthy"]
+                    }
+                  },
+                  "required": ["condition"]
+                }
+              }
+            }
+          ]
+        },
+        "device_cgroup_rules": {"$ref": "#/definitions/list_of_strings"},
+        "devices": {"$ref": "#/definitions/list_of_strings"},
+        "dns_opt": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "uniqueItems": true
+        },
+        "dns": {"$ref": "#/definitions/string_or_list"},
+        "dns_search": {"$ref": "#/definitions/string_or_list"},
+        "domainname": {"type": "string"},
+        "entrypoint": {
+          "oneOf": [
+            {"type": "string"},
+            {"type": "array", "items": {"type": "string"}}
+          ]
+        },
+        "env_file": {"$ref": "#/definitions/string_or_list"},
+        "environment": {"$ref": "#/definitions/list_or_dict"},
+
+        "expose": {
+          "type": "array",
+          "items": {
+            "type": ["string", "number"],
+            "format": "expose"
+          },
+          "uniqueItems": true
+        },
+
+        "extends": {
+          "oneOf": [
+            {
+              "type": "string"
+            },
+            {
+              "type": "object",
+
+              "properties": {
+                "service": {"type": "string"},
+                "file": {"type": "string"}
+              },
+              "required": ["service"],
+              "additionalProperties": false
+            }
+          ]
+        },
+
+        "external_links": {"$ref": "#/definitions/list_of_strings"},
+        "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
+        "group_add": {
+            "type": "array",
+            "items": {
+                "type": ["string", "number"]
+            },
+            "uniqueItems": true
+        },
+        "healthcheck": {"$ref": "#/definitions/healthcheck"},
+        "hostname": {"type": "string"},
+        "image": {"type": "string"},
+        "init": {"type": ["boolean", "string"]},
+        "ipc": {"type": "string"},
+        "isolation": {"type": "string"},
+        "labels": {"$ref": "#/definitions/labels"},
+        "links": {"$ref": "#/definitions/list_of_strings"},
+
+        "logging": {
+            "type": "object",
+
+            "properties": {
+                "driver": {"type": "string"},
+                "options": {"type": "object"}
+            },
+            "additionalProperties": false
+        },
+
+        "mac_address": {"type": "string"},
+        "mem_limit": {"type": ["number", "string"]},
+        "mem_reservation": {"type": ["string", "integer"]},
+        "mem_swappiness": {"type": "integer"},
+        "memswap_limit": {"type": ["number", "string"]},
+        "network_mode": {"type": "string"},
+
+        "networks": {
+          "oneOf": [
+            {"$ref": "#/definitions/list_of_strings"},
+            {
+              "type": "object",
+              "patternProperties": {
+                "^[a-zA-Z0-9._-]+$": {
+                  "oneOf": [
+                    {
+                      "type": "object",
+                      "properties": {
+                        "aliases": {"$ref": "#/definitions/list_of_strings"},
+                        "ipv4_address": {"type": "string"},
+                        "ipv6_address": {"type": "string"},
+                        "link_local_ips": {"$ref": "#/definitions/list_of_strings"},
+                        "priority": {"type": "number"}
+                      },
+                      "additionalProperties": false
+                    },
+                    {"type": "null"}
+                  ]
+                }
+              },
+              "additionalProperties": false
+            }
+          ]
+        },
+        "oom_kill_disable": {"type": "boolean"},
+        "oom_score_adj": {"type": "integer", "minimum": -1000, "maximum": 1000},
+        "pid": {"type": ["string", "null"]},
+        "platform": {"type": "string"},
+        "ports": {
+          "type": "array",
+          "items": {
+            "type": ["string", "number"],
+            "format": "ports"
+          },
+          "uniqueItems": true
+        },
+        "privileged": {"type": "boolean"},
+        "read_only": {"type": "boolean"},
+        "restart": {"type": "string"},
+        "runtime": {"type": "string"},
+        "scale": {"type": "integer"},
+        "security_opt": {"$ref": "#/definitions/list_of_strings"},
+        "shm_size": {"type": ["number", "string"]},
+        "sysctls": {"$ref": "#/definitions/list_or_dict"},
+        "pids_limit": {"type": ["number", "string"]},
+        "stdin_open": {"type": "boolean"},
+        "stop_grace_period": {"type": "string", "format": "duration"},
+        "stop_signal": {"type": "string"},
+        "storage_opt": {"type": "object"},
+        "tmpfs": {"$ref": "#/definitions/string_or_list"},
+        "tty": {"type": "boolean"},
+        "ulimits": {
+          "type": "object",
+          "patternProperties": {
+            "^[a-z]+$": {
+              "oneOf": [
+                {"type": "integer"},
+                {
+                  "type":"object",
+                  "properties": {
+                    "hard": {"type": "integer"},
+                    "soft": {"type": "integer"}
+                  },
+                  "required": ["soft", "hard"],
+                  "additionalProperties": false
+                }
+              ]
+            }
+          }
+        },
+        "user": {"type": "string"},
+        "userns_mode": {"type": "string"},
+        "volumes": {
+          "type": "array",
+          "items": {
+            "oneOf": [
+              {"type": "string"},
+              {
+                "type": "object",
+                "required": ["type"],
+                "additionalProperties": false,
+                "properties": {
+                  "type": {"type": "string"},
+                  "source": {"type": "string"},
+                  "target": {"type": "string"},
+                  "read_only": {"type": "boolean"},
+                  "consistency": {"type": "string"},
+                  "bind": {
+                    "type": "object",
+                    "properties": {
+                      "propagation": {"type": "string"}
+                    }
+                  },
+                  "volume": {
+                    "type": "object",
+                    "properties": {
+                      "nocopy": {"type": "boolean"}
+                    }
+                  },
+                  "tmpfs": {
+                    "type": "object",
+                    "properties": {
+                      "size": {"type": ["integer", "string"]}
+                    }
+                  }
+                }
+              }
+            ],
+            "uniqueItems": true
+          }
+        },
+        "volume_driver": {"type": "string"},
+        "volumes_from": {"$ref": "#/definitions/list_of_strings"},
+        "working_dir": {"type": "string"}
+      },
+
+      "dependencies": {
+        "memswap_limit": ["mem_limit"]
+      },
+      "additionalProperties": false
+    },
+
+    "healthcheck": {
+      "id": "#/definitions/healthcheck",
+      "type": "object",
+      "additionalProperties": false,
+      "properties": {
+        "disable": {"type": "boolean"},
+        "interval": {"type": "string"},
+        "retries": {"type": "number"},
+        "start_period": {"type": "string"},
+        "test": {
+          "oneOf": [
+            {"type": "string"},
+            {"type": "array", "items": {"type": "string"}}
+          ]
+        },
+        "timeout": {"type": "string"}
+      }
+    },
+
+    "network": {
+      "id": "#/definitions/network",
+      "type": "object",
+      "properties": {
+        "driver": {"type": "string"},
+        "driver_opts": {
+          "type": "object",
+          "patternProperties": {
+            "^.+$": {"type": ["string", "number"]}
+          }
+        },
+        "ipam": {
+            "type": "object",
+            "properties": {
+                "driver": {"type": "string"},
+                "config": {
+                    "type": "array"
+                },
+                "options": {
+                  "type": "object",
+                  "patternProperties": {
+                    "^.+$": {"type": "string"}
+                  },
+                  "additionalProperties": false
+                }
+            },
+            "additionalProperties": false
+        },
+        "external": {
+          "type": ["boolean", "object"],
+          "properties": {
+            "name": {"type": "string"}
+          },
+          "additionalProperties": false
+        },
+        "internal": {"type": "boolean"},
+        "enable_ipv6": {"type": "boolean"},
+        "labels": {"$ref": "#/definitions/labels"},
+        "name": {"type": "string"}
+      },
+      "additionalProperties": false
+    },
+
+    "volume": {
+      "id": "#/definitions/volume",
+      "type": ["object", "null"],
+      "properties": {
+        "driver": {"type": "string"},
+        "driver_opts": {
+          "type": "object",
+          "patternProperties": {
+            "^.+$": {"type": ["string", "number"]}
+          }
+        },
+        "external": {
+          "type": ["boolean", "object"],
+          "properties": {
+            "name": {"type": "string"}
+          },
+          "additionalProperties": false
+        },
+        "labels": {"$ref": "#/definitions/labels"},
+        "name": {"type": "string"}
+      },
+      "additionalProperties": false
+    },
+
+    "string_or_list": {
+      "oneOf": [
+        {"type": "string"},
+        {"$ref": "#/definitions/list_of_strings"}
+      ]
+    },
+
+    "list_of_strings": {
+      "type": "array",
+      "items": {"type": "string"},
+      "uniqueItems": true
+    },
+
+    "list_or_dict": {
+      "oneOf": [
+        {
+          "type": "object",
+          "patternProperties": {
+            ".+": {
+              "type": ["string", "number", "null"]
+            }
+          },
+          "additionalProperties": false
+        },
+        {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
+      ]
+    },
+
+    "labels": {
+      "oneOf": [
+        {
+          "type": "object",
+          "patternProperties": {
+            ".+": {
+              "type": "string"
+            }
+          },
+          "additionalProperties": false
+        },
+        {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
+      ]
+    },
+
+    "blkio_limit": {
+      "type": "object",
+      "properties": {
+        "path": {"type": "string"},
+        "rate": {"type": ["integer", "string"]}
+      },
+      "additionalProperties": false
+    },
+    "blkio_weight": {
+      "type": "object",
+      "properties": {
+        "path": {"type": "string"},
+        "weight": {"type": "integer"}
+      },
+      "additionalProperties": false
+    },
+
+    "constraints": {
+      "service": {
+        "id": "#/definitions/constraints/service",
+        "anyOf": [
+          {"required": ["build"]},
+          {"required": ["image"]}
+        ],
+        "properties": {
+          "build": {
+            "required": ["context"]
+          }
+        }
+      }
+    }
+  }
+}

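Because Compose validates configuration with `jsonschema` (see the pinned dependency in `setup.py` below), the new draft-04 schema can be exercised directly. The snippet below validates the new 2.4 service keys against a trimmed-down inline fragment rather than the full `config_schema_v2.4.json`; the fragment and the sample service are illustrative only:

```python
from jsonschema import Draft4Validator

# Trimmed fragment mirroring a few of the new v2.4 service keys above.
SERVICE_FRAGMENT = {
    "type": "object",
    "properties": {
        "image": {"type": "string"},
        "platform": {"type": "string"},
        "cpu_rt_period": {"type": ["number", "string"]},
        "cpu_rt_runtime": {"type": ["number", "string"]},
        "build": {
            "type": "object",
            "properties": {
                "context": {"type": "string"},
                "isolation": {"type": "string"},
            },
        },
    },
}

service = {
    "image": "busybox",
    "platform": "linux/amd64",
    "cpu_rt_period": "1400us",
    "cpu_rt_runtime": "400ms",
    "build": {"context": ".", "isolation": "default"},
}

Draft4Validator.check_schema(SERVICE_FRAGMENT)
Draft4Validator(SERVICE_FRAGMENT).validate(service)  # raises ValidationError on mismatch
print("sample service is valid under the trimmed v2.4 fragment")
```
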
+ 13 - 3
compose/config/interpolation.py

@@ -10,6 +10,7 @@ import six
 from .errors import ConfigurationError
 from compose.const import COMPOSEFILE_V2_0 as V2_0
 from compose.utils import parse_bytes
+from compose.utils import parse_nanoseconds_int
 
 
 log = logging.getLogger(__name__)
@@ -132,9 +133,8 @@ class TemplateWithDefaults(Template):
             braced = mo.group('braced')
             if braced is not None:
                 sep = mo.group('sep')
-                result = self.process_braced_group(braced, sep, mapping)
-                if result:
-                    return result
+                if sep:
+                    return self.process_braced_group(braced, sep, mapping)
 
             if named is not None:
                 val = mapping[named]
@@ -223,6 +223,12 @@ def bytes_to_int(s):
     return v
 
 
+def to_microseconds(v):
+    if not isinstance(v, six.string_types):
+        return v
+    return int(parse_nanoseconds_int(v) / 1000)
+
+
 class ConversionMap(object):
     map = {
         service_path('blkio_config', 'weight'): to_int,
@@ -230,6 +236,10 @@ class ConversionMap(object):
         service_path('build', 'labels', FULL_JOKER): to_str,
         service_path('cpus'): to_float,
         service_path('cpu_count'): to_int,
+        service_path('cpu_quota'): to_microseconds,
+        service_path('cpu_period'): to_microseconds,
+        service_path('cpu_rt_period'): to_microseconds,
+        service_path('cpu_rt_runtime'): to_microseconds,
         service_path('configs', 'mode'): to_int,
         service_path('secrets', 'mode'): to_int,
         service_path('healthcheck', 'retries'): to_int,

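`to_microseconds` delegates to `compose.utils.parse_nanoseconds_int`, which is not part of this diff; the sketch below substitutes a small hypothetical parser with the assumed contract (duration string in, integer nanoseconds out) purely to show the conversion applied to `cpu_period`, `cpu_quota`, `cpu_rt_period` and `cpu_rt_runtime` (Python 3 only, so plain `str` replaces `six.string_types`):

```python
import re

# Hypothetical stand-in for compose.utils.parse_nanoseconds_int:
# "1400us" -> 1_400_000 ns, "1ms" -> 1_000_000 ns, etc.
_UNITS_NS = {'us': 10**3, 'ms': 10**6, 's': 10**9, 'm': 60 * 10**9, 'h': 3600 * 10**9}


def parse_nanoseconds_int(value):
    match = re.match(r'^(\d+)(us|ms|s|m|h)$', value)
    if not match:
        raise ValueError('unrecognized duration: {!r}'.format(value))
    number, unit = match.groups()
    return int(number) * _UNITS_NS[unit]


def to_microseconds(v):
    # Numbers pass through untouched; duration strings are converted.
    if not isinstance(v, str):
        return v
    return int(parse_nanoseconds_int(v) / 1000)


print(to_microseconds('1ms'))     # 1000
print(to_microseconds('1400us'))  # 1400
print(to_microseconds(100000))    # 100000
```
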
+ 3 - 2
compose/config/serialize.py

@@ -151,9 +151,10 @@ def denormalize_service_dict(service_dict, version, image_digest=None):
             service_dict['healthcheck']['start_period'] = serialize_ns_time_value(
                 service_dict['healthcheck']['start_period']
             )
-    if 'ports' in service_dict and version < V3_2:
+
+    if 'ports' in service_dict:
         service_dict['ports'] = [
-            p.legacy_repr() if isinstance(p, types.ServicePort) else p
+            p.legacy_repr() if p.external_ip or version < V3_2 else p
             for p in service_dict['ports']
         ]
     if 'volumes' in service_dict and (version < V2_3 or (version > V3_0 and version < V3_2)):

+ 3 - 0
compose/const.py

@@ -27,6 +27,7 @@ COMPOSEFILE_V2_0 = ComposeVersion('2.0')
 COMPOSEFILE_V2_1 = ComposeVersion('2.1')
 COMPOSEFILE_V2_2 = ComposeVersion('2.2')
 COMPOSEFILE_V2_3 = ComposeVersion('2.3')
+COMPOSEFILE_V2_4 = ComposeVersion('2.4')
 
 COMPOSEFILE_V3_0 = ComposeVersion('3.0')
 COMPOSEFILE_V3_1 = ComposeVersion('3.1')
@@ -42,6 +43,7 @@ API_VERSIONS = {
     COMPOSEFILE_V2_1: '1.24',
     COMPOSEFILE_V2_2: '1.25',
     COMPOSEFILE_V2_3: '1.30',
+    COMPOSEFILE_V2_4: '1.35',
     COMPOSEFILE_V3_0: '1.25',
     COMPOSEFILE_V3_1: '1.25',
     COMPOSEFILE_V3_2: '1.25',
@@ -57,6 +59,7 @@ API_VERSION_TO_ENGINE_VERSION = {
     API_VERSIONS[COMPOSEFILE_V2_1]: '1.12.0',
     API_VERSIONS[COMPOSEFILE_V2_2]: '1.13.0',
     API_VERSIONS[COMPOSEFILE_V2_3]: '17.06.0',
+    API_VERSIONS[COMPOSEFILE_V2_4]: '17.12.0',
     API_VERSIONS[COMPOSEFILE_V3_0]: '1.13.0',
     API_VERSIONS[COMPOSEFILE_V3_1]: '1.13.0',
     API_VERSIONS[COMPOSEFILE_V3_2]: '1.13.0',

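The two dictionaries in `compose/const.py` chain a Compose file format to its minimum API version and from there to a minimum Engine release; resolving the new 2.4 entry works like this (values copied from the hunks above, 2.x formats only):

```python
# Copied from the const.py hunks above (2.x formats only).
API_VERSIONS = {'2.1': '1.24', '2.2': '1.25', '2.3': '1.30', '2.4': '1.35'}
API_VERSION_TO_ENGINE_VERSION = {'1.24': '1.12.0', '1.25': '1.13.0', '1.30': '17.06.0', '1.35': '17.12.0'}


def min_engine_for(compose_file_version):
    api_version = API_VERSIONS[compose_file_version]
    return API_VERSION_TO_ENGINE_VERSION[api_version]


print(min_engine_for('2.4'))  # 17.12.0, matching the changelog entry for file format 2.4
```
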
+ 5 - 0
compose/network.py

@@ -42,6 +42,11 @@ class Network(object):
 
     def ensure(self):
         if self.external:
+            if self.driver == 'overlay':
+                # Swarm nodes do not register overlay networks that were
+                # created on a different node unless they're in use.
+                # See docker/compose#4399
+                return
             try:
                 self.inspect()
                 log.debug(

+ 5 - 4
compose/project.py

@@ -77,7 +77,7 @@ class Project(object):
         return labels
 
     @classmethod
-    def from_config(cls, name, config_data, client):
+    def from_config(cls, name, config_data, client, default_platform=None):
         """
         Construct a Project from a config.Config object.
         """
@@ -128,6 +128,7 @@ class Project(object):
                     volumes_from=volumes_from,
                     secrets=secrets,
                     pid_mode=pid_mode,
+                    platform=service_dict.pop('platform', default_platform),
                     **service_dict)
             )
 
@@ -366,10 +367,10 @@ class Project(object):
         return containers
 
     def build(self, service_names=None, no_cache=False, pull=False, force_rm=False, memory=None,
-              build_args=None):
+              build_args=None, gzip=False):
         for service in self.get_services(service_names):
             if service.can_be_built():
-                service.build(no_cache, pull, force_rm, memory, build_args)
+                service.build(no_cache, pull, force_rm, memory, build_args, gzip)
             else:
                 log.info('%s uses an image, skipping' % service.name)
 
@@ -551,7 +552,7 @@ class Project(object):
                 services,
                 pull_service,
                 operator.attrgetter('name'),
-                'Pulling',
+                not silent and 'Pulling' or None,
                 limit=5,
             )
             if len(errors):

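The `service_dict.pop('platform', default_platform)` call gives an explicit per-service `platform` precedence over the `DOCKER_DEFAULT_PLATFORM` value that `get_project` now passes in; a minimal sketch of that precedence:

```python
def resolve_platform(service_dict, default_platform=None):
    # An explicit per-service "platform" wins; the environment-provided
    # default (DOCKER_DEFAULT_PLATFORM) only applies when the key is absent.
    return service_dict.pop('platform', default_platform)


print(resolve_platform({'image': 'busybox', 'platform': 'linux/arm64'}, 'linux/amd64'))  # linux/arm64
print(resolve_platform({'image': 'busybox'}, 'linux/amd64'))                             # linux/amd64
print(resolve_platform({'image': 'busybox'}))                                            # None
```
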
+ 46 - 9
compose/service.py

@@ -62,7 +62,10 @@ HOST_CONFIG_KEYS = [
     'cgroup_parent',
     'cpu_count',
     'cpu_percent',
+    'cpu_period',
     'cpu_quota',
+    'cpu_rt_period',
+    'cpu_rt_runtime',
     'cpu_shares',
     'cpus',
     'cpuset',
@@ -682,15 +685,27 @@ class Service(object):
     # TODO: this would benefit from github.com/docker/docker/pull/14699
     # to remove the need to inspect every container
     def _next_container_number(self, one_off=False):
-        containers = filter(None, [
-            Container.from_ps(self.client, container)
-            for container in self.client.containers(
-                all=True,
-                filters={'label': self.labels(one_off=one_off)})
-        ])
+        containers = self._fetch_containers(
+            all=True,
+            filters={'label': self.labels(one_off=one_off)}
+        )
         numbers = [c.number for c in containers]
         return 1 if not numbers else max(numbers) + 1
 
+    def _fetch_containers(self, **fetch_options):
+        # Account for containers that might have been removed since we fetched
+        # the list.
+        def soft_inspect(container):
+            try:
+                return Container.from_id(self.client, container['Id'])
+            except NotFound:
+                return None
+
+        return filter(None, [
+            soft_inspect(container)
+            for container in self.client.containers(**fetch_options)
+        ])
+
     def _get_aliases(self, network, container=None):
         return list(
             {self.name} |
@@ -947,6 +962,9 @@ class Service(object):
             device_write_iops=blkio_config.get('device_write_iops'),
             mounts=options.get('mounts'),
             device_cgroup_rules=options.get('device_cgroup_rules'),
+            cpu_period=options.get('cpu_period'),
+            cpu_rt_period=options.get('cpu_rt_period'),
+            cpu_rt_runtime=options.get('cpu_rt_runtime'),
         )
 
     def get_secret_volumes(self):
@@ -961,7 +979,8 @@ class Service(object):
 
         return [build_spec(secret) for secret in self.secrets]
 
-    def build(self, no_cache=False, pull=False, force_rm=False, memory=None, build_args_override=None):
+    def build(self, no_cache=False, pull=False, force_rm=False, memory=None, build_args_override=None,
+              gzip=False):
         log.info('Building %s' % self.name)
 
         build_opts = self.options.get('build', {})
@@ -979,6 +998,12 @@ class Service(object):
         if not six.PY3 and not IS_WINDOWS_PLATFORM:
             path = path.encode('utf8')
 
+        platform = self.options.get('platform')
+        if platform and version_lt(self.client.api_version, '1.35'):
+            raise OperationFailedError(
+                'Impossible to perform platform-targeted builds for API version < 1.35'
+            )
+
         build_output = self.client.build(
             path=path,
             tag=self.image_name,
@@ -997,6 +1022,9 @@ class Service(object):
             container_limits={
                 'memory': parse_bytes(memory) if memory else None
             },
+            gzip=gzip,
+            isolation=build_opts.get('isolation', self.options.get('isolation', None)),
+            platform=platform,
         )
 
         try:
@@ -1098,11 +1126,20 @@ class Service(object):
             return
 
         repo, tag, separator = parse_repository_tag(self.options['image'])
-        tag = tag or 'latest'
+        kwargs = {
+            'tag': tag or 'latest',
+            'stream': True,
+            'platform': self.options.get('platform'),
+        }
         if not silent:
             log.info('Pulling %s (%s%s%s)...' % (self.name, repo, separator, tag))
+
+        if kwargs['platform'] and version_lt(self.client.api_version, '1.35'):
+            raise OperationFailedError(
+                'Impossible to perform platform-targeted pulls for API version < 1.35'
+            )
         try:
-            output = self.client.pull(repo, tag=tag, stream=True)
+            output = self.client.pull(repo, **kwargs)
             if silent:
                 with open(os.devnull, 'w') as devnull:
                     return progress_stream.get_digest_from_pull(

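The new `_fetch_containers` helper tolerates the race where a container returned by the list call is removed before it can be inspected. The standalone sketch below uses a stub client and a local `NotFound` class in place of docker-py, just to show the skip-on-missing pattern:

```python
class NotFound(Exception):
    """Stand-in for docker.errors.NotFound used by the real client."""


class FakeClient(object):
    def containers(self, **options):
        return [{'Id': 'alive'}, {'Id': 'gone'}]

    def inspect(self, container_id):
        if container_id == 'gone':
            raise NotFound(container_id)  # removed between list and inspect
        return {'Id': container_id}


def fetch_containers(client, **fetch_options):
    def soft_inspect(container):
        try:
            return client.inspect(container['Id'])
        except NotFound:
            return None  # container vanished; skip it rather than fail

    return [c for c in map(soft_inspect, client.containers(**fetch_options)) if c]


print(fetch_containers(FakeClient(), all=True))  # [{'Id': 'alive'}]
```
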
+ 41 - 13
compose/volume.py

@@ -124,19 +124,7 @@ class ProjectVolumes(object):
                     )
                     volume.create()
                 else:
-                    driver = volume.inspect()['Driver']
-                    if volume.driver is not None and driver != volume.driver:
-                        raise ConfigurationError(
-                            'Configuration for volume {0} specifies driver '
-                            '{1}, but a volume with the same name uses a '
-                            'different driver ({3}). If you wish to use the '
-                            'new configuration, please remove the existing '
-                            'volume "{2}" first:\n'
-                            '$ docker volume rm {2}'.format(
-                                volume.name, volume.driver, volume.full_name,
-                                volume.inspect()['Driver']
-                            )
-                        )
+                    check_remote_volume_config(volume.inspect(), volume)
         except NotFound:
             raise ConfigurationError(
                 'Volume %s specifies nonexistent driver %s' % (volume.name, volume.driver)
@@ -152,3 +140,43 @@ class ProjectVolumes(object):
         else:
             volume_spec.source = self.volumes[volume_spec.source].full_name
             return volume_spec
+
+
+class VolumeConfigChangedError(ConfigurationError):
+    def __init__(self, local, property_name, local_value, remote_value):
+        super(VolumeConfigChangedError, self).__init__(
+            'Configuration for volume {vol_name} specifies {property_name} '
+            '{local_value}, but a volume with the same name uses a different '
+            '{property_name} ({remote_value}). If you wish to use the new '
+            'configuration, please remove the existing volume "{full_name}" '
+            'first:\n$ docker volume rm {full_name}'.format(
+                vol_name=local.name, property_name=property_name,
+                local_value=local_value, remote_value=remote_value,
+                full_name=local.full_name
+            )
+        )
+
+
+def check_remote_volume_config(remote, local):
+    if local.driver and remote.get('Driver') != local.driver:
+        raise VolumeConfigChangedError(local, 'driver', local.driver, remote.get('Driver'))
+    local_opts = local.driver_opts or {}
+    remote_opts = remote.get('Options') or {}
+    for k in set.union(set(remote_opts.keys()), set(local_opts.keys())):
+        if k.startswith('com.docker.'):  # These options are set internally
+            continue
+        if remote_opts.get(k) != local_opts.get(k):
+            raise VolumeConfigChangedError(
+                local, '"{}" driver_opt'.format(k), local_opts.get(k), remote_opts.get(k),
+            )
+
+    local_labels = local.labels or {}
+    remote_labels = remote.get('Labels') or {}
+    for k in set.union(set(remote_labels.keys()), set(local_labels.keys())):
+        if k.startswith('com.docker.'):  # We are only interested in user-specified labels
+            continue
+        if remote_labels.get(k) != local_labels.get(k):
+            log.warn(
+                'Volume {}: label "{}" has changed. It may need to be'
+                ' recreated.'.format(local.name, k)
+            )

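`check_remote_volume_config` compares driver options over the union of local and remote keys and ignores the internally managed `com.docker.*` entries. A self-contained sketch of just that comparison (the sample option values, including the `com.docker.internal` key, are made up for illustration):

```python
def diff_driver_opts(local_opts, remote_opts):
    """Return the option keys whose values differ, ignoring com.docker.* keys."""
    local_opts = local_opts or {}
    remote_opts = remote_opts or {}
    return sorted(
        key
        for key in set(local_opts) | set(remote_opts)
        if not key.startswith('com.docker.')
        and local_opts.get(key) != remote_opts.get(key)
    )


local = {'type': 'nfs', 'o': 'addr=10.0.0.1'}
remote = {'type': 'nfs', 'o': 'addr=10.0.0.2', 'com.docker.internal': 'x'}
print(diff_driver_opts(local, remote))  # ['o'] -- Compose raises VolumeConfigChangedError here
```
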
+ 1 - 1
contrib/completion/zsh/_docker-compose

@@ -88,7 +88,7 @@ __docker-compose_get_services() {
     shift
     [[ $kind =~ (stopped|all) ]] && args=($args -a)
 
-    lines=(${(f)"$(_call_program commands docker $docker_options ps $args)"})
+    lines=(${(f)"$(_call_program commands docker $docker_options ps --format 'table' $args)"})
     services=(${(f)"$(_call_program commands docker-compose 2>/dev/null $compose_options ps -q)"})
 
     # Parse header line to find columns

+ 116 - 0
contrib/update/update-docker-compose.ps1

@@ -0,0 +1,116 @@
+# Self-elevate the script if required
+# http://www.expta.com/2017/03/how-to-self-elevate-powershell-script.html
+If (-Not ([Security.Principal.WindowsPrincipal] [Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] 'Administrator')) {
+    If ([int](Get-CimInstance -Class Win32_OperatingSystem | Select-Object -ExpandProperty BuildNumber) -ge 6000) {
+        $CommandLine = "-File `"" + $MyInvocation.MyCommand.Path + "`" " + $MyInvocation.UnboundArguments
+        Start-Process -FilePath PowerShell.exe -Verb Runas -ArgumentList $CommandLine
+        Exit
+    }
+}
+
+$SectionSeparator = "--------------------------------------------------"
+
+# Update docker-compose if required
+Function UpdateDockerCompose() {
+    Write-Host "Updating docker-compose if required..."
+    Write-Host $SectionSeparator
+
+    # Find the installed docker-compose.exe location
+    Try {
+        $DockerComposePath = Get-Command docker-compose.exe -ErrorAction Stop | `
+            Select-Object -First 1 -ExpandProperty Definition
+    }
+    Catch {
+        Write-Host "Error: Could not find path to docker-compose.exe" `
+            -ForegroundColor Red
+        Return $false
+    }
+
+    # Prefer/enable TLS 1.2
+    # https://stackoverflow.com/a/48030563/153079
+    [Net.ServicePointManager]::SecurityProtocol = "tls12, tls11, tls"
+
+    # Query for the latest release version
+    Try {
+        $URI = "https://api.github.com/repos/docker/compose/releases/latest"
+        $LatestComposeVersion = [System.Version](Invoke-RestMethod -Method Get -Uri $URI).tag_name
+    }
+    Catch {
+        Write-Host "Error: Query for the latest docker-compose release version failed" `
+            -ForegroundColor Red
+        Return $false
+    }
+
+    # Check the installed version and compare with latest release
+    $UpdateDockerCompose = $false
+    Try {
+        $InstalledComposeVersion = `
+            [System.Version]((docker-compose.exe version --short) | Out-String)
+
+        If ($InstalledComposeVersion -eq $LatestComposeVersion) {
+            Write-Host ("Installed docker-compose version ({0}) same as latest ({1})." `
+                -f $InstalledComposeVersion.ToString(), $LatestComposeVersion.ToString())
+        }
+        ElseIf ($InstalledComposeVersion -lt $LatestComposeVersion) {
+            Write-Host ("Installed docker-compose version ({0}) older than latest ({1})." `
+                -f $InstalledComposeVersion.ToString(), $LatestComposeVersion.ToString())
+            $UpdateDockerCompose = $true
+        }
+        Else {
+            Write-Host ("Installed docker-compose version ({0}) newer than latest ({1})." `
+                -f $InstalledComposeVersion.ToString(), $LatestComposeVersion.ToString()) `
+                -ForegroundColor Yellow
+        }
+    }
+    Catch {
+        Write-Host `
+            "Warning: Couldn't get docker-compose version, assuming an update is required..." `
+            -ForegroundColor Yellow
+        $UpdateDockerCompose = $true
+    }
+
+    If (-Not $UpdateDockerCompose) {
+        # Nothing to do!
+        Return $false
+    }
+
+    # Download the latest version of docker-compose.exe
+    Try {
+        $RemoteFileName = "docker-compose-Windows-x86_64.exe"
+        $URI = ("https://github.com/docker/compose/releases/download/{0}/{1}" `
+            -f $LatestComposeVersion.ToString(), $RemoteFileName)
+        Invoke-WebRequest -UseBasicParsing -Uri $URI `
+            -OutFile $DockerComposePath
+        Return $true
+    }
+    Catch {
+        Write-Host ("Error: Failed to download the latest version of docker-compose`n{0}" `
+            -f $_.Exception.Message) -ForegroundColor Red
+        Return $false
+    }
+
+    Return $false
+}
+
+If (UpdateDockerCompose) {
+    Write-Host "Updated to the latest version of docker-compose, running update again to verify.`n"
+    If (UpdateDockerCompose) {
+        Write-Host "Error: Should not have updated twice." -ForegroundColor Red
+    }
+}
+
+# Assuming elevation popped up a new powershell window, pause so the user can see what happened
+# https://stackoverflow.com/a/22362868/153079
+Function Pause ($Message = "Press any key to continue . . . ") {
+    If ((Test-Path variable:psISE) -and $psISE) {
+        $Shell = New-Object -ComObject "WScript.Shell"
+        $Shell.Popup("Click OK to continue.", 0, "Script Paused", 0)
+    }
+    Else {
+        Write-Host "`n$SectionSeparator"
+        Write-Host -NoNewline $Message
+        [void][System.Console]::ReadKey($true)
+        Write-Host
+    }
+}
+Pause

+ 5 - 0
docker-compose.spec

@@ -42,6 +42,11 @@ exe = EXE(pyz,
                 'compose/config/config_schema_v2.3.json',
                 'DATA'
             ),
+            (
+                'compose/config/config_schema_v2.4.json',
+                'compose/config/config_schema_v2.4.json',
+                'DATA'
+            ),
             (
                 'compose/config/config_schema_v3.0.json',
                 'compose/config/config_schema_v3.0.json',

+ 50 - 0
docs/issue_template.md

@@ -0,0 +1,50 @@
+<!--
+Welcome to the docker-compose issue tracker! Before creating an issue, please heed the following:
+
+1. This tracker should only be used to report bugs and request features / enhancements to docker-compose
+    - For questions and general support, use https://forums.docker.com
+    - For documentation issues, use https://github.com/docker/docker.github.io
+    - For issues with the `docker stack` commands and version 3 of the Compose file, use
+      https://github.com/docker/cli
+2. Use the search function before creating a new issue. Duplicates will be closed and directed to
+   the original discussion.
+3. When making a bug report, make sure you provide all required information. The easier it is for
+   maintainers to reproduce, the faster it'll be fixed.
+-->
+
+## Description of the issue
+
+## Context information (for bug reports)
+
+```
+Output of "docker-compose version"
+```
+
+```
+Output of "docker version"
+```
+
+```
+Output of "docker-compose config"
+```
+
+
+## Steps to reproduce the issue
+
+1.
+2.
+3.
+
+### Observed result
+
+### Expected result
+
+### Stacktrace / full error message
+
+```
+(if applicable)
+```
+
+## Additional information
+
+OS version / distribution, `docker-compose` install method, etc.

+ 13 - 0
docs/pull_request_template.md

@@ -0,0 +1,13 @@
+<!--
+Welcome to docker-compose, and thank you for your interest
+in contributing to the project! Please make sure you've read the guidelines
+in CONTRIBUTING.md before submitting your pull request. Contributions that
+do not comply and contributions with failing tests will not be reviewed!
+-->
+
+<!-- Please make sure an issue describing the problem the PR is trying to
+    solve exists, or create it before submitting a PR. The maintainers will
+    validate if the issue should be addressed or if it is out of scope for the
+    project.
+-->
+Resolves #

+ 1 - 1
requirements.txt

@@ -2,7 +2,7 @@ backports.ssl-match-hostname==3.5.0.1; python_version < '3'
 cached-property==1.3.0
 certifi==2017.4.17
 chardet==3.0.4
-docker==3.1.4
+docker==3.2.1
 docker-pycreds==0.2.1
 dockerpty==0.4.1
 docopt==0.6.2

+ 1 - 1
script/run/run.sh

@@ -15,7 +15,7 @@
 
 set -e
 
-VERSION="1.20.1"
+VERSION="1.21.0-rc1"
 IMAGE="docker/compose:$VERSION"
 
 

+ 8 - 1
script/test/versions.py

@@ -73,6 +73,11 @@ class Version(namedtuple('_Version', 'major minor patch rc edition')):
         return '.'.join(map(str, self[:3])) + edition + rc
 
 
+BLACKLIST = [  # List of versions known to be broken and should not be used
+    Version.parse('18.03.0-ce-rc2'),
+]
+
+
 def group_versions(versions):
     """Group versions by `major.minor` releases.
 
@@ -117,7 +122,9 @@ def get_default(versions):
 def get_versions(tags):
     for tag in tags:
         try:
-            yield Version.parse(tag['name'])
+            v = Version.parse(tag['name'])
+            if v not in BLACKLIST:
+                yield v
         except ValueError:
             print("Skipping invalid tag: {name}".format(**tag), file=sys.stderr)
 

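The new `BLACKLIST` filters out Engine tags known to be broken before they are considered by the test script; a simplified sketch of the same filtering, using plain strings instead of the script's `Version` namedtuple (the sample tags are illustrative):

```python
BLACKLIST = ['18.03.0-ce-rc2']  # tags known to be broken, per the hunk above


def get_versions(tags):
    for tag in tags:
        name = tag['name']
        if name in BLACKLIST:
            continue  # skip known-bad releases
        yield name


tags = [{'name': '18.02.0-ce'}, {'name': '18.03.0-ce-rc2'}, {'name': '18.03.0-ce-rc3'}]
print(list(get_versions(tags)))  # ['18.02.0-ce', '18.03.0-ce-rc3']
```
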
+ 1 - 1
setup.py

@@ -36,7 +36,7 @@ install_requires = [
     'requests >= 2.6.1, != 2.11.0, != 2.12.2, != 2.18.0, < 2.19',
     'texttable >= 0.9.0, < 0.10',
     'websocket-client >= 0.32.0, < 1.0',
-    'docker >= 3.1.4, < 4.0',
+    'docker >= 3.2.1, < 4.0',
     'dockerpty >= 0.4.1, < 0.5',
     'six >= 1.3.0, < 2',
     'jsonschema >= 2.5.1, < 3',

+ 57 - 42
tests/acceptance/cli_test.py

@@ -177,6 +177,13 @@ class CLITestCase(DockerClientTestCase):
             returncode=0
         )
 
+    def test_shorthand_host_opt_interactive(self):
+        self.dispatch(
+            ['-H={0}'.format(os.environ.get('DOCKER_HOST', 'unix://')),
+             'run', 'another', 'ls'],
+            returncode=0
+        )
+
     def test_host_not_reachable(self):
         result = self.dispatch(['-H=tcp://doesnotexist:8000', 'ps'], returncode=1)
         assert "Couldn't connect to Docker daemon" in result.stderr
@@ -491,16 +498,16 @@ class CLITestCase(DockerClientTestCase):
     def test_ps(self):
         self.project.get_service('simple').create_container()
         result = self.dispatch(['ps'])
-        assert 'simplecomposefile_simple_1' in result.stdout
+        assert 'simple-composefile_simple_1' in result.stdout
 
     def test_ps_default_composefile(self):
         self.base_dir = 'tests/fixtures/multiple-composefiles'
         self.dispatch(['up', '-d'])
         result = self.dispatch(['ps'])
 
-        assert 'multiplecomposefiles_simple_1' in result.stdout
-        assert 'multiplecomposefiles_another_1' in result.stdout
-        assert 'multiplecomposefiles_yetanother_1' not in result.stdout
+        assert 'multiple-composefiles_simple_1' in result.stdout
+        assert 'multiple-composefiles_another_1' in result.stdout
+        assert 'multiple-composefiles_yetanother_1' not in result.stdout
 
     def test_ps_alternate_composefile(self):
         config_path = os.path.abspath(
@@ -511,9 +518,9 @@ class CLITestCase(DockerClientTestCase):
         self.dispatch(['-f', 'compose2.yml', 'up', '-d'])
         result = self.dispatch(['-f', 'compose2.yml', 'ps'])
 
-        assert 'multiplecomposefiles_simple_1' not in result.stdout
-        assert 'multiplecomposefiles_another_1' not in result.stdout
-        assert 'multiplecomposefiles_yetanother_1' in result.stdout
+        assert 'multiple-composefiles_simple_1' not in result.stdout
+        assert 'multiple-composefiles_another_1' not in result.stdout
+        assert 'multiple-composefiles_yetanother_1' in result.stdout
 
     def test_ps_services_filter_option(self):
         self.base_dir = 'tests/fixtures/ps-services-filter'
@@ -545,13 +552,11 @@ class CLITestCase(DockerClientTestCase):
 
     def test_pull(self):
         result = self.dispatch(['pull'])
-        assert sorted(result.stderr.split('\n'))[1:] == [
-            'Pulling another (busybox:latest)...',
-            'Pulling simple (busybox:latest)...',
-        ]
+        assert 'Pulling simple' in result.stderr
+        assert 'Pulling another' in result.stderr
 
     def test_pull_with_digest(self):
-        result = self.dispatch(['-f', 'digest.yml', 'pull'])
+        result = self.dispatch(['-f', 'digest.yml', 'pull', '--no-parallel'])
 
         assert 'Pulling simple (busybox:latest)...' in result.stderr
         assert ('Pulling digest (busybox@'
@@ -561,7 +566,7 @@ class CLITestCase(DockerClientTestCase):
     def test_pull_with_ignore_pull_failures(self):
         result = self.dispatch([
             '-f', 'ignore-pull-failures.yml',
-            'pull', '--ignore-pull-failures']
+            'pull', '--ignore-pull-failures', '--no-parallel']
         )
 
         assert 'Pulling simple (busybox:latest)...' in result.stderr
@@ -576,7 +581,7 @@ class CLITestCase(DockerClientTestCase):
 
     def test_pull_with_parallel_failure(self):
         result = self.dispatch([
-            '-f', 'ignore-pull-failures.yml', 'pull', '--parallel'],
+            '-f', 'ignore-pull-failures.yml', 'pull'],
             returncode=1
         )
 
@@ -593,14 +598,14 @@ class CLITestCase(DockerClientTestCase):
 
     def test_pull_with_no_deps(self):
         self.base_dir = 'tests/fixtures/links-composefile'
-        result = self.dispatch(['pull', 'web'])
+        result = self.dispatch(['pull', '--no-parallel', 'web'])
         assert sorted(result.stderr.split('\n'))[1:] == [
             'Pulling web (busybox:latest)...',
         ]
 
     def test_pull_with_include_deps(self):
         self.base_dir = 'tests/fixtures/links-composefile'
-        result = self.dispatch(['pull', '--include-deps', 'web'])
+        result = self.dispatch(['pull', '--no-parallel', '--include-deps', 'web'])
         assert sorted(result.stderr.split('\n'))[1:] == [
             'Pulling db (busybox:latest)...',
             'Pulling web (busybox:latest)...',
@@ -902,18 +907,18 @@ class CLITestCase(DockerClientTestCase):
         assert len(self.project.containers(one_off=OneOffFilter.only, stopped=True)) == 2
 
         result = self.dispatch(['down', '--rmi=local', '--volumes'])
-        assert 'Stopping v2full_web_1' in result.stderr
-        assert 'Stopping v2full_other_1' in result.stderr
-        assert 'Stopping v2full_web_run_2' in result.stderr
-        assert 'Removing v2full_web_1' in result.stderr
-        assert 'Removing v2full_other_1' in result.stderr
-        assert 'Removing v2full_web_run_1' in result.stderr
-        assert 'Removing v2full_web_run_2' in result.stderr
-        assert 'Removing volume v2full_data' in result.stderr
-        assert 'Removing image v2full_web' in result.stderr
+        assert 'Stopping v2-full_web_1' in result.stderr
+        assert 'Stopping v2-full_other_1' in result.stderr
+        assert 'Stopping v2-full_web_run_2' in result.stderr
+        assert 'Removing v2-full_web_1' in result.stderr
+        assert 'Removing v2-full_other_1' in result.stderr
+        assert 'Removing v2-full_web_run_1' in result.stderr
+        assert 'Removing v2-full_web_run_2' in result.stderr
+        assert 'Removing volume v2-full_data' in result.stderr
+        assert 'Removing image v2-full_web' in result.stderr
         assert 'Removing image busybox' not in result.stderr
-        assert 'Removing network v2full_default' in result.stderr
-        assert 'Removing network v2full_front' in result.stderr
+        assert 'Removing network v2-full_default' in result.stderr
+        assert 'Removing network v2-full_front' in result.stderr
 
     def test_down_timeout(self):
         self.dispatch(['up', '-d'], None)
@@ -1559,6 +1564,16 @@ class CLITestCase(DockerClientTestCase):
         assert stdout == "operator\n"
         assert stderr == ""
 
+    @v3_only()
+    def test_exec_workdir(self):
+        self.base_dir = 'tests/fixtures/links-composefile'
+        os.environ['COMPOSE_API_VERSION'] = '1.35'
+        self.dispatch(['up', '-d', 'console'])
+        assert len(self.project.containers()) == 1
+
+        stdout, stderr = self.dispatch(['exec', '-T', '--workdir', '/etc', 'console', 'ls'])
+        assert 'passwd' in stdout
+
     @v2_2_only()
     def test_exec_service_with_environment_overridden(self):
         name = 'service'
@@ -1990,39 +2005,39 @@ class CLITestCase(DockerClientTestCase):
         proc = start_process(self.base_dir, ['run', '-T', 'simple', 'top'])
         wait_on_condition(ContainerStateCondition(
             self.project.client,
-            'simplecomposefile_simple_run_1',
+            'simple-composefile_simple_run_1',
             'running'))
 
         os.kill(proc.pid, signal.SIGINT)
         wait_on_condition(ContainerStateCondition(
             self.project.client,
-            'simplecomposefile_simple_run_1',
+            'simple-composefile_simple_run_1',
             'exited'))
 
     def test_run_handles_sigterm(self):
         proc = start_process(self.base_dir, ['run', '-T', 'simple', 'top'])
         wait_on_condition(ContainerStateCondition(
             self.project.client,
-            'simplecomposefile_simple_run_1',
+            'simple-composefile_simple_run_1',
             'running'))
 
         os.kill(proc.pid, signal.SIGTERM)
         wait_on_condition(ContainerStateCondition(
             self.project.client,
-            'simplecomposefile_simple_run_1',
+            'simple-composefile_simple_run_1',
             'exited'))
 
     def test_run_handles_sighup(self):
         proc = start_process(self.base_dir, ['run', '-T', 'simple', 'top'])
         wait_on_condition(ContainerStateCondition(
             self.project.client,
-            'simplecomposefile_simple_run_1',
+            'simple-composefile_simple_run_1',
             'running'))
 
         os.kill(proc.pid, signal.SIGHUP)
         wait_on_condition(ContainerStateCondition(
             self.project.client,
-            'simplecomposefile_simple_run_1',
+            'simple-composefile_simple_run_1',
             'exited'))
 
     @mock.patch.dict(os.environ)
@@ -2224,7 +2239,7 @@ class CLITestCase(DockerClientTestCase):
         self.dispatch(['up', '-d', 'another'])
         wait_on_condition(ContainerStateCondition(
             self.project.client,
-            'logscomposefile_another_1',
+            'logs-composefile_another_1',
             'exited'))
 
         self.dispatch(['kill', 'simple'])
@@ -2233,8 +2248,8 @@ class CLITestCase(DockerClientTestCase):
 
         assert 'hello' in result.stdout
         assert 'test' in result.stdout
-        assert 'logscomposefile_another_1 exited with code 0' in result.stdout
-        assert 'logscomposefile_simple_1 exited with code 137' in result.stdout
+        assert 'logs-composefile_another_1 exited with code 0' in result.stdout
+        assert 'logs-composefile_simple_1 exited with code 137' in result.stdout
 
     def test_logs_default(self):
         self.base_dir = 'tests/fixtures/logs-composefile'
@@ -2481,7 +2496,7 @@ class CLITestCase(DockerClientTestCase):
 
         container, = self.project.containers()
         expected_template = ' container {} {}'
-        expected_meta_info = ['image=busybox:latest', 'name=simplecomposefile_simple_1']
+        expected_meta_info = ['image=busybox:latest', 'name=simple-composefile_simple_1']
 
         assert expected_template.format('create', container.id) in lines[0]
         assert expected_template.format('start', container.id) in lines[1]
@@ -2601,13 +2616,13 @@ class CLITestCase(DockerClientTestCase):
 
         result = wait_on_process(proc, returncode=1)
 
-        assert 'exitcodefrom_another_1 exited with code 1' in result.stdout
+        assert 'exit-code-from_another_1 exited with code 1' in result.stdout
 
     def test_images(self):
         self.project.get_service('simple').create_container()
         result = self.dispatch(['images'])
         assert 'busybox' in result.stdout
-        assert 'simplecomposefile_simple_1' in result.stdout
+        assert 'simple-composefile_simple_1' in result.stdout
 
     def test_images_default_composefile(self):
         self.base_dir = 'tests/fixtures/multiple-composefiles'
@@ -2615,8 +2630,8 @@ class CLITestCase(DockerClientTestCase):
         result = self.dispatch(['images'])
 
         assert 'busybox' in result.stdout
-        assert 'multiplecomposefiles_another_1' in result.stdout
-        assert 'multiplecomposefiles_simple_1' in result.stdout
+        assert 'multiple-composefiles_another_1' in result.stdout
+        assert 'multiple-composefiles_simple_1' in result.stdout
 
     @mock.patch.dict(os.environ)
     def test_images_tagless_image(self):
@@ -2636,7 +2651,7 @@ class CLITestCase(DockerClientTestCase):
         self.project.get_service('foo').create_container()
         result = self.dispatch(['images'])
         assert '<none>' in result.stdout
-        assert 'taglessimage_foo_1' in result.stdout
+        assert 'tagless-image_foo_1' in result.stdout
 
     def test_up_with_override_yaml(self):
         self.base_dir = 'tests/fixtures/override-yaml-files'

+ 20 - 0
tests/integration/network_test.py

@@ -1,7 +1,10 @@
 from __future__ import absolute_import
 from __future__ import unicode_literals
 
+import pytest
+
 from .testcases import DockerClientTestCase
+from compose.config.errors import ConfigurationError
 from compose.const import LABEL_NETWORK
 from compose.const import LABEL_PROJECT
 from compose.network import Network
@@ -15,3 +18,20 @@ class NetworkTest(DockerClientTestCase):
         labels = net_data['Labels']
         assert labels[LABEL_NETWORK] == net.name
         assert labels[LABEL_PROJECT] == net.project
+
+    def test_network_external_default_ensure(self):
+        net = Network(
+            self.client, 'composetest', 'foonet',
+            external=True
+        )
+
+        with pytest.raises(ConfigurationError):
+            net.ensure()
+
+    def test_network_external_overlay_ensure(self):
+        net = Network(
+            self.client, 'composetest', 'foonet',
+            driver='overlay', external=True
+        )
+
+        assert net.ensure() is None

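The two new cases above pin down how external networks are expected to behave: a missing external network is a configuration error, but a missing external overlay network is tolerated, since Swarm-scoped networks may only become visible on a node once a task is scheduled there. Below is a minimal sketch of that decision logic against a `docker.APIClient`; `ensure_external_network` and the local `ConfigurationError` are hypothetical stand-ins, not the actual `compose.network` implementation.

```python
import docker
from docker.errors import NotFound


class ConfigurationError(Exception):
    """Stand-in for compose.config.errors.ConfigurationError."""


def ensure_external_network(client, name, driver=None):
    # Check whether an externally-managed network is usable from this node.
    try:
        client.inspect_network(name)
        return None  # the network exists, nothing to do
    except NotFound:
        # Swarm-scoped overlay networks may only become visible on a node
        # once a task is scheduled there, so tolerate their absence.
        if driver == 'overlay':
            return None
        raise ConfigurationError(
            'Network {} declared as external, but could not be found'.format(name)
        )


# Example (requires a running Docker daemon):
# ensure_external_network(docker.APIClient(), 'composetest_foonet', driver='overlay')
```
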
+ 47 - 0
tests/integration/project_test.py

@@ -4,6 +4,7 @@ from __future__ import unicode_literals
 import json
 import os
 import random
+import shutil
 import tempfile
 
 import py
@@ -1537,6 +1538,52 @@ class ProjectTest(DockerClientTestCase):
             vol_name
         ) in str(e.value)
 
+    @v2_only()
+    @no_cluster('inspect volume by name defect on Swarm Classic')
+    def test_initialize_volumes_updated_driver_opts(self):
+        vol_name = '{0:x}'.format(random.getrandbits(32))
+        full_vol_name = 'composetest_{0}'.format(vol_name)
+        tmpdir = tempfile.mkdtemp(prefix='compose_test_')
+        self.addCleanup(shutil.rmtree, tmpdir)
+        driver_opts = {'o': 'bind', 'device': tmpdir, 'type': 'none'}
+
+        config_data = build_config(
+            version=V2_0,
+            services=[{
+                'name': 'web',
+                'image': 'busybox:latest',
+                'command': 'top'
+            }],
+            volumes={
+                vol_name: {
+                    'driver': 'local',
+                    'driver_opts': driver_opts
+                }
+            },
+        )
+        project = Project.from_config(
+            name='composetest',
+            config_data=config_data, client=self.client
+        )
+        project.volumes.initialize()
+
+        volume_data = self.get_volume_data(full_vol_name)
+        assert volume_data['Name'].split('/')[-1] == full_vol_name
+        assert volume_data['Driver'] == 'local'
+        assert volume_data['Options'] == driver_opts
+
+        driver_opts['device'] = '/opt/data/localdata'
+        project = Project.from_config(
+            name='composetest',
+            config_data=config_data,
+            client=self.client
+        )
+        with pytest.raises(config.ConfigurationError) as e:
+            project.volumes.initialize()
+        assert 'Configuration for volume {0} specifies "device" driver_opt {1}'.format(
+            vol_name, driver_opts['device']
+        ) in str(e.value)
+
     @v2_only()
     def test_initialize_volumes_updated_blank_driver(self):
         vol_name = '{0:x}'.format(random.getrandbits(32))

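The new test expects `volumes.initialize()` to refuse to reuse an existing volume whose recorded `device` driver_opt no longer matches the declared configuration. A rough sketch of such a drift check follows; `check_remote_volume_opts` is a hypothetical helper for illustration, not Compose's actual implementation.

```python
class ConfigurationError(Exception):
    """Stand-in for compose.config.ConfigurationError."""


def check_remote_volume_opts(volume_name, declared_opts, remote_volume):
    # `remote_volume` is the kind of dict `APIClient.inspect_volume` returns;
    # compare each declared driver_opt against what the volume was created with.
    remote_opts = remote_volume.get('Options') or {}
    for key, declared in (declared_opts or {}).items():
        if remote_opts.get(key) != declared:
            raise ConfigurationError(
                'Configuration for volume {0} specifies "{1}" driver_opt {2}, but the '
                'existing volume was created with {3}'.format(
                    volume_name, key, declared, remote_opts.get(key))
            )


existing = {'Name': 'composetest_data', 'Driver': 'local',
            'Options': {'o': 'bind', 'device': '/tmp/data', 'type': 'none'}}

check_remote_volume_opts('data', {'device': '/tmp/data'}, existing)  # matches, no error
try:
    check_remote_volume_opts('data', {'device': '/opt/data/localdata'}, existing)
except ConfigurationError as exc:
    print(exc)  # the mismatch is reported instead of silently reusing the volume
```
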
+ 42 - 1
tests/integration/service_test.py

@@ -122,10 +122,19 @@ class ServiceTest(DockerClientTestCase):
         assert container.get('HostConfig.CpuShares') == 73
 
     def test_create_container_with_cpu_quota(self):
-        service = self.create_service('db', cpu_quota=40000)
+        service = self.create_service('db', cpu_quota=40000, cpu_period=150000)
         container = service.create_container()
         container.start()
         assert container.get('HostConfig.CpuQuota') == 40000
+        assert container.get('HostConfig.CpuPeriod') == 150000
+
+    @pytest.mark.xfail(raises=OperationFailedError, reason='not supported by kernel')
+    def test_create_container_with_cpu_rt(self):
+        service = self.create_service('db', cpu_rt_runtime=40000, cpu_rt_period=150000)
+        container = service.create_container()
+        container.start()
+        assert container.get('HostConfig.CpuRealtimeRuntime') == 40000
+        assert container.get('HostConfig.CpuRealtimePeriod') == 150000
 
     @v2_2_only()
     def test_create_container_with_cpu_count(self):
@@ -1096,6 +1105,38 @@ class ServiceTest(DockerClientTestCase):
         service.build()
         assert service.image()
 
+    def test_build_with_gzip(self):
+        base_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, base_dir)
+        with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+            f.write('\n'.join([
+                'FROM busybox',
+                'COPY . /src',
+                'RUN cat /src/hello.txt'
+            ]))
+        with open(os.path.join(base_dir, 'hello.txt'), 'w') as f:
+            f.write('hello world\n')
+
+        service = self.create_service('build_gzip', build={
+            'context': text_type(base_dir),
+        })
+        service.build(gzip=True)
+        assert service.image()
+
+    @v2_1_only()
+    def test_build_with_isolation(self):
+        base_dir = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, base_dir)
+        with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f:
+            f.write('FROM busybox\n')
+
+        service = self.create_service('build_isolation', build={
+            'context': text_type(base_dir),
+            'isolation': 'default',
+        })
+        service.build()
+        assert service.image()
+
     def test_start_container_stays_unprivileged(self):
         service = self.create_service('web')
         container = create_and_start_container(service).inspect()

+ 8 - 0
tests/unit/cli/main_test.py

@@ -154,3 +154,11 @@ class TestCallDocker(object):
         assert fake_call.call_args[0][0] == [
             'docker', '--host', 'tcp://mydocker.net:2333', 'ps'
         ]
+
+    def test_with_host_option_shorthand_equal(self):
+        with mock.patch('subprocess.call') as fake_call:
+            call_docker(['ps'], {'--host': '=tcp://mydocker.net:2333'})
+
+        assert fake_call.call_args[0][0] == [
+            'docker', '--host', 'tcp://mydocker.net:2333', 'ps'
+        ]

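The new unit test covers the `-H=tcp://...` shorthand: the option value arrives with the leading `=` still attached, so it has to be stripped before the arguments are forwarded to the `docker` binary. A minimal sketch of that normalization; `normalize_host_value` is a hypothetical helper, not the actual `call_docker` code.

```python
def normalize_host_value(value):
    # With the `-H=tcp://...` shorthand the parsed option value keeps the
    # leading '=', so strip a single '=' before passing the host to docker.
    if value and value.startswith('='):
        return value[1:]
    return value


assert normalize_host_value('=tcp://mydocker.net:2333') == 'tcp://mydocker.net:2333'
assert normalize_host_value('tcp://mydocker.net:2333') == 'tcp://mydocker.net:2333'
```
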
+ 5 - 5
tests/unit/cli_test.py

@@ -30,12 +30,12 @@ class CLITestCase(unittest.TestCase):
         test_dir = py._path.local.LocalPath('tests/fixtures/simple-composefile')
         with test_dir.as_cwd():
             project_name = get_project_name('.')
-            assert 'simplecomposefile' == project_name
+            assert 'simple-composefile' == project_name
 
     def test_project_name_with_explicit_base_dir(self):
         base_dir = 'tests/fixtures/simple-composefile'
         project_name = get_project_name(base_dir)
-        assert 'simplecomposefile' == project_name
+        assert 'simple-composefile' == project_name
 
     def test_project_name_with_explicit_uppercase_base_dir(self):
         base_dir = 'tests/fixtures/UpperCaseDir'
@@ -45,7 +45,7 @@ class CLITestCase(unittest.TestCase):
     def test_project_name_with_explicit_project_name(self):
         name = 'explicit-project-name'
         project_name = get_project_name(None, project_name=name)
-        assert 'explicitprojectname' == project_name
+        assert 'explicit-project-name' == project_name
 
     @mock.patch.dict(os.environ)
     def test_project_name_from_environment_new_var(self):
@@ -59,7 +59,7 @@ class CLITestCase(unittest.TestCase):
         with mock.patch.dict(os.environ):
             os.environ['COMPOSE_PROJECT_NAME'] = ''
             project_name = get_project_name(base_dir)
-        assert 'simplecomposefile' == project_name
+        assert 'simple-composefile' == project_name
 
     @mock.patch.dict(os.environ)
     def test_project_name_with_environment_file(self):
@@ -80,7 +80,7 @@ class CLITestCase(unittest.TestCase):
     def test_get_project(self):
         base_dir = 'tests/fixtures/longer-filename-composefile'
         project = get_project(base_dir)
-        assert project.name == 'longerfilenamecomposefile'
+        assert project.name == 'longer-filename-composefile'
         assert project.client
         assert project.services
 

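The updated expectations reflect that dashes and underscores are no longer stripped from project names. A sketch of a normalization that matches these assertions, assuming only lowercasing and removal of characters outside `[a-z0-9_-]`; `normalize_project_name` is a hypothetical stand-in for the real implementation.

```python
import re


def normalize_project_name(name):
    # Lowercase the directory (or explicit) name and drop characters outside
    # [a-z0-9_-], keeping dashes and underscores instead of stripping them.
    return re.sub(r'[^-_a-z0-9]', '', name.lower())


assert normalize_project_name('simple-composefile') == 'simple-composefile'
assert normalize_project_name('explicit-project-name') == 'explicit-project-name'
```
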
+ 28 - 0
tests/unit/config/config_test.py

@@ -3,6 +3,7 @@ from __future__ import absolute_import
 from __future__ import print_function
 from __future__ import unicode_literals
 
+import codecs
 import os
 import shutil
 import tempfile
@@ -1623,6 +1624,21 @@ class ConfigTest(unittest.TestCase):
 
         assert 'line 3, column 32' in exc.exconly()
 
+    def test_load_yaml_with_bom(self):
+        tmpdir = py.test.ensuretemp('bom_yaml')
+        self.addCleanup(tmpdir.remove)
+        bom_yaml = tmpdir.join('docker-compose.yml')
+        with codecs.open(str(bom_yaml), 'w', encoding='utf-8') as f:
+            f.write('''\ufeff
+                version: '2.3'
+                volumes:
+                    park_bom:
+            ''')
+        assert config.load_yaml(str(bom_yaml)) == {
+            'version': '2.3',
+            'volumes': {'park_bom': None}
+        }
+
     def test_validate_extra_hosts_invalid(self):
         with pytest.raises(ConfigurationError) as exc:
             config.load(build_config_details({
@@ -4927,6 +4943,18 @@ class SerializeTest(unittest.TestCase):
         serialized_config = yaml.load(serialize_config(config_dict))
         assert '8080:80/tcp' in serialized_config['services']['web']['ports']
 
+    def test_serialize_ports_with_ext_ip(self):
+        config_dict = config.Config(version=V3_5, services=[
+            {
+                'ports': [types.ServicePort('80', '8080', None, None, '127.0.0.1')],
+                'image': 'alpine',
+                'name': 'web'
+            }
+        ], volumes={}, networks={}, secrets={}, configs={})
+
+        serialized_config = yaml.load(serialize_config(config_dict))
+        assert '127.0.0.1:8080:80/tcp' in serialized_config['services']['web']['ports']
+
     def test_serialize_configs(self):
         service_dict = {
             'image': 'example/web',

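`test_load_yaml_with_bom` expects a Compose file that starts with a UTF-8 byte-order mark to parse cleanly. One standard-library way to get that behaviour is to read with the `utf-8-sig` codec, sketched below; this is an illustration only, not the actual `config.load_yaml` implementation.

```python
import codecs
import tempfile

import yaml


def load_yaml_tolerating_bom(filename):
    # 'utf-8-sig' transparently strips a leading UTF-8 byte-order mark, so
    # the YAML parser never sees the '\ufeff' character.
    with codecs.open(filename, 'r', encoding='utf-8-sig') as fh:
        return yaml.safe_load(fh)


with tempfile.NamedTemporaryFile('w', suffix='.yml', delete=False, encoding='utf-8') as fh:
    fh.write('\ufeffversion: "2.3"\nvolumes:\n  park_bom:\n')

assert load_yaml_tolerating_bom(fh.name) == {'version': '2.3', 'volumes': {'park_bom': None}}
```
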
+ 12 - 0
tests/unit/config/interpolation_test.py

@@ -420,3 +420,15 @@ def test_interpolate_unicode_values():
 
     interpol("$FOO") == '十六夜 咲夜'
     interpol("${BAR}") == '十六夜 咲夜'
+
+
+def test_interpolate_no_fallthrough():
+    # Test regression on docker/compose#5829
+    variable_mapping = {
+        'TEST:-': 'hello',
+        'TEST-': 'hello',
+    }
+    interpol = Interpolator(TemplateWithDefaults, variable_mapping).interpolate
+
+    assert interpol('${TEST:-}') == ''
+    assert interpol('${TEST-}') == ''

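The regression test for docker/compose#5829 checks that `${TEST:-}` and `${TEST-}` resolve to an empty string instead of falling through to a literal `TEST:-`/`TEST-` variable lookup. A self-contained sketch of the POSIX-style default rules involved (not Compose's `TemplateWithDefaults`):

```python
import re

_PATTERN = re.compile(r'\$\{(?P<name>[^}:-]+)(?:(?P<sep>:?-)(?P<default>[^}]*))?\}')


def substitute(template, mapping):
    # ${VAR-default}  -> default only when VAR is unset
    # ${VAR:-default} -> default when VAR is unset *or* empty
    def repl(match):
        name, sep, default = match.group('name', 'sep', 'default')
        value = mapping.get(name)
        if sep == ':-':
            return value if value else (default or '')
        if sep == '-':
            return value if value is not None else (default or '')
        return value or ''
    return _PATTERN.sub(repl, template)


assert substitute('${TEST:-}', {'TEST:-': 'hello', 'TEST-': 'hello'}) == ''
assert substitute('${TEST-}', {'TEST:-': 'hello', 'TEST-': 'hello'}) == ''
```
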
+ 27 - 0
tests/unit/project_test.py

@@ -13,6 +13,7 @@ from compose.config.config import Config
 from compose.config.types import VolumeFromSpec
 from compose.const import COMPOSEFILE_V1 as V1
 from compose.const import COMPOSEFILE_V2_0 as V2_0
+from compose.const import COMPOSEFILE_V2_4 as V2_4
 from compose.const import LABEL_SERVICE
 from compose.container import Container
 from compose.project import NoSuchService
@@ -561,3 +562,29 @@ class ProjectTest(unittest.TestCase):
     def test_no_such_service_unicode(self):
         assert NoSuchService('十六夜 咲夜'.encode('utf-8')).msg == 'No such service: 十六夜 咲夜'
         assert NoSuchService('十六夜 咲夜').msg == 'No such service: 十六夜 咲夜'
+
+    def test_project_platform_value(self):
+        service_config = {
+            'name': 'web',
+            'image': 'busybox:latest',
+        }
+        config_data = Config(
+            version=V2_4, services=[service_config], networks={}, volumes={}, secrets=None, configs=None
+        )
+
+        project = Project.from_config(name='test', client=self.mock_client, config_data=config_data)
+        assert project.get_service('web').options.get('platform') is None
+
+        project = Project.from_config(
+            name='test', client=self.mock_client, config_data=config_data, default_platform='windows'
+        )
+        assert project.get_service('web').options.get('platform') == 'windows'
+
+        service_config['platform'] = 'linux/s390x'
+        project = Project.from_config(name='test', client=self.mock_client, config_data=config_data)
+        assert project.get_service('web').options.get('platform') == 'linux/s390x'
+
+        project = Project.from_config(
+            name='test', client=self.mock_client, config_data=config_data, default_platform='windows'
+        )
+        assert project.get_service('web').options.get('platform') == 'linux/s390x'

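The new test fixes the precedence between a service-level `platform` and the project's `default_platform`: the service value wins, and the default only applies when the service doesn't specify one. A one-line sketch of that precedence:

```python
def resolve_platform(service_platform, default_platform=None):
    # An explicit service-level platform wins; otherwise fall back to the
    # project-wide default; otherwise leave the platform unset.
    return service_platform or default_platform


assert resolve_platform(None, None) is None
assert resolve_platform(None, 'windows') == 'windows'
assert resolve_platform('linux/s390x', 'windows') == 'linux/s390x'
```
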
+ 104 - 37
tests/unit/service_test.py

@@ -5,6 +5,7 @@ import docker
 import pytest
 from docker.constants import DEFAULT_DOCKER_API_VERSION
 from docker.errors import APIError
+from docker.errors import NotFound
 
 from .. import mock
 from .. import unittest
@@ -20,6 +21,7 @@ from compose.const import LABEL_PROJECT
 from compose.const import LABEL_SERVICE
 from compose.const import SECRETS_PATH
 from compose.container import Container
+from compose.errors import OperationFailedError
 from compose.parallel import ParallelStreamWriter
 from compose.project import OneOffFilter
 from compose.service import build_ulimits
@@ -399,7 +401,8 @@ class ServiceTest(unittest.TestCase):
         self.mock_client.pull.assert_called_once_with(
             'someimage',
             tag='sometag',
-            stream=True)
+            stream=True,
+            platform=None)
         mock_log.info.assert_called_once_with('Pulling foo (someimage:sometag)...')
 
     def test_pull_image_no_tag(self):
@@ -408,7 +411,8 @@ class ServiceTest(unittest.TestCase):
         self.mock_client.pull.assert_called_once_with(
             'ababab',
             tag='latest',
-            stream=True)
+            stream=True,
+            platform=None)
 
     @mock.patch('compose.service.log', autospec=True)
     def test_pull_image_digest(self, mock_log):
@@ -417,9 +421,30 @@ class ServiceTest(unittest.TestCase):
         self.mock_client.pull.assert_called_once_with(
             'someimage',
             tag='sha256:1234',
-            stream=True)
+            stream=True,
+            platform=None)
         mock_log.info.assert_called_once_with('Pulling foo (someimage@sha256:1234)...')
 
+    @mock.patch('compose.service.log', autospec=True)
+    def test_pull_image_with_platform(self, mock_log):
+        self.mock_client.api_version = '1.35'
+        service = Service(
+            'foo', client=self.mock_client, image='someimage:sometag', platform='windows/x86_64'
+        )
+        service.pull()
+        assert self.mock_client.pull.call_count == 1
+        call_args = self.mock_client.pull.call_args
+        assert call_args[1]['platform'] == 'windows/x86_64'
+
+    @mock.patch('compose.service.log', autospec=True)
+    def test_pull_image_with_platform_unsupported_api(self, mock_log):
+        self.mock_client.api_version = '1.33'
+        service = Service(
+            'foo', client=self.mock_client, image='someimage:sometag', platform='linux/arm'
+        )
+        with pytest.raises(OperationFailedError):
+            service.pull()
+
     @mock.patch('compose.service.Container', autospec=True)
     def test_recreate_container(self, _):
         mock_container = mock.create_autospec(Container)
@@ -471,23 +496,8 @@ class ServiceTest(unittest.TestCase):
             _, args, _ = mock_log.warn.mock_calls[0]
             assert 'was built because it did not already exist' in args[0]
 
-        self.mock_client.build.assert_called_once_with(
-            tag='default_foo',
-            dockerfile=None,
-            path='.',
-            pull=False,
-            forcerm=False,
-            nocache=False,
-            rm=True,
-            buildargs={},
-            labels=None,
-            cache_from=None,
-            network_mode=None,
-            target=None,
-            shmsize=None,
-            extra_hosts=None,
-            container_limits={'memory': None},
-        )
+        assert self.mock_client.build.call_count == 1
+        assert self.mock_client.build.call_args[1]['tag'] == 'default_foo'
 
     def test_ensure_image_exists_no_build(self):
         service = Service('foo', client=self.mock_client, build={'context': '.'})
@@ -513,23 +523,8 @@ class ServiceTest(unittest.TestCase):
             service.ensure_image_exists(do_build=BuildAction.force)
 
         assert not mock_log.warn.called
-        self.mock_client.build.assert_called_once_with(
-            tag='default_foo',
-            dockerfile=None,
-            path='.',
-            pull=False,
-            forcerm=False,
-            nocache=False,
-            rm=True,
-            buildargs={},
-            labels=None,
-            cache_from=None,
-            network_mode=None,
-            target=None,
-            shmsize=None,
-            extra_hosts=None,
-            container_limits={'memory': None},
-        )
+        assert self.mock_client.build.call_count == 1
+        assert self.mock_client.build.call_args[1]['tag'] == 'default_foo'
 
     def test_build_does_not_pull(self):
         self.mock_client.build.return_value = [
@@ -542,6 +537,19 @@ class ServiceTest(unittest.TestCase):
         assert self.mock_client.build.call_count == 1
         assert not self.mock_client.build.call_args[1]['pull']
 
+    def test_build_does_with_platform(self):
+        self.mock_client.api_version = '1.35'
+        self.mock_client.build.return_value = [
+            b'{"stream": "Successfully built 12345"}',
+        ]
+
+        service = Service('foo', client=self.mock_client, build={'context': '.'}, platform='linux')
+        service.build()
+
+        assert self.mock_client.build.call_count == 1
+        call_args = self.mock_client.build.call_args
+        assert call_args[1]['platform'] == 'linux'
+
     def test_build_with_override_build_args(self):
         self.mock_client.build.return_value = [
             b'{"stream": "Successfully built 12345"}',
@@ -559,6 +567,33 @@ class ServiceTest(unittest.TestCase):
         assert called_build_args['arg1'] == build_args['arg1']
         assert called_build_args['arg2'] == 'arg2'
 
+    def test_build_with_isolation_from_service_config(self):
+        self.mock_client.build.return_value = [
+            b'{"stream": "Successfully built 12345"}',
+        ]
+
+        service = Service('foo', client=self.mock_client, build={'context': '.'}, isolation='hyperv')
+        service.build()
+
+        assert self.mock_client.build.call_count == 1
+        called_build_args = self.mock_client.build.call_args[1]
+        assert called_build_args['isolation'] == 'hyperv'
+
+    def test_build_isolation_from_build_override_service_config(self):
+        self.mock_client.build.return_value = [
+            b'{"stream": "Successfully built 12345"}',
+        ]
+
+        service = Service(
+            'foo', client=self.mock_client, build={'context': '.', 'isolation': 'default'},
+            isolation='hyperv'
+        )
+        service.build()
+
+        assert self.mock_client.build.call_count == 1
+        called_build_args = self.mock_client.build.call_args[1]
+        assert called_build_args['isolation'] == 'default'
+
     def test_config_dict(self):
         self.mock_client.inspect_image.return_value = {'Id': 'abcd'}
         service = Service(
@@ -888,6 +923,38 @@ class ServiceTest(unittest.TestCase):
             'ftp_proxy': override_options['environment']['FTP_PROXY'],
         }))
 
+    def test_create_when_removed_containers_are_listed(self):
+        # This is aimed at simulating a race between the API call to list the
+        # containers, and the ones to inspect each of the listed containers.
+        # It can happen that a container has been removed after we listed it.
+
+        # containers() returns a container that is about to be removed
+        self.mock_client.containers.return_value = [
+            {'Id': 'rm_cont_id', 'Name': 'rm_cont', 'Image': 'img_id'},
+        ]
+
+        # inspect_container() will raise a NotFound when trying to inspect
+        # rm_cont_id, which at this point has been removed
+        def inspect(name):
+            if name == 'rm_cont_id':
+                raise NotFound(message='Not Found')
+
+            if name == 'new_cont_id':
+                return {'Id': 'new_cont_id'}
+
+            raise NotImplementedError("incomplete mock")
+
+        self.mock_client.inspect_container.side_effect = inspect
+
+        self.mock_client.inspect_image.return_value = {'Id': 'imageid'}
+
+        self.mock_client.create_container.return_value = {'Id': 'new_cont_id'}
+
+        # We should nonetheless be able to create a new container
+        service = Service('foo', client=self.mock_client)
+
+        assert service.create_container().id == 'new_cont_id'
+
 
 class TestServiceNetwork(unittest.TestCase):
     def setUp(self):