# config.py

from __future__ import absolute_import
from __future__ import unicode_literals

import functools
import logging
import ntpath
import os
import string
import sys
from collections import namedtuple

import six
import yaml
from cached_property import cached_property

from ..const import COMPOSEFILE_V1 as V1
from ..const import COMPOSEFILE_V2_0 as V2_0
from ..const import COMPOSEFILE_V2_1 as V2_1
from ..utils import build_string_dict
from .environment import env_vars_from_file
from .environment import Environment
from .environment import split_env
from .errors import CircularReference
from .errors import ComposeFileNotFound
from .errors import ConfigurationError
from .errors import VERSION_EXPLANATION
from .interpolation import interpolate_environment_variables
from .sort_services import get_container_name_from_network_mode
from .sort_services import get_service_name_from_network_mode
from .sort_services import sort_service_dicts
from .types import parse_extra_hosts
from .types import parse_restart_spec
from .types import ServiceLink
from .types import VolumeFromSpec
from .types import VolumeSpec
from .validation import match_named_volumes
from .validation import validate_against_config_schema
from .validation import validate_config_section
from .validation import validate_depends_on
from .validation import validate_extends_file_path
from .validation import validate_links
from .validation import validate_network_mode
from .validation import validate_service_constraints
from .validation import validate_top_level_object
from .validation import validate_ulimits


DOCKER_CONFIG_KEYS = [
    'cap_add',
    'cap_drop',
    'cgroup_parent',
    'command',
    'cpu_quota',
    'cpu_shares',
    'cpuset',
    'detach',
    'devices',
    'dns',
    'dns_search',
    'domainname',
    'entrypoint',
    'env_file',
    'environment',
    'extra_hosts',
    'hostname',
    'image',
    'ipc',
    'labels',
    'links',
    'mac_address',
    'mem_limit',
    'memswap_limit',
    'mem_swappiness',
    'net',
    'oom_score_adj',
    'pid',
    'ports',
    'privileged',
    'read_only',
    'restart',
    'security_opt',
    'shm_size',
    'stdin_open',
    'stop_signal',
    'tty',
    'user',
    'volume_driver',
    'volumes',
    'volumes_from',
    'working_dir',
]

ALLOWED_KEYS = DOCKER_CONFIG_KEYS + [
    'build',
    'container_name',
    'dockerfile',
    'log_driver',
    'log_opt',
    'logging',
    'network_mode',
]

DOCKER_VALID_URL_PREFIXES = (
    'http://',
    'https://',
    'git://',
    'github.com/',
    'git@',
)

SUPPORTED_FILENAMES = [
    'docker-compose.yml',
    'docker-compose.yaml',
]

DEFAULT_OVERRIDE_FILENAME = 'docker-compose.override.yml'


log = logging.getLogger(__name__)


class ConfigDetails(namedtuple('_ConfigDetails', 'working_dir config_files environment')):
    """
    :param working_dir: the directory to use for relative paths in the config
    :type working_dir: string
    :param config_files: list of configuration files to load
    :type config_files: list of :class:`ConfigFile`
    :param environment: computed environment values for this project
    :type environment: :class:`environment.Environment`
    """
    def __new__(cls, working_dir, config_files, environment=None):
        if environment is None:
            environment = Environment.from_env_file(working_dir)
        return super(ConfigDetails, cls).__new__(
            cls, working_dir, config_files, environment
        )


class ConfigFile(namedtuple('_ConfigFile', 'filename config')):
    """
    :param filename: filename of the config file
    :type filename: string
    :param config: contents of the config file
    :type config: :class:`dict`
    """

    @classmethod
    def from_filename(cls, filename):
        return cls(filename, load_yaml(filename))

    @cached_property
    def version(self):
        if 'version' not in self.config:
            return V1

        version = self.config['version']

        if isinstance(version, dict):
            log.warn('Unexpected type for "version" key in "{}". Assuming '
                     '"version" is the name of a service, and defaulting to '
                     'Compose file version 1.'.format(self.filename))
            return V1

        if not isinstance(version, six.string_types):
            raise ConfigurationError(
                'Version in "{}" is invalid - it should be a string.'
                .format(self.filename))

        if version == '1':
            raise ConfigurationError(
                'Version in "{}" is invalid. {}'
                .format(self.filename, VERSION_EXPLANATION))

        if version == '2':
            version = V2_0

        if version not in (V2_0, V2_1):
            raise ConfigurationError(
                'Version in "{}" is unsupported. {}'
                .format(self.filename, VERSION_EXPLANATION))

        return version

    def get_service(self, name):
        return self.get_service_dicts()[name]

    def get_service_dicts(self):
        return self.config if self.version == V1 else self.config.get('services', {})

    def get_volumes(self):
        return {} if self.version == V1 else self.config.get('volumes', {})

    def get_networks(self):
        return {} if self.version == V1 else self.config.get('networks', {})
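

# Illustrative note (editorial, not part of the original module): ConfigFile.version
# maps the raw YAML to a schema version. A file with no top-level "version" key is
# treated as V1, a literal 'version: "2"' is normalized to V2_0, and any value
# outside (V2_0, V2_1) is rejected with a ConfigurationError.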


class Config(namedtuple('_Config', 'version services volumes networks')):
    """
    :param version: configuration version
    :type version: int
    :param services: List of service description dictionaries
    :type services: :class:`list`
    :param volumes: Dictionary mapping volume names to description dictionaries
    :type volumes: :class:`dict`
    :param networks: Dictionary mapping network names to description dictionaries
    :type networks: :class:`dict`
    """


class ServiceConfig(namedtuple('_ServiceConfig', 'working_dir filename name config')):

    @classmethod
    def with_abs_paths(cls, working_dir, filename, name, config):
        if not working_dir:
            raise ValueError("No working_dir for ServiceConfig.")

        return cls(
            os.path.abspath(working_dir),
            os.path.abspath(filename) if filename else filename,
            name,
            config)


def find(base_dir, filenames, environment):
    if filenames == ['-']:
        return ConfigDetails(
            os.getcwd(),
            [ConfigFile(None, yaml.safe_load(sys.stdin))],
            environment
        )

    if filenames:
        filenames = [os.path.join(base_dir, f) for f in filenames]
    else:
        filenames = get_default_config_files(base_dir)

    log.debug("Using configuration files: {}".format(",".join(filenames)))
    return ConfigDetails(
        os.path.dirname(filenames[0]),
        [ConfigFile.from_filename(f) for f in filenames],
        environment
    )
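

# Illustrative note (editorial, not part of the original module): passing ['-'] as
# the filename list reads a single Compose file from stdin; passing no filenames
# falls back to get_default_config_files(), which also picks up
# docker-compose.override.yml when it exists next to the main file.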


def validate_config_version(config_files):
    main_file = config_files[0]
    validate_top_level_object(main_file)
    for next_file in config_files[1:]:
        validate_top_level_object(next_file)

        if main_file.version != next_file.version:
            raise ConfigurationError(
                "Version mismatch: file {0} specifies version {1} but "
                "extension file {2} uses version {3}".format(
                    main_file.filename,
                    main_file.version,
                    next_file.filename,
                    next_file.version))


def get_default_config_files(base_dir):
    (candidates, path) = find_candidates_in_parent_dirs(SUPPORTED_FILENAMES, base_dir)

    if not candidates:
        raise ComposeFileNotFound(SUPPORTED_FILENAMES)

    winner = candidates[0]

    if len(candidates) > 1:
        log.warn("Found multiple config files with supported names: %s", ", ".join(candidates))
        log.warn("Using %s\n", winner)

    return [os.path.join(path, winner)] + get_default_override_file(path)


def get_default_override_file(path):
    override_filename = os.path.join(path, DEFAULT_OVERRIDE_FILENAME)
    return [override_filename] if os.path.exists(override_filename) else []


def find_candidates_in_parent_dirs(filenames, path):
    """
    Given a directory path to start, looks for filenames in the
    directory, and then each parent directory successively,
    until found.

    Returns tuple (candidates, path).
    """
    candidates = [filename for filename in filenames
                  if os.path.exists(os.path.join(path, filename))]

    if not candidates:
        parent_dir = os.path.join(path, '..')
        if os.path.abspath(parent_dir) != os.path.abspath(path):
            return find_candidates_in_parent_dirs(filenames, parent_dir)

    return (candidates, path)


def load(config_details):
    """Load the configuration from a working directory and a list of
    configuration files. Files are loaded in order, and merged on top
    of each other to create the final configuration.

    Return a fully interpolated, extended and validated configuration.
    """
    validate_config_version(config_details.config_files)

    processed_files = [
        process_config_file(config_file, config_details.environment)
        for config_file in config_details.config_files
    ]
    config_details = config_details._replace(config_files=processed_files)

    main_file = config_details.config_files[0]
    volumes = load_mapping(
        config_details.config_files, 'get_volumes', 'Volume'
    )
    networks = load_mapping(
        config_details.config_files, 'get_networks', 'Network'
    )
    service_dicts = load_services(config_details, main_file)

    if main_file.version != V1:
        for service_dict in service_dicts:
            match_named_volumes(service_dict, volumes)

    return Config(main_file.version, service_dicts, volumes, networks)
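

# Usage sketch (editorial, not part of the original module), assuming `base_dir`
# is a directory containing a docker-compose.yml:
#
#   environment = Environment.from_env_file(base_dir)
#   config_details = find(base_dir, None, environment)
#   config = load(config_details)
#   # config is a Config namedtuple: (version, services, volumes, networks)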


def load_mapping(config_files, get_func, entity_type):
    mapping = {}

    for config_file in config_files:
        for name, config in getattr(config_file, get_func)().items():
            mapping[name] = config or {}
            if not config:
                continue

            external = config.get('external')
            if external:
                if len(config.keys()) > 1:
                    raise ConfigurationError(
                        '{} {} declared as external but specifies'
                        ' additional attributes ({}). '.format(
                            entity_type,
                            name,
                            ', '.join([k for k in config.keys() if k != 'external'])
                        )
                    )
                if isinstance(external, dict):
                    config['external_name'] = external.get('name')
                else:
                    config['external_name'] = name

            mapping[name] = config

            if 'driver_opts' in config:
                config['driver_opts'] = build_string_dict(
                    config['driver_opts']
                )

    return mapping
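

# Illustrative note (editorial, not part of the original module): for a v2 file, a
# volume entry such as `data: {external: true}` is recorded with `external_name`
# set to the volume's own name, while `data: {external: {name: actual}}` records
# the given name; combining `external` with any other attribute raises
# ConfigurationError.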


def load_services(config_details, config_file):
    def build_service(service_name, service_dict, service_names):
        service_config = ServiceConfig.with_abs_paths(
            config_details.working_dir,
            config_file.filename,
            service_name,
            service_dict)
        resolver = ServiceExtendsResolver(
            service_config, config_file, environment=config_details.environment
        )
        service_dict = process_service(resolver.run())

        service_config = service_config._replace(config=service_dict)
        validate_service(service_config, service_names, config_file.version)
        service_dict = finalize_service(
            service_config,
            service_names,
            config_file.version,
            config_details.environment)
        return service_dict

    def build_services(service_config):
        service_names = service_config.keys()
        return sort_service_dicts([
            build_service(name, service_dict, service_names)
            for name, service_dict in service_config.items()
        ])

    def merge_services(base, override):
        all_service_names = set(base) | set(override)
        return {
            name: merge_service_dicts_from_files(
                base.get(name, {}),
                override.get(name, {}),
                config_file.version)
            for name in all_service_names
        }

    service_configs = [
        file.get_service_dicts() for file in config_details.config_files
    ]

    service_config = service_configs[0]
    for next_config in service_configs[1:]:
        service_config = merge_services(service_config, next_config)

    return build_services(service_config)


def interpolate_config_section(filename, config, section, environment):
    validate_config_section(filename, config, section)
    return interpolate_environment_variables(config, section, environment)


def process_config_file(config_file, environment, service_name=None):
    services = interpolate_config_section(
        config_file.filename,
        config_file.get_service_dicts(),
        'service',
        environment)

    if config_file.version in (V2_0, V2_1):
        processed_config = dict(config_file.config)
        processed_config['services'] = services
        processed_config['volumes'] = interpolate_config_section(
            config_file.filename,
            config_file.get_volumes(),
            'volume',
            environment)
        processed_config['networks'] = interpolate_config_section(
            config_file.filename,
            config_file.get_networks(),
            'network',
            environment)

    if config_file.version == V1:
        processed_config = services

    config_file = config_file._replace(config=processed_config)
    validate_against_config_schema(config_file)

    if service_name and service_name not in services:
        raise ConfigurationError(
            "Cannot extend service '{}' in {}: Service not found".format(
                service_name, config_file.filename))

    return config_file
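

# Illustrative note (editorial, not part of the original module): environment
# interpolation (e.g. ${VAR} references) is applied here, before
# validate_against_config_schema() runs, so the schema validation sees the
# already-substituted document.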


class ServiceExtendsResolver(object):
    def __init__(self, service_config, config_file, environment, already_seen=None):
        self.service_config = service_config
        self.working_dir = service_config.working_dir
        self.already_seen = already_seen or []
        self.config_file = config_file
        self.environment = environment

    @property
    def signature(self):
        return self.service_config.filename, self.service_config.name

    def detect_cycle(self):
        if self.signature in self.already_seen:
            raise CircularReference(self.already_seen + [self.signature])

    def run(self):
        self.detect_cycle()

        if 'extends' in self.service_config.config:
            service_dict = self.resolve_extends(*self.validate_and_construct_extends())
            return self.service_config._replace(config=service_dict)

        return self.service_config

    def validate_and_construct_extends(self):
        extends = self.service_config.config['extends']
        if not isinstance(extends, dict):
            extends = {'service': extends}

        config_path = self.get_extended_config_path(extends)
        service_name = extends['service']

        extends_file = ConfigFile.from_filename(config_path)
        validate_config_version([self.config_file, extends_file])
        extended_file = process_config_file(
            extends_file, self.environment, service_name=service_name
        )
        service_config = extended_file.get_service(service_name)

        return config_path, service_config, service_name

    def resolve_extends(self, extended_config_path, service_dict, service_name):
        resolver = ServiceExtendsResolver(
            ServiceConfig.with_abs_paths(
                os.path.dirname(extended_config_path),
                extended_config_path,
                service_name,
                service_dict),
            self.config_file,
            already_seen=self.already_seen + [self.signature],
            environment=self.environment
        )

        service_config = resolver.run()
        other_service_dict = process_service(service_config)
        validate_extended_service_dict(
            other_service_dict,
            extended_config_path,
            service_name)

        return merge_service_dicts(
            other_service_dict,
            self.service_config.config,
            self.config_file.version)

    def get_extended_config_path(self, extends_options):
        """The service we are extending either has a value for 'file' set, which we
        need to obtain a full path to, or we are extending from a service
        defined in our own file.
        """
        filename = self.service_config.filename
        validate_extends_file_path(
            self.service_config.name,
            extends_options,
            filename)
        if 'file' in extends_options:
            return expand_path(self.working_dir, extends_options['file'])
        return filename
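

# Illustrative note (editorial, not part of the original module): `extends` may be
# a bare service name or a mapping, e.g. `extends: {file: common.yml, service: web}`;
# validate_and_construct_extends() normalizes the short form into a mapping, and
# resolve_extends() recurses so chained extends are handled, with CircularReference
# raised when a (filename, service) pair is seen twice.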


def resolve_environment(service_dict, environment=None):
    """Unpack any environment variables from an env_file, if set.
    Interpolate environment values if set.
    """
    env = {}
    for env_file in service_dict.get('env_file', []):
        env.update(env_vars_from_file(env_file))

    env.update(parse_environment(service_dict.get('environment')))
    return dict(resolve_env_var(k, v, environment) for k, v in six.iteritems(env))


def resolve_build_args(build, environment):
    args = parse_build_arguments(build.get('args'))
    return dict(resolve_env_var(k, v, environment) for k, v in six.iteritems(args))


def validate_extended_service_dict(service_dict, filename, service):
    error_prefix = "Cannot extend service '%s' in %s:" % (service, filename)

    if 'links' in service_dict:
        raise ConfigurationError(
            "%s services with 'links' cannot be extended" % error_prefix)

    if 'volumes_from' in service_dict:
        raise ConfigurationError(
            "%s services with 'volumes_from' cannot be extended" % error_prefix)

    if 'net' in service_dict:
        if get_container_name_from_network_mode(service_dict['net']):
            raise ConfigurationError(
                "%s services with 'net: container' cannot be extended" % error_prefix)

    if 'network_mode' in service_dict:
        if get_service_name_from_network_mode(service_dict['network_mode']):
            raise ConfigurationError(
                "%s services with 'network_mode: service' cannot be extended" % error_prefix)

    if 'depends_on' in service_dict:
        raise ConfigurationError(
            "%s services with 'depends_on' cannot be extended" % error_prefix)


def validate_service(service_config, service_names, version):
    service_dict, service_name = service_config.config, service_config.name
    validate_service_constraints(service_dict, service_name, version)
    validate_paths(service_dict)

    validate_ulimits(service_config)
    validate_network_mode(service_config, service_names)
    validate_depends_on(service_config, service_names)
    validate_links(service_config, service_names)

    if not service_dict.get('image') and has_uppercase(service_name):
        raise ConfigurationError(
            "Service '{name}' contains uppercase characters which are not valid "
            "as part of an image name. Either use a lowercase service name or "
            "use the `image` field to set a custom name for the service image."
            .format(name=service_name))


def process_service(service_config):
    working_dir = service_config.working_dir
    service_dict = dict(service_config.config)

    if 'env_file' in service_dict:
        service_dict['env_file'] = [
            expand_path(working_dir, path)
            for path in to_list(service_dict['env_file'])
        ]

    if 'build' in service_dict:
        if isinstance(service_dict['build'], six.string_types):
            service_dict['build'] = resolve_build_path(working_dir, service_dict['build'])
        elif isinstance(service_dict['build'], dict) and 'context' in service_dict['build']:
            path = service_dict['build']['context']
            service_dict['build']['context'] = resolve_build_path(working_dir, path)

    if 'volumes' in service_dict and service_dict.get('volume_driver') is None:
        service_dict['volumes'] = resolve_volume_paths(working_dir, service_dict)

    if 'labels' in service_dict:
        service_dict['labels'] = parse_labels(service_dict['labels'])

    if 'extra_hosts' in service_dict:
        service_dict['extra_hosts'] = parse_extra_hosts(service_dict['extra_hosts'])

    for field in ['dns', 'dns_search', 'tmpfs']:
        if field in service_dict:
            service_dict[field] = to_list(service_dict[field])

    return service_dict


def finalize_service(service_config, service_names, version, environment):
    service_dict = dict(service_config.config)

    if 'environment' in service_dict or 'env_file' in service_dict:
        service_dict['environment'] = resolve_environment(service_dict, environment)
        service_dict.pop('env_file', None)

    if 'volumes_from' in service_dict:
        service_dict['volumes_from'] = [
            VolumeFromSpec.parse(vf, service_names, version)
            for vf in service_dict['volumes_from']
        ]

    if 'volumes' in service_dict:
        service_dict['volumes'] = [
            VolumeSpec.parse(v) for v in service_dict['volumes']]

    if 'net' in service_dict:
        network_mode = service_dict.pop('net')
        container_name = get_container_name_from_network_mode(network_mode)
        if container_name and container_name in service_names:
            service_dict['network_mode'] = 'service:{}'.format(container_name)
        else:
            service_dict['network_mode'] = network_mode

    if 'networks' in service_dict:
        service_dict['networks'] = parse_networks(service_dict['networks'])

    if 'restart' in service_dict:
        service_dict['restart'] = parse_restart_spec(service_dict['restart'])

    normalize_build(service_dict, service_config.working_dir, environment)

    service_dict['name'] = service_config.name

    return normalize_v1_service_format(service_dict)
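

# Illustrative note (editorial, not part of the original module): finalize_service()
# is where string-typed fields become structured values, e.g. 'volumes' entries are
# parsed into VolumeSpec objects, and a v1 'net: container:<name>' that refers to
# another service becomes 'network_mode: service:<name>'.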


def normalize_v1_service_format(service_dict):
    if 'log_driver' in service_dict or 'log_opt' in service_dict:
        if 'logging' not in service_dict:
            service_dict['logging'] = {}
        if 'log_driver' in service_dict:
            service_dict['logging']['driver'] = service_dict['log_driver']
            del service_dict['log_driver']
        if 'log_opt' in service_dict:
            service_dict['logging']['options'] = service_dict['log_opt']
            del service_dict['log_opt']

    if 'dockerfile' in service_dict:
        service_dict['build'] = service_dict.get('build', {})
        service_dict['build'].update({
            'dockerfile': service_dict.pop('dockerfile')
        })

    return service_dict


def merge_service_dicts_from_files(base, override, version):
    """When merging services from multiple files we need to merge the `extends`
    field. This is not handled by `merge_service_dicts()` which is used to
    perform the `extends`.
    """
    new_service = merge_service_dicts(base, override, version)
    if 'extends' in override:
        new_service['extends'] = override['extends']
    elif 'extends' in base:
        new_service['extends'] = base['extends']
    return new_service


class MergeDict(dict):
    """A dict-like object responsible for merging two dicts into one."""

    def __init__(self, base, override):
        self.base = base
        self.override = override

    def needs_merge(self, field):
        return field in self.base or field in self.override

    def merge_field(self, field, merge_func, default=None):
        if not self.needs_merge(field):
            return
        self[field] = merge_func(
            self.base.get(field, default),
            self.override.get(field, default))

    def merge_mapping(self, field, parse_func):
        if not self.needs_merge(field):
            return
        self[field] = parse_func(self.base.get(field))
        self[field].update(parse_func(self.override.get(field)))

    def merge_sequence(self, field, parse_func):
        def parse_sequence_func(seq):
            return to_mapping((parse_func(item) for item in seq), 'merge_field')

        if not self.needs_merge(field):
            return

        merged = parse_sequence_func(self.base.get(field, []))
        merged.update(parse_sequence_func(self.override.get(field, [])))
        self[field] = [item.repr() for item in sorted(merged.values())]

    def merge_scalar(self, field):
        if self.needs_merge(field):
            self[field] = self.override.get(field, self.base.get(field))
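

# Illustrative note (editorial, not part of the original module): a MergeDict
# accumulates only the merged fields, so dict(md) at the end of
# merge_service_dicts() yields the combined service. For mappings the base is
# parsed first and then updated with the override, so override keys win, and
# merge_scalar() prefers the override value whenever it is present.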


def merge_service_dicts(base, override, version):
    md = MergeDict(base, override)

    md.merge_mapping('environment', parse_environment)
    md.merge_mapping('labels', parse_labels)
    md.merge_mapping('ulimits', parse_ulimits)
    md.merge_mapping('networks', parse_networks)
    md.merge_sequence('links', ServiceLink.parse)

    for field in ['volumes', 'devices']:
        md.merge_field(field, merge_path_mappings)

    for field in [
        'ports', 'cap_add', 'cap_drop', 'expose', 'external_links',
        'security_opt', 'volumes_from', 'depends_on',
    ]:
        md.merge_field(field, merge_unique_items_lists, default=[])

    for field in ['dns', 'dns_search', 'env_file', 'tmpfs']:
        md.merge_field(field, merge_list_or_string)

    for field in set(ALLOWED_KEYS) - set(md):
        md.merge_scalar(field)

    if version == V1:
        legacy_v1_merge_image_or_build(md, base, override)
    elif md.needs_merge('build'):
        md['build'] = merge_build(md, base, override)

    return dict(md)
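

# Illustrative example (editorial, not part of the original module): merging
#   base     = {'image': 'a', 'environment': ['FOO=1']}
#   override = {'environment': ['FOO=2', 'BAR=3']}
# with version V2_0 yields {'image': 'a', 'environment': {'FOO': '2', 'BAR': '3'}},
# since environment lists are parsed into mappings and the override wins per key.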


def merge_unique_items_lists(base, override):
    return sorted(set().union(base, override))


def merge_build(output, base, override):
    def to_dict(service):
        build_config = service.get('build', {})
        if isinstance(build_config, six.string_types):
            return {'context': build_config}
        return build_config

    md = MergeDict(to_dict(base), to_dict(override))
    md.merge_scalar('context')
    md.merge_scalar('dockerfile')
    md.merge_mapping('args', parse_build_arguments)
    return dict(md)


def legacy_v1_merge_image_or_build(output, base, override):
    output.pop('image', None)
    output.pop('build', None)
    if 'image' in override:
        output['image'] = override['image']
    elif 'build' in override:
        output['build'] = override['build']
    elif 'image' in base:
        output['image'] = base['image']
    elif 'build' in base:
        output['build'] = base['build']


def merge_environment(base, override):
    env = parse_environment(base)
    env.update(parse_environment(override))
    return env


def split_label(label):
    if '=' in label:
        return label.split('=', 1)
    else:
        return label, ''


def parse_dict_or_list(split_func, type_name, arguments):
    if not arguments:
        return {}

    if isinstance(arguments, list):
        return dict(split_func(e) for e in arguments)

    if isinstance(arguments, dict):
        return dict(arguments)

    raise ConfigurationError(
        "%s \"%s\" must be a list or mapping." %
        (type_name, arguments)
    )


parse_build_arguments = functools.partial(parse_dict_or_list, split_env, 'build arguments')
parse_environment = functools.partial(parse_dict_or_list, split_env, 'environment')
parse_labels = functools.partial(parse_dict_or_list, split_label, 'labels')
parse_networks = functools.partial(parse_dict_or_list, lambda k: (k, None), 'networks')
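

# Illustrative example (editorial, not part of the original module):
#   parse_labels(['com.example.foo=bar', 'flag'])  -> {'com.example.foo': 'bar', 'flag': ''}
#   parse_networks(['front', 'back'])              -> {'front': None, 'back': None}
# A mapping passed to any of these parsers is simply copied as-is.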


def parse_ulimits(ulimits):
    if not ulimits:
        return {}

    if isinstance(ulimits, dict):
        return dict(ulimits)


def resolve_env_var(key, val, environment):
    if val is not None:
        return key, val
    elif environment and key in environment:
        return key, environment[key]
    else:
        return key, None
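

# Illustrative note (editorial, not part of the original module; assumes split_env
# maps a bare name to a None value): an environment entry given without a value
# (e.g. 'FOO' rather than 'FOO=bar') resolves to the value of FOO in the
# environment passed in, or to None when it is not set there.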


def resolve_volume_paths(working_dir, service_dict):
    return [
        resolve_volume_path(working_dir, volume)
        for volume in service_dict['volumes']
    ]


def resolve_volume_path(working_dir, volume):
    container_path, host_path = split_path_mapping(volume)

    if host_path is not None:
        if host_path.startswith('.'):
            host_path = expand_path(working_dir, host_path)
        host_path = os.path.expanduser(host_path)
        return u"{}:{}".format(host_path, container_path)
    else:
        return container_path


def normalize_build(service_dict, working_dir, environment):
    if 'build' in service_dict:
        build = {}
        # Shortcut where specifying a string is treated as the build context
        if isinstance(service_dict['build'], six.string_types):
            build['context'] = service_dict.pop('build')
        else:
            build.update(service_dict['build'])
            if 'args' in build:
                build['args'] = build_string_dict(
                    resolve_build_args(build, environment)
                )

        service_dict['build'] = build


def resolve_build_path(working_dir, build_path):
    if is_url(build_path):
        return build_path
    return expand_path(working_dir, build_path)


def is_url(build_path):
    return build_path.startswith(DOCKER_VALID_URL_PREFIXES)


def validate_paths(service_dict):
    if 'build' in service_dict:
        build = service_dict.get('build', {})

        if isinstance(build, six.string_types):
            build_path = build
        elif isinstance(build, dict) and 'context' in build:
            build_path = build['context']
        else:
            # We have a build section but no context, so nothing to validate
            return

        if (
            not is_url(build_path) and
            (not os.path.exists(build_path) or not os.access(build_path, os.R_OK))
        ):
            raise ConfigurationError(
                "build path %s either does not exist, is not accessible, "
                "or is not a valid URL." % build_path)


def merge_path_mappings(base, override):
    d = dict_from_path_mappings(base)
    d.update(dict_from_path_mappings(override))
    return path_mappings_from_dict(d)


def dict_from_path_mappings(path_mappings):
    if path_mappings:
        return dict(split_path_mapping(v) for v in path_mappings)
    else:
        return {}


def path_mappings_from_dict(d):
    return [join_path_mapping(v) for v in sorted(d.items())]


def split_path_mapping(volume_path):
    """
    Ascertain if the volume_path contains a host path as well as a container
    path. Using splitdrive so windows absolute paths won't cause issues with
    splitting on ':'.
    """
    # splitdrive is very naive, so handle special cases where we can be sure
    # the first character is not a drive.
    if (volume_path.startswith('.') or volume_path.startswith('~') or
            volume_path.startswith('/')):
        drive, volume_config = '', volume_path
    else:
        drive, volume_config = ntpath.splitdrive(volume_path)

    if ':' in volume_config:
        (host, container) = volume_config.split(':', 1)
        return (container, drive + host)
    else:
        return (volume_path, None)


def join_path_mapping(pair):
    (container, host) = pair
    if host is None:
        return container
    else:
        return ":".join((host, container))
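

# Illustrative example (editorial, not part of the original module):
#   split_path_mapping('./data:/var/lib/data')  -> ('/var/lib/data', './data')
#   split_path_mapping('/var/lib/data')         -> ('/var/lib/data', None)
#   split_path_mapping('C:\\data:/data')        -> ('/data', 'C:\\data')
# merge_path_mappings() keys volumes by container path, so an override file can
# remap where a given container path is mounted from.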


def expand_path(working_dir, path):
    return os.path.abspath(os.path.join(working_dir, os.path.expanduser(path)))


def merge_list_or_string(base, override):
    return to_list(base) + to_list(override)


def to_list(value):
    if value is None:
        return []
    elif isinstance(value, six.string_types):
        return [value]
    else:
        return value


def to_mapping(sequence, key_field):
    return {getattr(item, key_field): item for item in sequence}


def has_uppercase(name):
    return any(char in string.ascii_uppercase for char in name)


def load_yaml(filename):
    try:
        with open(filename, 'r') as fh:
            return yaml.safe_load(fh)
    except (IOError, yaml.YAMLError) as e:
        error_name = getattr(e, '__module__', '') + '.' + e.__class__.__name__
        raise ConfigurationError(u"{}: {}".format(error_name, e))