from __future__ import absolute_import
from __future__ import unicode_literals

import codecs
import logging
import operator
import os
import sys
from collections import namedtuple

import six
import yaml
from cached_property import cached_property

from ..const import COMPOSEFILE_VERSIONS
from .errors import CircularReference
from .errors import ComposeFileNotFound
from .errors import ConfigurationError
from .interpolation import interpolate_environment_variables
from .sort_services import get_service_name_from_net
from .sort_services import sort_service_dicts
from .types import parse_extra_hosts
from .types import parse_restart_spec
from .types import VolumeFromSpec
from .types import VolumeSpec
from .validation import validate_against_fields_schema
from .validation import validate_against_service_schema
from .validation import validate_extends_file_path
from .validation import validate_top_level_object
from .validation import validate_top_level_service_objects

DOCKER_CONFIG_KEYS = [
    'cap_add',
    'cap_drop',
    'cgroup_parent',
    'command',
    'cpu_quota',
    'cpu_shares',
    'cpuset',
    'detach',
    'devices',
    'dns',
    'dns_search',
    'domainname',
    'entrypoint',
    'env_file',
    'environment',
    'extra_hosts',
    'hostname',
    'image',
    'ipc',
    'labels',
    'links',
    'mac_address',
    'mem_limit',
    'memswap_limit',
    'net',
    'pid',
    'ports',
    'privileged',
    'read_only',
    'restart',
    'security_opt',
    'stdin_open',
    'stop_signal',
    'tty',
    'user',
    'volume_driver',
    'volumes',
    'volumes_from',
    'working_dir',
]

ALLOWED_KEYS = DOCKER_CONFIG_KEYS + [
    'build',
    'container_name',
    'dockerfile',
    'expose',
    'external_links',
    'logging',
]

DOCKER_VALID_URL_PREFIXES = (
    'http://',
    'https://',
    'git://',
    'github.com/',
    'git@',
)

SUPPORTED_FILENAMES = [
    'docker-compose.yml',
    'docker-compose.yaml',
]

DEFAULT_OVERRIDE_FILENAME = 'docker-compose.override.yml'

log = logging.getLogger(__name__)


class ConfigDetails(namedtuple('_ConfigDetails', 'working_dir config_files')):
    """
    :param working_dir: the directory to use for relative paths in the config
    :type working_dir: string
    :param config_files: list of configuration files to load
    :type config_files: list of :class:`ConfigFile`
    """


class ConfigFile(namedtuple('_ConfigFile', 'filename config')):
    """
    :param filename: filename of the config file
    :type filename: string
    :param config: contents of the config file
    :type config: :class:`dict`
    """

    @classmethod
    def from_filename(cls, filename):
        return cls(filename, load_yaml(filename))

    @cached_property
    def version(self):
        if self.config is None:
            return 1
        version = self.config.get('version', 1)
        if isinstance(version, dict):
            log.warn("Unexpected type for field 'version' in file {}; assuming "
                     "'version' is the name of a service, and defaulting to "
                     "Compose file version 1".format(self.filename))
            return 1
        return version

    def get_service_dicts(self):
        return self.config if self.version == 1 else self.config.get('services', {})

    def get_volumes(self):
        return {} if self.version == 1 else self.config.get('volumes', {})

    def get_networks(self):
        return {} if self.version == 1 else self.config.get('networks', {})


class Config(namedtuple('_Config', 'version services volumes networks')):
    """
    :param version: configuration version
    :type version: int
    :param services: List of service description dictionaries
    :type services: :class:`list`
    :param volumes: Dictionary mapping volume names to description dictionaries
    :type volumes: :class:`dict`
    :param networks: Dictionary mapping network names to description dictionaries
    :type networks: :class:`dict`
    """


class ServiceConfig(namedtuple('_ServiceConfig', 'working_dir filename name config')):

    @classmethod
    def with_abs_paths(cls, working_dir, filename, name, config):
        if not working_dir:
            raise ValueError("No working_dir for ServiceConfig.")

        return cls(
            os.path.abspath(working_dir),
            os.path.abspath(filename) if filename else filename,
            name,
            config)


def find(base_dir, filenames):
    if filenames == ['-']:
        return ConfigDetails(
            os.getcwd(),
            [ConfigFile(None, yaml.safe_load(sys.stdin))])

    if filenames:
        filenames = [os.path.join(base_dir, f) for f in filenames]
    else:
        filenames = get_default_config_files(base_dir)

    log.debug("Using configuration files: {}".format(",".join(filenames)))
    return ConfigDetails(
        os.path.dirname(filenames[0]),
        [ConfigFile.from_filename(f) for f in filenames])


def validate_config_version(config_details):
    main_file = config_details.config_files[0]
    validate_top_level_object(main_file)
    for next_file in config_details.config_files[1:]:
        validate_top_level_object(next_file)

        if main_file.version != next_file.version:
            raise ConfigurationError(
                "Version mismatch: file {0} specifies version {1} but "
                "extension file {2} uses version {3}".format(
                    main_file.filename,
                    main_file.version,
                    next_file.filename,
                    next_file.version))

    if main_file.version not in COMPOSEFILE_VERSIONS:
        raise ConfigurationError(
            'Invalid Compose file version: {0}'.format(main_file.version))


def get_default_config_files(base_dir):
    (candidates, path) = find_candidates_in_parent_dirs(SUPPORTED_FILENAMES, base_dir)

    if not candidates:
        raise ComposeFileNotFound(SUPPORTED_FILENAMES)

    winner = candidates[0]

    if len(candidates) > 1:
        log.warn("Found multiple config files with supported names: %s", ", ".join(candidates))
        log.warn("Using %s\n", winner)

    return [os.path.join(path, winner)] + get_default_override_file(path)


def get_default_override_file(path):
    override_filename = os.path.join(path, DEFAULT_OVERRIDE_FILENAME)
    return [override_filename] if os.path.exists(override_filename) else []


def find_candidates_in_parent_dirs(filenames, path):
    """
    Given a directory path to start, looks for filenames in the
    directory, and then each parent directory successively,
    until found.

    Returns tuple (candidates, path).
    """
    candidates = [filename for filename in filenames
                  if os.path.exists(os.path.join(path, filename))]

    if not candidates:
        parent_dir = os.path.join(path, '..')
        if os.path.abspath(parent_dir) != os.path.abspath(path):
            return find_candidates_in_parent_dirs(filenames, parent_dir)

    return (candidates, path)
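

# A hypothetical, self-contained sketch of the parent-directory search above:
# starting from a nested directory, the search climbs one '..' at a time until
# a directory containing one of the supported filenames is found. It builds a
# throwaway tree under a temp directory; the layout is purely illustrative.
def _example_find_candidates_in_parent_dirs():  # illustrative only, not used by the module
    import shutil
    import tempfile

    base = tempfile.mkdtemp()
    try:
        nested = os.path.join(base, 'src', 'app')
        os.makedirs(nested)
        open(os.path.join(base, 'docker-compose.yml'), 'w').close()

        candidates, path = find_candidates_in_parent_dirs(SUPPORTED_FILENAMES, nested)
        assert candidates == ['docker-compose.yml']
        # The returned path still contains the '..' components used to climb up.
        assert os.path.abspath(path) == os.path.abspath(base)
    finally:
        shutil.rmtree(base)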


def load(config_details):
    """Load the configuration from a working directory and a list of
    configuration files. Files are loaded in order, and merged on top
    of each other to create the final configuration.

    Return a fully interpolated, extended and validated configuration.
    """
    validate_config_version(config_details)

    processed_files = [
        process_config_file(config_file)
        for config_file in config_details.config_files
    ]
    config_details = config_details._replace(config_files=processed_files)

    main_file = config_details.config_files[0]
    volumes = load_mapping(config_details.config_files, 'get_volumes', 'Volume')
    networks = load_mapping(config_details.config_files, 'get_networks', 'Network')
    service_dicts = load_services(
        config_details.working_dir,
        main_file.filename,
        [file.get_service_dicts() for file in config_details.config_files],
        main_file.version)

    return Config(main_file.version, service_dicts, volumes, networks)
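

# A hedged, minimal sketch of calling load() end to end with an in-memory
# version 1 configuration (the kind of ConfigDetails that find() builds from
# stdin). The single 'web' service below is hypothetical, and the sketch
# assumes the sibling validation/interpolation modules accept it as-is.
def _example_load():  # illustrative only, not used by the module
    config_details = ConfigDetails(
        os.getcwd(),
        [ConfigFile(None, {'web': {'image': 'busybox', 'command': 'top'}})])
    config = load(config_details)
    assert config.version == 1
    assert config.services[0]['name'] == 'web'
    return config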


def load_mapping(config_files, get_func, entity_type):
    mapping = {}

    for config_file in config_files:
        for name, config in getattr(config_file, get_func)().items():
            mapping[name] = config or {}
            if not config:
                continue

            external = config.get('external')
            if external:
                if len(config.keys()) > 1:
                    raise ConfigurationError(
                        '{} {} declared as external but specifies'
                        ' additional attributes ({}). '.format(
                            entity_type,
                            name,
                            ', '.join([k for k in config.keys() if k != 'external'])
                        )
                    )
                if isinstance(external, dict):
                    config['external_name'] = external.get('name')
                else:
                    config['external_name'] = name

            mapping[name] = config

    return mapping


def load_services(working_dir, filename, service_configs, version):
    def build_service(service_name, service_dict, service_names):
        service_config = ServiceConfig.with_abs_paths(
            working_dir,
            filename,
            service_name,
            service_dict)
        resolver = ServiceExtendsResolver(service_config, version)
        service_dict = process_service(resolver.run())

        # TODO: move to validate_service()
        validate_against_service_schema(service_dict, service_config.name, version)
        validate_paths(service_dict)

        service_dict = finalize_service(
            service_config._replace(config=service_dict),
            service_names,
            version)
        service_dict['name'] = service_config.name
        return service_dict

    def build_services(service_config):
        service_names = service_config.keys()
        return sort_service_dicts([
            build_service(name, service_dict, service_names)
            for name, service_dict in service_config.items()
        ])

    def merge_services(base, override):
        all_service_names = set(base) | set(override)
        return {
            name: merge_service_dicts_from_files(
                base.get(name, {}),
                override.get(name, {}),
                version)
            for name in all_service_names
        }

    service_config = service_configs[0]
    for next_config in service_configs[1:]:
        service_config = merge_services(service_config, next_config)

    return build_services(service_config)


def process_config_file(config_file, service_name=None):
    service_dicts = config_file.get_service_dicts()
    validate_top_level_service_objects(config_file.filename, service_dicts)

    interpolated_config = interpolate_environment_variables(service_dicts, 'service')

    if config_file.version == 2:
        processed_config = dict(config_file.config)
        processed_config['services'] = interpolated_config
        processed_config['volumes'] = interpolate_environment_variables(
            config_file.get_volumes(), 'volume')
        processed_config['networks'] = interpolate_environment_variables(
            config_file.get_networks(), 'network')

    if config_file.version == 1:
        processed_config = interpolated_config

    config_file = config_file._replace(config=processed_config)
    validate_against_fields_schema(config_file)

    if service_name and service_name not in processed_config:
        raise ConfigurationError(
            "Cannot extend service '{}' in {}: Service not found".format(
                service_name, config_file.filename))

    return config_file


class ServiceExtendsResolver(object):
    def __init__(self, service_config, version, already_seen=None):
        self.service_config = service_config
        self.working_dir = service_config.working_dir
        self.already_seen = already_seen or []
        self.version = version

    @property
    def signature(self):
        return self.service_config.filename, self.service_config.name

    def detect_cycle(self):
        if self.signature in self.already_seen:
            raise CircularReference(self.already_seen + [self.signature])

    def run(self):
        self.detect_cycle()

        if 'extends' in self.service_config.config:
            service_dict = self.resolve_extends(*self.validate_and_construct_extends())
            return self.service_config._replace(config=service_dict)

        return self.service_config

    def validate_and_construct_extends(self):
        extends = self.service_config.config['extends']
        if not isinstance(extends, dict):
            extends = {'service': extends}

        config_path = self.get_extended_config_path(extends)
        service_name = extends['service']

        extended_file = process_config_file(
            ConfigFile.from_filename(config_path),
            service_name=service_name)
        service_config = extended_file.config[service_name]
        return config_path, service_config, service_name

    def resolve_extends(self, extended_config_path, service_dict, service_name):
        resolver = ServiceExtendsResolver(
            ServiceConfig.with_abs_paths(
                os.path.dirname(extended_config_path),
                extended_config_path,
                service_name,
                service_dict),
            self.version,
            already_seen=self.already_seen + [self.signature])

        service_config = resolver.run()
        other_service_dict = process_service(service_config)
        validate_extended_service_dict(
            other_service_dict,
            extended_config_path,
            service_name,
        )

        return merge_service_dicts(
            other_service_dict,
            self.service_config.config,
            self.version)

    def get_extended_config_path(self, extends_options):
        """The service we are extending either has a value for 'file' set,
        which we need to obtain a full path to, or we are extending from a
        service defined in our own file.
        """
        filename = self.service_config.filename
        validate_extends_file_path(
            self.service_config.name,
            extends_options,
            filename)
        if 'file' in extends_options:
            return expand_path(self.working_dir, extends_options['file'])
        return filename
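

# A hypothetical sketch of the two 'extends' shapes handled above: a mapping
# without 'file' resolves against the current Compose file, while a mapping
# with 'file' is expanded relative to the working directory. The file and
# service names are made up, and the sketch assumes validate_extends_file_path()
# passes when a filename is available.
def _example_get_extended_config_path():  # illustrative only, not used by the module
    service_config = ServiceConfig.with_abs_paths(
        '.', 'docker-compose.yml', 'web',
        {'image': 'busybox', 'extends': {'service': 'base'}})
    resolver = ServiceExtendsResolver(service_config, version=1)

    same_file = resolver.get_extended_config_path({'service': 'base'})
    other_file = resolver.get_extended_config_path(
        {'file': 'common.yml', 'service': 'base'})

    assert same_file == service_config.filename
    assert other_file == expand_path(service_config.working_dir, 'common.yml')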


def resolve_environment(service_dict):
    """Unpack any environment variables from an env_file, if set.
    Interpolate environment values if set.
    """
    env = {}
    for env_file in service_dict.get('env_file', []):
        env.update(env_vars_from_file(env_file))

    env.update(parse_environment(service_dict.get('environment')))
    return dict(resolve_env_var(k, v) for k, v in six.iteritems(env))
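

# A minimal, hypothetical sketch of environment resolution: explicit
# 'environment' entries are parsed, and a bare key with no value is looked up
# in os.environ (falling back to ''). The variable names are illustrative;
# env_file handling is omitted here.
def _example_resolve_environment():  # illustrative only, not used by the module
    os.environ['EXAMPLE_FROM_SHELL'] = 'shell-value'
    service_dict = {'environment': ['FOO=bar', 'EXAMPLE_FROM_SHELL']}
    env = resolve_environment(service_dict)
    assert env == {'FOO': 'bar', 'EXAMPLE_FROM_SHELL': 'shell-value'}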


def validate_extended_service_dict(service_dict, filename, service):
    error_prefix = "Cannot extend service '%s' in %s:" % (service, filename)

    if 'links' in service_dict:
        raise ConfigurationError(
            "%s services with 'links' cannot be extended" % error_prefix)

    if 'volumes_from' in service_dict:
        raise ConfigurationError(
            "%s services with 'volumes_from' cannot be extended" % error_prefix)

    if 'net' in service_dict:
        if get_service_name_from_net(service_dict['net']) is not None:
            raise ConfigurationError(
                "%s services with 'net: container' cannot be extended" % error_prefix)


def validate_ulimits(ulimit_config):
    for limit_name, soft_hard_values in six.iteritems(ulimit_config):
        if isinstance(soft_hard_values, dict):
            if not soft_hard_values['soft'] <= soft_hard_values['hard']:
                raise ConfigurationError(
                    "ulimit_config \"{}\" cannot contain a 'soft' value higher "
                    "than 'hard' value".format(ulimit_config))


# TODO: rename to normalize_service
def process_service(service_config):
    working_dir = service_config.working_dir
    service_dict = dict(service_config.config)

    if 'env_file' in service_dict:
        service_dict['env_file'] = [
            expand_path(working_dir, path)
            for path in to_list(service_dict['env_file'])
        ]

    if 'volumes' in service_dict and service_dict.get('volume_driver') is None:
        service_dict['volumes'] = resolve_volume_paths(working_dir, service_dict)

    if 'build' in service_dict:
        service_dict['build'] = resolve_build_path(working_dir, service_dict['build'])

    if 'labels' in service_dict:
        service_dict['labels'] = parse_labels(service_dict['labels'])

    if 'extra_hosts' in service_dict:
        service_dict['extra_hosts'] = parse_extra_hosts(service_dict['extra_hosts'])

    for field in ['dns', 'dns_search']:
        if field in service_dict:
            service_dict[field] = to_list(service_dict[field])

    # TODO: move to a validate_service()
    if 'ulimits' in service_dict:
        validate_ulimits(service_dict['ulimits'])

    return service_dict


def finalize_service(service_config, service_names, version):
    service_dict = dict(service_config.config)

    if 'environment' in service_dict or 'env_file' in service_dict:
        service_dict['environment'] = resolve_environment(service_dict)
        service_dict.pop('env_file', None)

    if 'volumes_from' in service_dict:
        service_dict['volumes_from'] = [
            VolumeFromSpec.parse(vf, service_names, version)
            for vf in service_dict['volumes_from']
        ]

    if 'volumes' in service_dict:
        service_dict['volumes'] = [
            VolumeSpec.parse(v) for v in service_dict['volumes']]

    if 'restart' in service_dict:
        service_dict['restart'] = parse_restart_spec(service_dict['restart'])

    return normalize_v1_service_format(service_dict)


def normalize_v1_service_format(service_dict):
    if 'log_driver' in service_dict or 'log_opt' in service_dict:
        if 'logging' not in service_dict:
            service_dict['logging'] = {}
        if 'log_driver' in service_dict:
            service_dict['logging']['driver'] = service_dict['log_driver']
            del service_dict['log_driver']
        if 'log_opt' in service_dict:
            service_dict['logging']['options'] = service_dict['log_opt']
            del service_dict['log_opt']

    return service_dict


def merge_service_dicts_from_files(base, override, version):
    """When merging services from multiple files we need to merge the `extends`
    field. This is not handled by `merge_service_dicts()`, which is also used
    to perform the `extends` merge itself.
    """
    new_service = merge_service_dicts(base, override, version)
    if 'extends' in override:
        new_service['extends'] = override['extends']
    elif 'extends' in base:
        new_service['extends'] = base['extends']
    return new_service
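

# A hypothetical sketch of merging the same service from two files: scalar
# keys are overridden, list keys such as 'ports' are concatenated, and the
# 'extends' field is carried over from whichever file defines it. The service
# contents below are made up for illustration.
def _example_merge_service_dicts_from_files():  # illustrative only, not used by the module
    base = {'image': 'busybox', 'ports': ['8000'], 'extends': {'service': 'app'}}
    override = {'command': 'top', 'ports': ['8001']}

    merged = merge_service_dicts_from_files(base, override, version=1)
    assert merged['command'] == 'top'
    assert merged['ports'] == ['8000', '8001']
    assert merged['extends'] == {'service': 'app'}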


def merge_service_dicts(base, override, version):
    d = {}

    def merge_field(field, merge_func, default=None):
        if field in base or field in override:
            d[field] = merge_func(
                base.get(field, default),
                override.get(field, default))

    def merge_mapping(mapping, parse_func):
        if mapping in base or mapping in override:
            merged = parse_func(base.get(mapping, None))
            merged.update(parse_func(override.get(mapping, None)))
            d[mapping] = merged

    merge_mapping('environment', parse_environment)
    merge_mapping('labels', parse_labels)
    merge_mapping('ulimits', parse_ulimits)

    for field in ['volumes', 'devices']:
        merge_field(field, merge_path_mappings)

    for field in ['ports', 'expose', 'external_links']:
        merge_field(field, operator.add, default=[])

    for field in ['dns', 'dns_search', 'env_file']:
        merge_field(field, merge_list_or_string)

    for field in set(ALLOWED_KEYS) - set(d):
        if field in base or field in override:
            d[field] = override.get(field, base.get(field))

    if version == 1:
        legacy_v1_merge_image_or_build(d, base, override)

    return d


def legacy_v1_merge_image_or_build(output, base, override):
    output.pop('image', None)
    output.pop('build', None)
    if 'image' in override:
        output['image'] = override['image']
    elif 'build' in override:
        output['build'] = override['build']
    elif 'image' in base:
        output['image'] = base['image']
    elif 'build' in base:
        output['build'] = base['build']


def merge_environment(base, override):
    env = parse_environment(base)
    env.update(parse_environment(override))
    return env


def parse_environment(environment):
    if not environment:
        return {}
    if isinstance(environment, list):
        return dict(split_env(e) for e in environment)
    if isinstance(environment, dict):
        return dict(environment)
    raise ConfigurationError(
        "environment \"%s\" must be a list or mapping." %
        environment
    )


def split_env(env):
    if isinstance(env, six.binary_type):
        env = env.decode('utf-8', 'replace')
    if '=' in env:
        return env.split('=', 1)
    else:
        return env, None


def resolve_env_var(key, val):
    if val is not None:
        return key, val
    elif key in os.environ:
        return key, os.environ[key]
    else:
        return key, ''


def env_vars_from_file(filename):
    """
    Read in a line delimited file of environment variables.
    """
    if not os.path.exists(filename):
        raise ConfigurationError("Couldn't find env file: %s" % filename)
    env = {}
    for line in codecs.open(filename, 'r', 'utf-8'):
        line = line.strip()
        if line and not line.startswith('#'):
            k, v = split_env(line)
            env[k] = v
    return env
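

# A hypothetical sketch of the env file format accepted above: comments and
# blank lines are skipped, 'KEY=value' lines are split once on '=', and a bare
# key maps to None (later resolved against os.environ). The file is a
# throwaway temp file created only for illustration.
def _example_env_vars_from_file():  # illustrative only, not used by the module
    import tempfile

    with tempfile.NamedTemporaryFile('w', suffix='.env', delete=False) as f:
        f.write('# comment\nFOO=bar\n\nDEBUG\n')
    try:
        assert env_vars_from_file(f.name) == {'FOO': 'bar', 'DEBUG': None}
    finally:
        os.remove(f.name)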


def resolve_volume_paths(working_dir, service_dict):
    return [
        resolve_volume_path(working_dir, volume)
        for volume in service_dict['volumes']
    ]


def resolve_volume_path(working_dir, volume):
    container_path, host_path = split_path_mapping(volume)
    if host_path is not None:
        if host_path.startswith('.'):
            host_path = expand_path(working_dir, host_path)
        host_path = os.path.expanduser(host_path)
        return u"{}:{}".format(host_path, container_path)
    else:
        return container_path


def resolve_build_path(working_dir, build_path):
    if is_url(build_path):
        return build_path
    return expand_path(working_dir, build_path)


def is_url(build_path):
    return build_path.startswith(DOCKER_VALID_URL_PREFIXES)


def validate_paths(service_dict):
    if 'build' in service_dict:
        build_path = service_dict['build']
        if (
            not is_url(build_path) and
            (not os.path.exists(build_path) or not os.access(build_path, os.R_OK))
        ):
            raise ConfigurationError(
                "build path %s either does not exist, is not accessible, "
                "or is not a valid URL." % build_path)


def merge_path_mappings(base, override):
    d = dict_from_path_mappings(base)
    d.update(dict_from_path_mappings(override))
    return path_mappings_from_dict(d)


def dict_from_path_mappings(path_mappings):
    if path_mappings:
        return dict(split_path_mapping(v) for v in path_mappings)
    else:
        return {}


def path_mappings_from_dict(d):
    return [join_path_mapping(v) for v in d.items()]


def split_path_mapping(volume_path):
    """
    Ascertain if the volume_path contains a host path as well as a container
    path. Using splitdrive so windows absolute paths won't cause issues with
    splitting on ':'.
    """
    # splitdrive has limitations when it comes to relative paths, so when the
    # path is relative, handle the special case and set the drive to ''
    if volume_path.startswith('.') or volume_path.startswith('~'):
        drive, volume_config = '', volume_path
    else:
        drive, volume_config = os.path.splitdrive(volume_path)

    if ':' in volume_config:
        (host, container) = volume_config.split(':', 1)
        return (container, drive + host)
    else:
        return (volume_path, None)
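

# A minimal sketch of the (container_path, host_path) split performed above,
# using hypothetical volume strings. On Windows, splitdrive() keeps a leading
# drive such as 'C:' attached to the host path instead of splitting on its ':'.
def _example_split_path_mapping():  # illustrative only, not used by the module
    assert split_path_mapping('.:/code') == ('/code', '.')
    assert split_path_mapping('/var/data:/data') == ('/data', '/var/data')
    assert split_path_mapping('/var/lib/data') == ('/var/lib/data', None)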


def join_path_mapping(pair):
    (container, host) = pair
    if host is None:
        return container
    else:
        return ":".join((host, container))


def parse_labels(labels):
    if not labels:
        return {}

    if isinstance(labels, list):
        return dict(split_label(e) for e in labels)

    if isinstance(labels, dict):
        return dict(labels)


def split_label(label):
    if '=' in label:
        return label.split('=', 1)
    else:
        return label, ''


def parse_ulimits(ulimits):
    if not ulimits:
        return {}

    if isinstance(ulimits, dict):
        return dict(ulimits)


def expand_path(working_dir, path):
    return os.path.abspath(os.path.join(working_dir, os.path.expanduser(path)))


def merge_list_or_string(base, override):
    return to_list(base) + to_list(override)


def to_list(value):
    if value is None:
        return []
    elif isinstance(value, six.string_types):
        return [value]
    else:
        return value


def load_yaml(filename):
    try:
        with open(filename, 'r') as fh:
            return yaml.safe_load(fh)
    except (IOError, yaml.YAMLError) as e:
        error_name = getattr(e, '__module__', '') + '.' + e.__class__.__name__
        raise ConfigurationError(u"{}: {}".format(error_name, e))