# utils.py
import hashlib
import json
import logging
import os
import concurrent.futures

from .const import DEFAULT_MAX_WORKERS

# Module-level logger named after this module, per the logging convention.
log = logging.getLogger(__name__)
  8. def parallel_execute(command, containers, doing_msg, done_msg, **options):
  9. """
  10. Execute a given command upon a list of containers in parallel.
  11. """
  12. max_workers = os.environ.get('COMPOSE_MAX_WORKERS', DEFAULT_MAX_WORKERS)
  13. def container_command_execute(container, command, **options):
  14. log.info("{} {}...".format(doing_msg, container.name))
  15. return getattr(container, command)(**options)
  16. with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
  17. future_container = {
  18. executor.submit(
  19. container_command_execute,
  20. container,
  21. command,
  22. **options
  23. ): container for container in containers
  24. }
  25. for future in concurrent.futures.as_completed(future_container):
  26. container = future_container[future]
  27. log.info("{} {}".format(done_msg, container.name))
  28. def json_hash(obj):
  29. dump = json.dumps(obj, sort_keys=True, separators=(',', ':'))
  30. h = hashlib.sha256()
  31. h.update(dump)
  32. return h.hexdigest()