parallel.py

from __future__ import absolute_import
from __future__ import unicode_literals

import logging
import operator
import sys
from threading import Thread

from docker.errors import APIError
from six.moves import _thread as thread
from six.moves.queue import Empty
from six.moves.queue import Queue

from compose.cli.signals import ShutdownException
from compose.utils import get_output_stream

log = logging.getLogger(__name__)


def parallel_execute(objects, func, get_name, msg, get_deps=None):
    """Run func on objects in parallel, ensuring that func is run on an
    object only after it has been run on all of that object's dependencies.

    get_deps, called on an object, must return a collection of its dependencies.
    get_name, called on an object, must return its name.
    """
    objects = list(objects)
    stream = get_output_stream(sys.stderr)
    writer = ParallelStreamWriter(stream, msg)

    for obj in objects:
        writer.initialize(get_name(obj))

    events = parallel_execute_stream(objects, func, get_deps)

    errors = {}
    results = []
    error_to_reraise = None

    for obj, result, exception in events:
        if exception is None:
            writer.write(get_name(obj), 'done')
            results.append(result)
        elif isinstance(exception, APIError):
            errors[get_name(obj)] = exception.explanation
            writer.write(get_name(obj), 'error')
        elif isinstance(exception, UpstreamError):
            writer.write(get_name(obj), 'error')
        else:
            errors[get_name(obj)] = exception
            error_to_reraise = exception

    for obj_name, error in errors.items():
        stream.write("\nERROR: for {} {}\n".format(obj_name, error))

    if error_to_reraise:
        raise error_to_reraise

    return results
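
# A minimal usage sketch with dependencies (the `Node` class is hypothetical
# and only illustrates the shapes expected of func, get_name and get_deps):
#
#     class Node(object):
#         def __init__(self, name, deps=()):
#             self.name = name
#             self.deps = list(deps)
#
#     db = Node('db')
#     web = Node('web', deps=[db])
#     parallel_execute(
#         [db, web],
#         lambda node: node.name,                # func: work to run per object
#         operator.attrgetter('name'),           # get_name: label for each status line
#         'Processing',                          # msg: prefix for each status line
#         get_deps=operator.attrgetter('deps'))  # db must finish before web starts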


def _no_deps(x):
    return []


def parallel_execute_stream(objects, func, get_deps):
    if get_deps is None:
        get_deps = _no_deps

    results = Queue()

    started = set()   # objects being processed
    finished = set()  # objects which have been processed
    failed = set()    # objects which either failed or whose dependencies failed

    while len(finished) + len(failed) < len(objects):
        for event in feed_queue(objects, func, get_deps, results, started, finished, failed):
            yield event

        try:
            event = results.get(timeout=1)
        except Empty:
            continue
        # See https://github.com/docker/compose/issues/189
        except thread.error:
            raise ShutdownException()

        obj, _, exception = event
        if exception is None:
            log.debug('Finished processing: {}'.format(obj))
            finished.add(obj)
        else:
            log.debug('Failed: {}'.format(obj))
            failed.add(obj)

        yield event
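
# Each yielded event is an (obj, result, exception) triple with exactly one of
# result/exception populated. A consumer sketch (the handler names are
# illustrative, not part of this module):
#
#     for obj, result, exception in parallel_execute_stream(objects, func, None):
#         if exception is None:
#             handle_success(obj, result)
#         else:
#             handle_failure(obj, exception)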


def queue_producer(obj, func, results):
    try:
        result = func(obj)
        results.put((obj, result, None))
    except Exception as e:
        results.put((obj, None, e))


def feed_queue(objects, func, get_deps, results, started, finished, failed):
    pending = set(objects) - started - finished - failed
    log.debug('Pending: {}'.format(pending))

    for obj in pending:
        deps = get_deps(obj)

        if any(dep in failed for dep in deps):
            log.debug('{} has upstream errors - not processing'.format(obj))
            yield (obj, None, UpstreamError())
            failed.add(obj)
        elif all(
            dep not in objects or dep in finished
            for dep in deps
        ):
            log.debug('Starting producer thread for {}'.format(obj))
            t = Thread(target=queue_producer, args=(obj, func, results))
            t.daemon = True
            t.start()
            started.add(obj)
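
# Scheduling sketch: with objects = [a, b] and get_deps(b) == [a], feed_queue
# starts a producer thread for `a` right away, holds `b` back until `a` lands
# in `finished`, and fails `b` with UpstreamError if `a` ends up in `failed`.
# Dependencies that are not themselves in `objects` are treated as already
# satisfied (the `dep not in objects` test above).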


class UpstreamError(Exception):
    pass


class ParallelStreamWriter(object):
    """Write out messages for operations happening in parallel.

    Each operation has its own line; ANSI escape codes are used to jump
    to the correct line and overwrite it in place.
    """
    def __init__(self, stream, msg):
        self.stream = stream
        self.msg = msg
        self.lines = []

    def initialize(self, obj_index):
        if self.msg is None:
            return
        self.lines.append(obj_index)
        self.stream.write("{} {} ... \r\n".format(self.msg, obj_index))
        self.stream.flush()

    def write(self, obj_index, status):
        if self.msg is None:
            return
        position = self.lines.index(obj_index)
        diff = len(self.lines) - position
        # move up
        self.stream.write("%c[%dA" % (27, diff))
        # erase
        self.stream.write("%c[2K\r" % 27)
        self.stream.write("{} {} ... {}\r".format(self.msg, obj_index, status))
        # move back down
        self.stream.write("%c[%dB" % (27, diff))
        self.stream.flush()
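
# A minimal sketch of the writer on its own (the names are illustrative):
#
#     writer = ParallelStreamWriter(sys.stderr, 'Starting')
#     writer.initialize('web')     # prints "Starting web ... "
#     writer.initialize('db')      # prints "Starting db ... "
#     writer.write('db', 'done')   # moves up 1 line, rewrites it as
#                                  # "Starting db ... done", moves back down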


def parallel_operation(containers, operation, options, message):
    parallel_execute(
        containers,
        operator.methodcaller(operation, **options),
        operator.attrgetter('name'),
        message)


def parallel_remove(containers, options):
    stopped_containers = [c for c in containers if not c.is_running]
    parallel_operation(stopped_containers, 'remove', options, 'Removing')


def parallel_start(containers, options):
    parallel_operation(containers, 'start', options, 'Starting')


def parallel_pause(containers, options):
    parallel_operation(containers, 'pause', options, 'Pausing')


def parallel_unpause(containers, options):
    parallel_operation(containers, 'unpause', options, 'Unpausing')


def parallel_kill(containers, options):
    parallel_operation(containers, 'kill', options, 'Killing')


def parallel_restart(containers, options):
    parallel_operation(containers, 'restart', options, 'Restarting')
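
# Usage sketch for the helpers above, assuming `containers` is a list of
# compose Container objects; `options` is forwarded as keyword arguments to
# the named container method (the specific options shown are assumptions):
#
#     parallel_kill(containers, {'signal': 'SIGTERM'})
#     parallel_remove(containers, {'v': True})   # also remove volumes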