parallel_test.py (4.1 KB)
  1. from __future__ import absolute_import
  2. from __future__ import unicode_literals
  3. from threading import Lock
  4. import six
  5. from docker.errors import APIError
  6. from compose.parallel import GlobalLimit
  7. from compose.parallel import parallel_execute
  8. from compose.parallel import parallel_execute_iter
  9. from compose.parallel import ParallelStreamWriter
  10. from compose.parallel import UpstreamError
  11. web = 'web'
  12. db = 'db'
  13. data_volume = 'data_volume'
  14. cache = 'cache'
  15. objects = [web, db, data_volume, cache]
  16. deps = {
  17. web: [db, cache],
  18. db: [data_volume],
  19. data_volume: [],
  20. cache: [],
  21. }
  22. def get_deps(obj):
  23. return [(dep, None) for dep in deps[obj]]
  24. def test_parallel_execute():
  25. results, errors = parallel_execute(
  26. objects=[1, 2, 3, 4, 5],
  27. func=lambda x: x * 2,
  28. get_name=six.text_type,
  29. msg="Doubling",
  30. )
  31. assert sorted(results) == [2, 4, 6, 8, 10]
  32. assert errors == {}
  33. def test_parallel_execute_with_limit():
  34. limit = 1
  35. tasks = 20
  36. lock = Lock()
  37. def f(obj):
  38. locked = lock.acquire(False)
  39. # we should always get the lock because we're the only thread running
  40. assert locked
  41. lock.release()
  42. return None
  43. results, errors = parallel_execute(
  44. objects=list(range(tasks)),
  45. func=f,
  46. get_name=six.text_type,
  47. msg="Testing",
  48. limit=limit,
  49. )
  50. assert results == tasks * [None]
  51. assert errors == {}
  52. def test_parallel_execute_with_global_limit():
  53. GlobalLimit.set_global_limit(1)
  54. tasks = 20
  55. lock = Lock()
  56. def f(obj):
  57. locked = lock.acquire(False)
  58. # we should always get the lock because we're the only thread running
  59. assert locked
  60. lock.release()
  61. return None
  62. results, errors = parallel_execute(
  63. objects=list(range(tasks)),
  64. func=f,
  65. get_name=six.text_type,
  66. msg="Testing",
  67. )
  68. assert results == tasks * [None]
  69. assert errors == {}
  70. def test_parallel_execute_with_deps():
  71. log = []
  72. def process(x):
  73. log.append(x)
  74. parallel_execute(
  75. objects=objects,
  76. func=process,
  77. get_name=lambda obj: obj,
  78. msg="Processing",
  79. get_deps=get_deps,
  80. )
  81. assert sorted(log) == sorted(objects)
  82. assert log.index(data_volume) < log.index(db)
  83. assert log.index(db) < log.index(web)
  84. assert log.index(cache) < log.index(web)
  85. def test_parallel_execute_with_upstream_errors():
  86. log = []
  87. def process(x):
  88. if x is data_volume:
  89. raise APIError(None, None, "Something went wrong")
  90. log.append(x)
  91. parallel_execute(
  92. objects=objects,
  93. func=process,
  94. get_name=lambda obj: obj,
  95. msg="Processing",
  96. get_deps=get_deps,
  97. )
  98. assert log == [cache]
  99. events = [
  100. (obj, result, type(exception))
  101. for obj, result, exception
  102. in parallel_execute_iter(objects, process, get_deps, None)
  103. ]
  104. assert (cache, None, type(None)) in events
  105. assert (data_volume, None, APIError) in events
  106. assert (db, None, UpstreamError) in events
  107. assert (web, None, UpstreamError) in events
  108. def test_parallel_execute_alignment(capsys):
  109. results, errors = parallel_execute(
  110. objects=["short", "a very long name"],
  111. func=lambda x: x,
  112. get_name=six.text_type,
  113. msg="Aligning",
  114. )
  115. assert errors == {}
  116. _, err = capsys.readouterr()
  117. a, b = err.split('\n')[:2]
  118. assert a.index('...') == b.index('...')
  119. def test_parallel_execute_ansi(capsys):
  120. ParallelStreamWriter.set_noansi(value=False)
  121. results, errors = parallel_execute(
  122. objects=["something", "something more"],
  123. func=lambda x: x,
  124. get_name=six.text_type,
  125. msg="Control characters",
  126. )
  127. assert errors == {}
  128. _, err = capsys.readouterr()
  129. assert "\x1b" in err
  130. def test_parallel_execute_noansi(capsys):
  131. ParallelStreamWriter.set_noansi()
  132. results, errors = parallel_execute(
  133. objects=["something", "something more"],
  134. func=lambda x: x,
  135. get_name=six.text_type,
  136. msg="Control characters",
  137. )
  138. assert errors == {}
  139. _, err = capsys.readouterr()
  140. assert "\x1b" not in err