浏览代码

Rename a couple of functions in parallel.py

Signed-off-by: Aanand Prasad <[email protected]>
Aanand Prasad 9 年之前
父节点
当前提交
15c5bc2e6c
共有 2 个文件被更改,包括 6 次插入6 次删除
  1. 4 4
      compose/parallel.py
  2. 2 2
      tests/unit/parallel_test.py

+ 4 - 4
compose/parallel.py

@@ -32,7 +32,7 @@ def parallel_execute(objects, func, get_name, msg, get_deps=None):
     for obj in objects:
         writer.initialize(get_name(obj))
 
-    events = parallel_execute_stream(objects, func, get_deps)
+    events = parallel_execute_iter(objects, func, get_deps)
 
     errors = {}
     results = []
@@ -86,7 +86,7 @@ class State(object):
         return set(self.objects) - self.started - self.finished - self.failed
 
 
-def parallel_execute_stream(objects, func, get_deps):
+def parallel_execute_iter(objects, func, get_deps):
     """
     Runs func on objects in parallel while ensuring that func is
     ran on object only after it is ran on all its dependencies.
@@ -130,7 +130,7 @@ def parallel_execute_stream(objects, func, get_deps):
         yield event
 
 
-def queue_producer(obj, func, results):
+def producer(obj, func, results):
     """
     The entry point for a producer thread which runs func on a single object.
     Places a tuple on the results queue once func has either returned or raised.
@@ -165,7 +165,7 @@ def feed_queue(objects, func, get_deps, results, state):
             for dep in deps
         ):
             log.debug('Starting producer thread for {}'.format(obj))
-            t = Thread(target=queue_producer, args=(obj, func, results))
+            t = Thread(target=producer, args=(obj, func, results))
             t.daemon = True
             t.start()
             state.started.add(obj)

+ 2 - 2
tests/unit/parallel_test.py

@@ -5,7 +5,7 @@ import six
 from docker.errors import APIError
 
 from compose.parallel import parallel_execute
-from compose.parallel import parallel_execute_stream
+from compose.parallel import parallel_execute_iter
 from compose.parallel import UpstreamError
 
 
@@ -81,7 +81,7 @@ def test_parallel_execute_with_upstream_errors():
     events = [
         (obj, result, type(exception))
         for obj, result, exception
-        in parallel_execute_stream(objects, process, get_deps)
+        in parallel_execute_iter(objects, process, get_deps)
     ]
 
     assert (cache, None, type(None)) in events