utils.py

import codecs
import hashlib
import json
import json.decoder

import six

json_decoder = json.JSONDecoder()


def get_output_stream(stream):
    if six.PY3:
        return stream
    return codecs.getwriter('utf-8')(stream)
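
# Usage sketch (illustrative, not part of the original module): wrap a byte
# stream such as sys.stdout so unicode text can be written to it on Python 2;
# on Python 3 the stream is returned unchanged.
#
#   out = get_output_stream(sys.stdout)
#   out.write(u'hello\n')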


def stream_as_text(stream):
    """Given a stream of bytes or text, if any of the items in the stream
    are bytes convert them to text.

    This function can be removed once docker-py returns text streams instead
    of byte streams.
    """
    for data in stream:
        if not isinstance(data, six.text_type):
            data = data.decode('utf-8', 'replace')
        yield data
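
# Usage sketch (illustrative): mixed byte and text chunks all come out as text.
#
#   list(stream_as_text([b'{"status": ', u'"ok"}']))
#   # -> [u'{"status": ', u'"ok"}']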


def line_splitter(buffer, separator=u'\n'):
    index = buffer.find(six.text_type(separator))
    if index == -1:
        return None
    return buffer[:index + 1], buffer[index + 1:]
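
# Usage sketch (illustrative): the returned chunk keeps its trailing separator,
# and None signals that no complete line is buffered yet.
#
#   line_splitter(u'one\ntwo')   # -> (u'one\n', u'two')
#   line_splitter(u'partial')    # -> None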


def split_buffer(stream, splitter=None, decoder=lambda a: a):
    """Given a generator which yields strings and a splitter function,
    joins all input, splits on the separator and yields each chunk.

    Unlike string.split(), each chunk includes the trailing
    separator, except for the last one if none was found on the end
    of the input.
    """
    splitter = splitter or line_splitter
    buffered = six.text_type('')

    for data in stream_as_text(stream):
        buffered += data
        while True:
            buffer_split = splitter(buffered)
            if buffer_split is None:
                break

            item, buffered = buffer_split
            yield item

    if buffered:
        yield decoder(buffered)
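
# Usage sketch (illustrative, chunk boundaries made up for the example):
# complete lines are reassembled regardless of how the input was buffered,
# and the final chunk without a separator is passed through the decoder.
#
#   chunks = [u'first li', u'ne\nsecond line\nthi', u'rd']
#   list(split_buffer(chunks))
#   # -> [u'first line\n', u'second line\n', u'third']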


def json_splitter(buffer):
    """Attempt to parse a json object from a buffer. If there is at least one
    object, return it and the rest of the buffer, otherwise return None.
    """
    try:
        obj, index = json_decoder.raw_decode(buffer)
        rest = buffer[json.decoder.WHITESPACE.match(buffer, index).end():]
        return obj, rest
    except ValueError:
        return None
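
# Usage sketch (illustrative): one decoded object plus the unparsed remainder.
#
#   json_splitter(u'{"a": 1}{"b": 2}')   # -> ({u'a': 1}, u'{"b": 2}')
#   json_splitter(u'{"a": 1')            # -> None (incomplete object)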


def json_stream(stream):
    """Given a stream of text, return a stream of json objects.
    This handles streams which are inconsistently buffered (some entries may
    be newline delimited, and others are not).
    """
    return split_buffer(stream, json_splitter, json_decoder.decode)
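
# Usage sketch (illustrative): objects are recovered whether the chunks arrive
# newline delimited or concatenated and split mid-object.
#
#   chunks = [u'{"status": "pull"}\n{"status": ', u'"done"}']
#   list(json_stream(chunks))
#   # -> [{u'status': u'pull'}, {u'status': u'done'}]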


def json_hash(obj):
    dump = json.dumps(obj, sort_keys=True, separators=(',', ':'))
    h = hashlib.sha256()
    h.update(dump.encode('utf8'))
    return h.hexdigest()
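
# Usage sketch (illustrative keys): the digest is deterministic because keys
# are sorted and compact separators are used, so key order does not matter.
#
#   a = json_hash({'image': 'busybox', 'command': 'true'})
#   b = json_hash({'command': 'true', 'image': 'busybox'})
#   a == b   # -> True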