__init__.py

from .error import *

from .tokens import *
from .events import *
from .nodes import *

from .loader import *
from .dumper import *

__version__ = '3.10'
try:
    from .cyaml import *
    __with_libyaml__ = True
except ImportError:
    __with_libyaml__ = False

import io

def scan(stream, Loader=Loader):
    """
    Scan a YAML stream and produce scanning tokens.
    """
    loader = Loader(stream)
    try:
        while loader.check_token():
            yield loader.get_token()
    finally:
        loader.dispose()

def parse(stream, Loader=Loader):
    """
    Parse a YAML stream and produce parsing events.
    """
    loader = Loader(stream)
    try:
        while loader.check_event():
            yield loader.get_event()
    finally:
        loader.dispose()

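# Illustrative sketch, not part of the original module: how scan() and parse()
# might be used to inspect the token and event streams of a small document.
# The helper name _demo_scan_and_parse is hypothetical.
def _demo_scan_and_parse():
    document = "greeting: hello\nnumbers: [1, 2, 3]\n"
    for token in scan(document):
        print(type(token).__name__)    # StreamStartToken, BlockMappingStartToken, ...
    for event in parse(document):
        print(type(event).__name__)    # StreamStartEvent, DocumentStartEvent, ...
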
def compose(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding representation tree.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_node()
    finally:
        loader.dispose()

def compose_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding representation trees.
    """
    loader = Loader(stream)
    try:
        while loader.check_node():
            yield loader.get_node()
    finally:
        loader.dispose()

def load(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_data()
    finally:
        loader.dispose()

def load_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    """
    loader = Loader(stream)
    try:
        while loader.check_data():
            yield loader.get_data()
    finally:
        loader.dispose()

def safe_load(stream):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    Resolve only basic YAML tags.
    """
    return load(stream, SafeLoader)

def safe_load_all(stream):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    Resolve only basic YAML tags.
    """
    return load_all(stream, SafeLoader)

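# Illustrative sketch, not part of the original module: typical use of
# safe_load() for a single document and safe_load_all() for a multi-document
# stream. The helper name _demo_safe_load is hypothetical.
def _demo_safe_load():
    config = safe_load("host: example.com\nport: 8080\n")
    assert config == {'host': 'example.com', 'port': 8080}
    for data in safe_load_all("---\na: 1\n---\nb: 2\n"):
        print(data)    # {'a': 1}, then {'b': 2}
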
def emit(events, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None):
    """
    Emit YAML parsing events into a stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        stream = io.StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break)
    try:
        for event in events:
            dumper.emit(event)
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def serialize_all(nodes, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding=None, explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of representation trees into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for node in nodes:
            dumper.serialize(node)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a representation tree into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return serialize_all([node], stream, Dumper=Dumper, **kwds)

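# Illustrative sketch, not part of the original module: round-tripping a
# document through the node layer with compose() and serialize(). The helper
# name _demo_compose_serialize is hypothetical.
def _demo_compose_serialize():
    node = compose("items:\n- 1\n- 2\n")    # a MappingNode
    return serialize(node)                  # back to YAML text as a str
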
def dump_all(documents, stream=None, Dumper=Dumper,
        default_style=None, default_flow_style=None,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding=None, explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of Python objects into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, default_style=default_style,
            default_flow_style=default_flow_style,
            canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for data in documents:
            dumper.represent(data)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

def dump(data, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a Python object into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=Dumper, **kwds)

def safe_dump_all(documents, stream=None, **kwds):
    """
    Serialize a sequence of Python objects into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)

def safe_dump(data, stream=None, **kwds):
    """
    Serialize a Python object into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=SafeDumper, **kwds)

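# Illustrative sketch, not part of the original module: dumping to a string
# (stream=None) versus writing directly to an open file. The helper name
# _demo_safe_dump and the output path are hypothetical.
def _demo_safe_dump():
    data = {'name': 'example', 'values': [1, 2, 3]}
    text = safe_dump(data, default_flow_style=False)
    print(text)
    with open('/tmp/example.yaml', 'w') as handle:
        safe_dump(data, handle)    # returns None when a stream is given
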
def add_implicit_resolver(tag, regexp, first=None,
        Loader=Loader, Dumper=Dumper):
    """
    Add an implicit scalar detector.
    If an implicit scalar value matches the given regexp,
    the corresponding tag is assigned to the scalar.
    first is a sequence of possible initial characters or None.
    """
    Loader.add_implicit_resolver(tag, regexp, first)
    Dumper.add_implicit_resolver(tag, regexp, first)

def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
    """
    Add a path based resolver for the given tag.
    A path is a list of keys that forms a path
    to a node in the representation tree.
    Keys can be string values, integers, or None.
    """
    Loader.add_path_resolver(tag, path, kind)
    Dumper.add_path_resolver(tag, path, kind)

def add_constructor(tag, constructor, Loader=Loader):
    """
    Add a constructor for the given tag.
    Constructor is a function that accepts a Loader instance
    and a node object and produces the corresponding Python object.
    """
    Loader.add_constructor(tag, constructor)

def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
    """
    Add a multi-constructor for the given tag prefix.
    Multi-constructor is called for a node if its tag starts with tag_prefix.
    Multi-constructor accepts a Loader instance, a tag suffix,
    and a node object and produces the corresponding Python object.
    """
    Loader.add_multi_constructor(tag_prefix, multi_constructor)

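# Illustrative sketch, not part of the original module: registering a
# constructor so that nodes tagged !point load as tuples. The tag !point and
# the helper name _demo_point_constructor are hypothetical.
def _demo_point_constructor():
    def construct_point(loader, node):
        return tuple(loader.construct_sequence(node))
    add_constructor('!point', construct_point, Loader=SafeLoader)
    return safe_load('!point [1, 2]')    # -> (1, 2)
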
def add_representer(data_type, representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Representer is a function accepting a Dumper instance
    and an instance of the given data type
    and producing the corresponding representation node.
    """
    Dumper.add_representer(data_type, representer)

def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
    """
    Add a multi-representer for the given type.
    Multi-representer is a function accepting a Dumper instance
    and an instance of the given data type or subtype
    and producing the corresponding representation node.
    """
    Dumper.add_multi_representer(data_type, multi_representer)

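# Illustrative sketch, not part of the original module: registering a
# representer so that complex numbers dump as a tagged scalar. The tag
# !complex and the helper name _demo_complex_representer are hypothetical.
def _demo_complex_representer():
    def represent_complex(dumper, data):
        return dumper.represent_scalar('!complex', repr(data))
    add_representer(complex, represent_complex, Dumper=SafeDumper)
    return safe_dump(complex(1, 2))    # YAML text carrying the !complex tag
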
class YAMLObjectMetaclass(type):
    """
    The metaclass for YAMLObject.
    """
    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
            cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            cls.yaml_dumper.add_representer(cls, cls.to_yaml)

class YAMLObject(metaclass=YAMLObjectMetaclass):
    """
    An object that can dump itself to a YAML stream
    and load itself from a YAML stream.
    """

    __slots__ = ()  # no direct instantiation, so allow immutable subclasses

    yaml_loader = Loader
    yaml_dumper = Dumper

    yaml_tag = None
    yaml_flow_style = None

    @classmethod
    def from_yaml(cls, loader, node):
        """
        Convert a representation node to a Python object.
        """
        return loader.construct_yaml_object(node, cls)

    @classmethod
    def to_yaml(cls, dumper, data):
        """
        Convert a Python object to a representation node.
        """
        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
                flow_style=cls.yaml_flow_style)

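# Illustrative sketch, not part of the original module: subclassing YAMLObject
# so instances round-trip through dump() and load() under a custom tag. The
# class Monster, its attributes, and _demo_yaml_object are hypothetical.
def _demo_yaml_object():
    class Monster(YAMLObject):
        yaml_tag = '!Monster'
        def __init__(self, name, hp):
            self.name = name
            self.hp = hp
        def __repr__(self):
            return 'Monster(name=%r, hp=%r)' % (self.name, self.hp)
    text = dump(Monster('Cave spider', 16))    # e.g. "!Monster {hp: 16, name: Cave spider}\n"
    return load(text)                          # reconstructs an equivalent Monster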