from .error import *

from .tokens import *
from .events import *
from .nodes import *

from .loader import *
from .dumper import *

__version__ = '3.13'
try:
    from .cyaml import *
    __with_libyaml__ = True
except ImportError:
    __with_libyaml__ = False

import io

 

def scan(stream, Loader=Loader):
    """
    Scan a YAML stream and produce scanning tokens.
    """
    loader = Loader(stream)
    try:
        while loader.check_token():
            yield loader.get_token()
    finally:
        loader.dispose()
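
# Illustrative usage sketch for scan(): token-level inspection of a document.
# The sample text and the helper name _scan_example are assumptions made for
# demonstration only.
def _scan_example():
    # Yields e.g. StreamStartToken, BlockMappingStartToken, KeyToken,
    # ScalarToken, ValueToken, ScalarToken, BlockEndToken, StreamEndToken.
    return [token.__class__.__name__ for token in scan("a: 1\n")]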

 

def parse(stream, Loader=Loader):
    """
    Parse a YAML stream and produce parsing events.
    """
    loader = Loader(stream)
    try:
        while loader.check_event():
            yield loader.get_event()
    finally:
        loader.dispose()
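
# Illustrative usage sketch for parse(): events are lower level than Python
# objects and can be consumed incrementally, which helps with very large
# streams. The input and helper name are assumptions.
def _parse_example():
    # Yields e.g. StreamStartEvent, DocumentStartEvent, SequenceStartEvent,
    # ScalarEvent, ..., StreamEndEvent.
    return [event.__class__.__name__ for event in parse("- one\n- two\n")]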

 

def compose(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding representation tree.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_node()
    finally:
        loader.dispose()
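
# Illustrative usage sketch for compose(): the result is a representation tree
# of MappingNode/SequenceNode/ScalarNode objects, which can be inspected or
# rewritten before any Python objects are constructed. Names are illustrative.
def _compose_example():
    node = compose("a: 1\n")                 # a MappingNode
    key_node, value_node = node.value[0]     # (ScalarNode, ScalarNode)
    return key_node.value, value_node.tag    # ('a', 'tag:yaml.org,2002:int')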

 

def compose_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding representation trees.
    """
    loader = Loader(stream)
    try:
        while loader.check_node():
            yield loader.get_node()
    finally:
        loader.dispose()

 

def load(stream, Loader=Loader):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    """
    loader = Loader(stream)
    try:
        return loader.get_single_data()
    finally:
        loader.dispose()

 

def load_all(stream, Loader=Loader):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    """
    loader = Loader(stream)
    try:
        while loader.check_data():
            yield loader.get_data()
    finally:
        loader.dispose()

 

def safe_load(stream):
    """
    Parse the first YAML document in a stream
    and produce the corresponding Python object.
    Resolve only basic YAML tags.
    """
    return load(stream, SafeLoader)

 

def safe_load_all(stream):
    """
    Parse all YAML documents in a stream
    and produce corresponding Python objects.
    Resolve only basic YAML tags.
    """
    return load_all(stream, SafeLoader)
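
# Illustrative usage sketch: safe_load()/safe_load_all() are the usual entry
# points for untrusted input, since they construct only standard Python types.
# The sample documents are assumptions for demonstration.
def _safe_load_example():
    single = safe_load("a: 1\nb: [2, 3]\n")                # {'a': 1, 'b': [2, 3]}
    many = list(safe_load_all("---\nx: 1\n---\ny: 2\n"))   # [{'x': 1}, {'y': 2}]
    return single, many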

 

def emit(events, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None):
    """
    Emit YAML parsing events into a stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        stream = io.StringIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break)
    try:
        for event in events:
            dumper.emit(event)
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()
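
# Illustrative usage sketch: parse() produces events and emit() consumes them,
# so the two can be chained for an event-level round trip that normalizes
# formatting without building Python objects. The input text is illustrative.
def _emit_example():
    text = "a: 1\nb: 2\n"
    return emit(parse(text))    # a re-emitted YAML string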

 

def serialize_all(nodes, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding=None, explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of representation trees into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for node in nodes:
            dumper.serialize(node)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

 

def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a representation tree into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return serialize_all([node], stream, Dumper=Dumper, **kwds)
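
# Illustrative usage sketch: serialize()/serialize_all() are the node-level
# counterparts of dump()/dump_all(); combined with compose() they round-trip a
# document at the representation level. The input text is illustrative.
def _serialize_example():
    node = compose("a: 1\n")
    return serialize(node)      # YAML text rebuilt from the node tree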

 

def dump_all(documents, stream=None, Dumper=Dumper,
        default_style=None, default_flow_style=None,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding=None, explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of Python objects into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, default_style=default_style,
            default_flow_style=default_flow_style,
            canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for data in documents:
            dumper.represent(data)
        dumper.close()
    finally:
        dumper.dispose()
    if getvalue:
        return getvalue()

 

def dump(data, stream=None, Dumper=Dumper, **kwds):
    """
    Serialize a Python object into a YAML stream.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=Dumper, **kwds)

 

def safe_dump_all(documents, stream=None, **kwds):
    """
    Serialize a sequence of Python objects into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)

 

def safe_dump(data, stream=None, **kwds):
    """
    Serialize a Python object into a YAML stream.
    Produce only basic YAML tags.
    If stream is None, return the produced string instead.
    """
    return dump_all([data], stream, Dumper=SafeDumper, **kwds)
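
# Illustrative usage sketch: safe_dump() mirrors safe_load() and emits only
# basic YAML tags, which makes it the usual choice for data interchange. The
# sample data and keyword argument are illustrative.
def _safe_dump_example():
    data = {'name': 'example', 'ports': [80, 443]}
    return safe_dump(data, default_flow_style=False)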

 

def add_implicit_resolver(tag, regexp, first=None,
        Loader=Loader, Dumper=Dumper):
    """
    Add an implicit scalar detector.
    If an implicit scalar value matches the given regexp,
    the corresponding tag is assigned to the scalar.
    first is a sequence of possible initial characters or None.
    """
    Loader.add_implicit_resolver(tag, regexp, first)
    Dumper.add_implicit_resolver(tag, regexp, first)
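
# Illustrative sketch: an implicit resolver attaches a tag to untagged scalars
# that match a pattern. The '!celsius' tag, its pattern and the loader choice
# below are hypothetical; calling the helper mutates SafeLoader and SafeDumper
# globally.
def _register_celsius_resolver():
    import re
    pattern = re.compile(r'^-?\d+C$')
    add_implicit_resolver('!celsius', pattern, first=list('-0123456789'),
            Loader=SafeLoader, Dumper=SafeDumper)
    # A matching constructor turns scalars such as '21C' into the integer 21.
    add_constructor('!celsius',
            lambda loader, node: int(loader.construct_scalar(node)[:-1]),
            Loader=SafeLoader)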

 

def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
    """
    Add a path based resolver for the given tag.
    A path is a list of keys that forms a path
    to a node in the representation tree.
    Keys can be string values, integers, or None.
    """
    Loader.add_path_resolver(tag, path, kind)
    Dumper.add_path_resolver(tag, path, kind)

 

def add_constructor(tag, constructor, Loader=Loader):
    """
    Add a constructor for the given tag.
    Constructor is a function that accepts a Loader instance
    and a node object and produces the corresponding Python object.
    """
    Loader.add_constructor(tag, constructor)

 

def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
    """
    Add a multi-constructor for the given tag prefix.
    Multi-constructor is called for a node if its tag starts with tag_prefix.
    Multi-constructor accepts a Loader instance, a tag suffix,
    and a node object and produces the corresponding Python object.
    """
    Loader.add_multi_constructor(tag_prefix, multi_constructor)
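
# Illustrative sketch: a multi-constructor handles every tag sharing a prefix
# and receives the remaining tag suffix. The '!include/' prefix and the helper
# below are hypothetical; calling the helper registers the handler on
# SafeLoader globally.
def _register_include_prefix():
    def construct_include(loader, suffix, node):
        # suffix is the part of the tag after '!include/'; it is returned here
        # together with the node's scalar value.
        return (suffix, loader.construct_scalar(node))
    add_multi_constructor('!include/', construct_include, Loader=SafeLoader)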

 

def add_representer(data_type, representer, Dumper=Dumper):
    """
    Add a representer for the given type.
    Representer is a function accepting a Dumper instance
    and an instance of the given data type
    and producing the corresponding representation node.
    """
    Dumper.add_representer(data_type, representer)

 

def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
    """
    Add a multi-representer for the given type.
    Multi-representer is a function accepting a Dumper instance
    and an instance of the given data type or subtype
    and producing the corresponding representation node.
    """
    Dumper.add_multi_representer(data_type, multi_representer)
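
# Illustrative sketch: a representer maps instances of one exact type to
# representation nodes (a multi-representer would also cover subclasses). The
# Point class and '!point' tag are hypothetical; calling the helper mutates
# SafeDumper globally.
def _register_point_representer():
    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

    def represent_point(dumper, point):
        return dumper.represent_mapping('!point', {'x': point.x, 'y': point.y})

    add_representer(Point, represent_point, Dumper=SafeDumper)
    return dump(Point(1, 2), Dumper=SafeDumper)   # a document tagged '!point'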

 

class YAMLObjectMetaclass(type):
    """
    The metaclass for YAMLObject.
    """
    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
            cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            cls.yaml_dumper.add_representer(cls, cls.to_yaml)

 

class YAMLObject(metaclass=YAMLObjectMetaclass):
    """
    An object that can dump itself to a YAML stream
    and load itself from a YAML stream.
    """

    __slots__ = ()  # no direct instantiation, so allow immutable subclasses

    yaml_loader = Loader
    yaml_dumper = Dumper

    yaml_tag = None
    yaml_flow_style = None

    @classmethod
    def from_yaml(cls, loader, node):
        """
        Convert a representation node to a Python object.
        """
        return loader.construct_yaml_object(node, cls)

    @classmethod
    def to_yaml(cls, dumper, data):
        """
        Convert a Python object to a representation node.
        """
        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
                flow_style=cls.yaml_flow_style)
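
# Illustrative sketch: subclassing YAMLObject wires a tag into the chosen
# loader and dumper through the metaclass above. The Monster class, its tag
# and the sample document are hypothetical; defining such a class registers a
# constructor and representer on the given loader and dumper.
def _yamlobject_example():
    class Monster(YAMLObject):
        yaml_loader = SafeLoader
        yaml_dumper = SafeDumper
        yaml_tag = '!Monster'

        def __init__(self, name, hp):
            self.name = name
            self.hp = hp

    monster = load("!Monster {name: Cave Troll, hp: 25}", Loader=SafeLoader)
    return monster.name, safe_dump(monster)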