Edit File by line
/home/barbar84/www/wp-conte.../plugins/sujqvwi/AnonR/smanonr..../lib64/python3..../site-pac.../yaml
File: parser.py
# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
#                       ALIAS
#                       | properties (block_content | indentless_block_sequence)?
#                       | block_content
#                       | indentless_block_sequence
# block_node ::= ALIAS
#                | properties block_content?
#                | block_content
# flow_node ::= ALIAS
#               | properties flow_content?
#               | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# block_mapping ::= BLOCK-MAPPING_START
#                   ((KEY block_node_or_indentless_sequence?)?
#                   (VALUE block_node_or_indentless_sequence?)?)*
#                   BLOCK-END
# flow_sequence ::= FLOW-SEQUENCE-START
#                   (flow_sequence_entry FLOW-ENTRY)*
#                   flow_sequence_entry?
#                   FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping ::= FLOW-MAPPING-START
#                  (flow_mapping_entry FLOW-ENTRY)*
#                  flow_mapping_entry?
#                  FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START }
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
__all__ = ['Parser', 'ParserError']

from .error import MarkedYAMLError
from .tokens import *
from .events import *
from .scanner import *
class ParserError(MarkedYAMLError):
    """Raised when the token stream does not form a valid event sequence."""
[70] Fix | Delete
class Parser:
    # Writing a recursive-descent parser for the LL(1) grammar above is a
    # straightforward exercise, so the state methods are only lightly
    # commented.

    # Fallback tag-handle resolutions, applied when a document declares no
    # %TAG directives of its own.
    DEFAULT_TAGS = {
        '!': '!',
        '!!': 'tag:yaml.org,2002:',
    }
[78] Fix | Delete
[79] Fix | Delete
def __init__(self):
    """Create a parser positioned just before STREAM-START."""
    # No event has been produced or buffered yet.
    self.current_event = None
    # Directive state for the document currently being parsed.
    self.yaml_version = None
    self.tag_handles = {}
    # Stack of deferred state methods and of collection start marks.
    self.states = []
    self.marks = []
    # The first thing the parser expects is the start of the stream.
    self.state = self.parse_stream_start
[86] Fix | Delete
[87] Fix | Delete
def dispose(self):
    """Drop the state attributes so their self-references can be collected."""
    # The state methods are bound methods of this object; clearing them
    # breaks the reference cycle.
    self.states, self.state = [], None
[91] Fix | Delete
[92] Fix | Delete
def check_event(self, *choices):
    """Report whether the next event matches one of *choices*.

    With no arguments, simply report whether any event remains.
    """
    # Produce the next event lazily, if a state method is available.
    if self.current_event is None and self.state:
        self.current_event = self.state()
    event = self.current_event
    if event is None:
        return False
    if not choices:
        return True
    return any(isinstance(event, choice) for choice in choices)
[104] Fix | Delete
[105] Fix | Delete
def peek_event(self):
    """Return the next event without consuming it (None at end of stream)."""
    # Produce the next event lazily, if a state method is available.
    if self.current_event is None and self.state:
        self.current_event = self.state()
    return self.current_event
[111] Fix | Delete
[112] Fix | Delete
def get_event(self):
    """Return the next event and advance past it."""
    # Produce the next event lazily, if a state method is available.
    if self.current_event is None and self.state:
        self.current_event = self.state()
    event, self.current_event = self.current_event, None
    return event
[120] Fix | Delete
[121] Fix | Delete
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*

def parse_stream_start(self):
    """Consume STREAM-START and emit the matching StreamStartEvent."""
    token = self.get_token()
    event = StreamStartEvent(token.start_mark, token.end_mark,
            encoding=token.encoding)
    # The stream may begin with an implicit (marker-less) document.
    self.state = self.parse_implicit_document_start
    return event
[136] Fix | Delete
[137] Fix | Delete
def parse_implicit_document_start(self):
    """Begin a document that has no explicit '---' marker, if possible."""
    # An implicit document cannot start if a directive, an explicit
    # document marker, or the end of the stream comes next.
    if self.check_token(DirectiveToken, DocumentStartToken,
            StreamEndToken):
        return self.parse_document_start()

    # Implicit documents use only the default tag handles.
    self.tag_handles = self.DEFAULT_TAGS
    token = self.peek_token()
    start_mark = end_mark = token.start_mark
    event = DocumentStartEvent(start_mark, end_mark, explicit=False)

    # After the root node, expect the end of the document.
    self.states.append(self.parse_document_end)
    self.state = self.parse_block_node
    return event
[156] Fix | Delete
[157] Fix | Delete
def parse_document_start(self):
    """Parse an explicit document start, or finish the stream."""
    # Skip any extra '...' indicators left over from a previous document.
    while self.check_token(DocumentEndToken):
        self.get_token()

    if self.check_token(StreamEndToken):
        # No more documents: emit STREAM-END and stop the state machine.
        token = self.get_token()
        event = StreamEndEvent(token.start_mark, token.end_mark)
        assert not self.states
        assert not self.marks
        self.state = None
        return event

    # An explicit document: optional directives followed by '---'.
    token = self.peek_token()
    start_mark = token.start_mark
    version, tags = self.process_directives()
    if not self.check_token(DocumentStartToken):
        raise ParserError(None, None,
                "expected '<document start>', but found %r"
                % self.peek_token().id,
                self.peek_token().start_mark)
    token = self.get_token()
    end_mark = token.end_mark
    event = DocumentStartEvent(start_mark, end_mark,
            explicit=True, version=version, tags=tags)
    self.states.append(self.parse_document_end)
    self.state = self.parse_document_content
    return event
[187] Fix | Delete
[188] Fix | Delete
def parse_document_end(self):
    """Emit DocumentEndEvent; it is explicit only if a '...' token appears."""
    token = self.peek_token()
    start_mark = end_mark = token.start_mark
    explicit = False
    if self.check_token(DocumentEndToken):
        # Consume the '...' marker and widen the event to cover it.
        token = self.get_token()
        end_mark = token.end_mark
        explicit = True
    event = DocumentEndEvent(start_mark, end_mark, explicit=explicit)

    # Either another document or the stream end follows.
    self.state = self.parse_document_start
    return event
[205] Fix | Delete
[206] Fix | Delete
def parse_document_content(self):
    """Parse the root node of a document; a missing root is an empty scalar."""
    if not self.check_token(DirectiveToken,
            DocumentStartToken, DocumentEndToken, StreamEndToken):
        return self.parse_block_node()
    # The document has no content node.
    event = self.process_empty_scalar(self.peek_token().start_mark)
    self.state = self.states.pop()
    return event
[214] Fix | Delete
[215] Fix | Delete
def process_directives(self):
    """Consume the %YAML and %TAG directives preceding a document.

    Returns a ``(version, tags)`` pair suitable for DocumentStartEvent;
    each element is None when the corresponding directive is absent.
    """
    self.yaml_version = None
    self.tag_handles = {}
    while self.check_token(DirectiveToken):
        token = self.get_token()
        if token.name == 'YAML':
            # At most one %YAML directive, and only major version 1.
            if self.yaml_version is not None:
                raise ParserError(None, None,
                        "found duplicate YAML directive", token.start_mark)
            major, minor = token.value
            if major != 1:
                raise ParserError(None, None,
                        "found incompatible YAML document (version 1.* is required)",
                        token.start_mark)
            self.yaml_version = token.value
        elif token.name == 'TAG':
            # Each tag handle may be declared only once per document.
            handle, prefix = token.value
            if handle in self.tag_handles:
                raise ParserError(None, None,
                        "duplicate tag handle %r" % handle,
                        token.start_mark)
            self.tag_handles[handle] = prefix
    # Report only the explicitly declared handles to the caller...
    value = (self.yaml_version,
             self.tag_handles.copy() if self.tag_handles else None)
    # ...but make sure the default handles always resolve afterwards.
    for handle, prefix in self.DEFAULT_TAGS.items():
        self.tag_handles.setdefault(handle, prefix)
    return value
[245] Fix | Delete
# block_node_or_indentless_sequence ::= ALIAS
#               | properties (block_content | indentless_block_sequence)?
#               | block_content
#               | indentless_block_sequence
# block_node ::= ALIAS
#                | properties block_content?
#                | block_content
# flow_node ::= ALIAS
#               | properties flow_content?
#               | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
[262] Fix | Delete
def parse_block_node(self):
    """Parse a node in block context."""
    return self.parse_node(block=True)
[264] Fix | Delete
[265] Fix | Delete
def parse_flow_node(self):
    """Parse a node in flow context."""
    return self.parse_node()
[267] Fix | Delete
[268] Fix | Delete
def parse_block_node_or_indentless_sequence(self):
    """Parse a block node, allowing an indentless '-' sequence as well."""
    return self.parse_node(block=True, indentless_sequence=True)
[270] Fix | Delete
[271] Fix | Delete
def parse_node(self, block=False, indentless_sequence=False):
    """Parse a single node and emit its opening event.

    An alias yields an AliasEvent; otherwise the optional properties
    (anchor and/or tag, in either order) are gathered and the node
    content determines which event is produced: ScalarEvent,
    SequenceStartEvent, or MappingStartEvent.

    `block` permits block collections; `indentless_sequence` permits a
    '-' sequence that is not indented past its parent (used inside
    block mappings).
    """
    if self.check_token(AliasToken):
        # '*name' — the whole node is a reference to an anchored node.
        token = self.get_token()
        event = AliasEvent(token.value, token.start_mark, token.end_mark)
        self.state = self.states.pop()
    else:
        anchor = None
        tag = None
        start_mark = end_mark = tag_mark = None
        # Properties may appear as ANCHOR TAG? or TAG ANCHOR?.
        if self.check_token(AnchorToken):
            token = self.get_token()
            start_mark = token.start_mark
            end_mark = token.end_mark
            anchor = token.value
            if self.check_token(TagToken):
                token = self.get_token()
                tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
        elif self.check_token(TagToken):
            token = self.get_token()
            start_mark = tag_mark = token.start_mark
            end_mark = token.end_mark
            tag = token.value
            if self.check_token(AnchorToken):
                token = self.get_token()
                end_mark = token.end_mark
                anchor = token.value
        if tag is not None:
            # A tag token is a (handle, suffix) pair; resolve the handle
            # against the declared %TAG directives (or the defaults).
            handle, suffix = tag
            if handle is not None:
                if handle not in self.tag_handles:
                    raise ParserError("while parsing a node", start_mark,
                            "found undefined tag handle %r" % handle,
                            tag_mark)
                tag = self.tag_handles[handle]+suffix
            else:
                tag = suffix
        #if tag == '!':
        #    raise ParserError("while parsing a node", start_mark,
        #            "found non-specific tag '!'", tag_mark,
        #            "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
        if start_mark is None:
            # No properties were present: the node starts at the
            # upcoming content token.
            start_mark = end_mark = self.peek_token().start_mark
        event = None
        # A missing or '!' tag means the resolver must infer the type.
        implicit = (tag is None or tag == '!')
        if indentless_sequence and self.check_token(BlockEntryToken):
            # A '-' entry at the current level: an indentless sequence.
            end_mark = self.peek_token().end_mark
            event = SequenceStartEvent(anchor, tag, implicit,
                    start_mark, end_mark)
            self.state = self.parse_indentless_sequence_entry
        else:
            if self.check_token(ScalarToken):
                token = self.get_token()
                end_mark = token.end_mark
                # The implicit pair tells the resolver whether the
                # scalar may be resolved as (plain, non-plain).
                if (token.plain and tag is None) or tag == '!':
                    implicit = (True, False)
                elif tag is None:
                    implicit = (False, True)
                else:
                    implicit = (False, False)
                event = ScalarEvent(anchor, tag, implicit, token.value,
                        start_mark, end_mark, style=token.style)
                self.state = self.states.pop()
            elif self.check_token(FlowSequenceStartToken):
                end_mark = self.peek_token().end_mark
                event = SequenceStartEvent(anchor, tag, implicit,
                        start_mark, end_mark, flow_style=True)
                self.state = self.parse_flow_sequence_first_entry
            elif self.check_token(FlowMappingStartToken):
                end_mark = self.peek_token().end_mark
                event = MappingStartEvent(anchor, tag, implicit,
                        start_mark, end_mark, flow_style=True)
                self.state = self.parse_flow_mapping_first_key
            elif block and self.check_token(BlockSequenceStartToken):
                end_mark = self.peek_token().start_mark
                event = SequenceStartEvent(anchor, tag, implicit,
                        start_mark, end_mark, flow_style=False)
                self.state = self.parse_block_sequence_first_entry
            elif block and self.check_token(BlockMappingStartToken):
                end_mark = self.peek_token().start_mark
                event = MappingStartEvent(anchor, tag, implicit,
                        start_mark, end_mark, flow_style=False)
                self.state = self.parse_block_mapping_first_key
            elif anchor is not None or tag is not None:
                # Empty scalars are allowed even if a tag or an anchor is
                # specified.
                event = ScalarEvent(anchor, tag, (implicit, False), '',
                        start_mark, end_mark)
                self.state = self.states.pop()
            else:
                # No recognizable content and no properties: error out.
                if block:
                    node = 'block'
                else:
                    node = 'flow'
                token = self.peek_token()
                raise ParserError("while parsing a %s node" % node, start_mark,
                        "expected the node content, but found %r" % token.id,
                        token.start_mark)
    return event
[371] Fix | Delete
[372] Fix | Delete
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END

def parse_block_sequence_first_entry(self):
    """Consume BLOCK-SEQUENCE-START and parse the first entry."""
    token = self.get_token()
    # Remember where the collection began, for error reporting.
    self.marks.append(token.start_mark)
    return self.parse_block_sequence_entry()
[378] Fix | Delete
[379] Fix | Delete
def parse_block_sequence_entry(self):
    """Parse one '-' entry of a block sequence, or close it at BLOCK-END."""
    if self.check_token(BlockEntryToken):
        token = self.get_token()
        if self.check_token(BlockEntryToken, BlockEndToken):
            # '-' immediately followed by another '-' or the end of the
            # block: the entry is an empty scalar.
            self.state = self.parse_block_sequence_entry
            return self.process_empty_scalar(token.end_mark)
        self.states.append(self.parse_block_sequence_entry)
        return self.parse_block_node()
    if not self.check_token(BlockEndToken):
        token = self.peek_token()
        raise ParserError("while parsing a block collection", self.marks[-1],
                "expected <block end>, but found %r" % token.id, token.start_mark)
    token = self.get_token()
    event = SequenceEndEvent(token.start_mark, token.end_mark)
    self.state = self.states.pop()
    self.marks.pop()
    return event
[397] Fix | Delete
[398] Fix | Delete
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+

def parse_indentless_sequence_entry(self):
    """Parse one entry of an indentless '-' sequence, or end it."""
    if self.check_token(BlockEntryToken):
        token = self.get_token()
        if self.check_token(BlockEntryToken,
                KeyToken, ValueToken, BlockEndToken):
            # The '-' carries no node: emit an empty scalar.
            self.state = self.parse_indentless_sequence_entry
            return self.process_empty_scalar(token.end_mark)
        self.states.append(self.parse_indentless_sequence_entry)
        return self.parse_block_node()
    # No further '-': the sequence ends without a closing token of its
    # own, so the end event is zero-width at the next token.
    token = self.peek_token()
    event = SequenceEndEvent(token.start_mark, token.start_mark)
    self.state = self.states.pop()
    return event
[414] Fix | Delete
[415] Fix | Delete
# block_mapping ::= BLOCK-MAPPING_START
#                   ((KEY block_node_or_indentless_sequence?)?
#                   (VALUE block_node_or_indentless_sequence?)?)*
#                   BLOCK-END

def parse_block_mapping_first_key(self):
    """Consume BLOCK-MAPPING-START and parse the first key."""
    token = self.get_token()
    # Remember where the collection began, for error reporting.
    self.marks.append(token.start_mark)
    return self.parse_block_mapping_key()
[424] Fix | Delete
[425] Fix | Delete
def parse_block_mapping_key(self):
    """Parse a mapping key, or close the block mapping at BLOCK-END."""
    if self.check_token(KeyToken):
        token = self.get_token()
        if self.check_token(KeyToken, ValueToken, BlockEndToken):
            # A key indicator with no key node: the key is an empty
            # scalar.
            self.state = self.parse_block_mapping_value
            return self.process_empty_scalar(token.end_mark)
        self.states.append(self.parse_block_mapping_value)
        return self.parse_block_node_or_indentless_sequence()
    if not self.check_token(BlockEndToken):
        token = self.peek_token()
        raise ParserError("while parsing a block mapping", self.marks[-1],
                "expected <block end>, but found %r" % token.id, token.start_mark)
    token = self.get_token()
    event = MappingEndEvent(token.start_mark, token.end_mark)
    self.state = self.states.pop()
    self.marks.pop()
    return event
[443] Fix | Delete
[444] Fix | Delete
def parse_block_mapping_value(self):
    """Parse the value part of a mapping entry; it may be absent."""
    if not self.check_token(ValueToken):
        # No ':' indicator — the key has an empty value.
        self.state = self.parse_block_mapping_key
        token = self.peek_token()
        return self.process_empty_scalar(token.start_mark)
    token = self.get_token()
    if self.check_token(KeyToken, ValueToken, BlockEndToken):
        # ':' immediately followed by another entry or the block end:
        # the value is an empty scalar.
        self.state = self.parse_block_mapping_key
        return self.process_empty_scalar(token.end_mark)
    self.states.append(self.parse_block_mapping_key)
    return self.parse_block_node_or_indentless_sequence()
[457] Fix | Delete
[458] Fix | Delete
# flow_sequence ::= FLOW-SEQUENCE-START
#                   (flow_sequence_entry FLOW-ENTRY)*
#                   flow_sequence_entry?
#                   FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# Note that while production rules for both flow_sequence_entry and
# flow_mapping_entry are equal, their interpretations are different.
# For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
# generate an inline mapping (set syntax).

def parse_flow_sequence_first_entry(self):
    """Consume FLOW-SEQUENCE-START ('[') and parse the first entry."""
    token = self.get_token()
    # Remember where the collection began, for error reporting.
    self.marks.append(token.start_mark)
    return self.parse_flow_sequence_entry(first=True)
[473] Fix | Delete
[474] Fix | Delete
def parse_flow_sequence_entry(self, first=False):
    """Parse one flow-sequence entry, or the closing ']'.

    `first` is True only for the entry immediately after '['; later
    entries must be introduced by a ',' separator.
    """
    if not self.check_token(FlowSequenceEndToken):
        if not first:
            # Require the ',' separator before any non-first entry.
            if self.check_token(FlowEntryToken):
                self.get_token()
            else:
                token = self.peek_token()
                raise ParserError("while parsing a flow sequence", self.marks[-1],
                        "expected ',' or ']', but got %r" % token.id, token.start_mark)

        if self.check_token(KeyToken):
            # '?' starts a single-pair inline mapping (set syntax).
            token = self.peek_token()
            event = MappingStartEvent(None, None, True,
                    token.start_mark, token.end_mark,
                    flow_style=True)
            # NOTE(review): the target state method is defined later in
            # the file, beyond this excerpt.
            self.state = self.parse_flow_sequence_entry_mapping_key
            return event
        elif not self.check_token(FlowSequenceEndToken):
            self.states.append(self.parse_flow_sequence_entry)
            return self.parse_flow_node()
    # ']' — close the sequence.
    token = self.get_token()
    event = SequenceEndEvent(token.start_mark, token.end_mark)
    self.state = self.states.pop()
    self.marks.pop()
    return event
[499] Fix | Delete
12
It is recommended that you Edit text format, this type of Fix handles quite a lot in one request
Function