# To maximize python3/python2 compatibility
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import

from .StarFile import StarBlock,StarFile,StarList,StarDict
from io import StringIO
# An alternative specification for the Cif Parser, based on Yapps2
# by Amit Patel (http://theory.stanford.edu/~amitp/Yapps)
#
# helper code: we define our match tokens
lastval = ''
def monitor(location,value):
    global lastval
    #print 'At %s: %s' % (location,repr(value))
    lastval = repr(value)
    return value

# Strip extras gets rid of leading and trailing whitespace, and
# semicolons.
def stripextras(value):
    from .StarFile import remove_line_folding, remove_line_prefix
    # we get rid of semicolons and leading/trailing terminators etc.
    import re
    jj = re.compile("[\n\r\f \t\v]*")
    semis = re.compile("[\n\r\f \t\v]*[\n\r\f]\n*;")
    cut = semis.match(value)
    if cut:    # we have a semicolon-delimited string
        nv = value[cut.end():len(value)-2]
        try:
            if nv[-1]=='\r': nv = nv[:-1]
        except IndexError:    # empty data value
            pass
        # apply protocols
        nv = remove_line_prefix(nv)
        nv = remove_line_folding(nv)
        return nv
    else:
        cut = jj.match(value)
        if cut:
            return stripstring(value[cut.end():])
        return value

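# Illustrative sketch (comments only, not executed): for a semicolon-delimited
# text field as delivered by the scanner, stripextras drops the leading
# newline+semicolon and the trailing newline+semicolon, then applies the
# line-prefix and line-folding protocols, e.g.
#
#     stripextras("\n; first line\n second line\n;")
#     # -> ' first line\n second line'
#
# (assuming neither protocol is actually in use in the text).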
# helper function to get rid of inverted commas etc.

def stripstring(value):
    if value:
        if value[0]== '\'' and value[-1]=='\'':
            return value[1:-1]
        if value[0]=='"' and value[-1]=='"':
            return value[1:-1]
    return value

# helper function to get rid of triple quotes
def striptriple(value):
    if value:
        if value[:3] == '"""' and value[-3:] == '"""':
            return value[3:-3]
        if value[:3] == "'''" and value[-3:] == "'''":
            return value[3:-3]
    return value

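# Illustrative examples of the two quote-stripping helpers above
# (comments only, not executed):
#
#     stripstring("'quoted value'")            # -> "quoted value"
#     stripstring('bare_value')                # -> 'bare_value'
#     striptriple("'''multi 'line' text'''")   # -> "multi 'line' text"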
# helper function to populate a StarBlock given a list of names
# and values .
#
# Note that there may be an empty list at the very end of our itemlists,
# so we remove that if necessary.
#

def makeloop(target_block,loopdata):
    loop_seq,itemlists = loopdata
    if itemlists[-1] == []: itemlists.pop(-1)
    # print 'Making loop with %s' % repr(itemlists)
    step_size = len(loop_seq)
    for col_no in range(step_size):
        target_block.AddItem(loop_seq[col_no], itemlists[col_no::step_size],precheck=True)
    # print 'Makeloop constructed %s' % repr(loopstructure)
    # now construct the loop
    try:
        target_block.CreateLoop(loop_seq)  # will raise ValueError on problem
    except ValueError:
        error_string = 'Incorrect number of loop values for loop containing %s' % repr(loop_seq)
        print(error_string, file=sys.stderr)
        raise ValueError(error_string)

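# Illustrative sketch (comments only, not executed): the loop values arrive as
# one flat list in row order, so with
#
#     loop_seq  = ['_atom_site_label', '_atom_site_occupancy']   # example names only
#     itemlists = ['C1', 1.0, 'O1', 0.5]
#
# the slices itemlists[0::2] and itemlists[1::2] give the two columns
# ['C1', 'O1'] and [1.0, 0.5] that are added to the block before the loop
# is created.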
# return an object with the appropriate amount of nesting
def make_empty(nestlevel):
    gd = []
    for i in range(1,nestlevel):
        gd = [gd]
    return gd

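# For example (comments only, not executed):
#     make_empty(1) -> [],  make_empty(2) -> [[]],  make_empty(3) -> [[[]]]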
# this function updates a dictionary first checking for name collisions,
# which imply that the CIF is invalid. We need case insensitivity for
# names.

# Unfortunately we cannot check loop item contents against non-loop contents
# in a non-messy way during parsing, as we may not have easy access to previous
# key value pairs in the context of our call (unlike our built-in access to all
# previous loops).
# For this reason, we don't waste time checking looped items against non-looped
# names during parsing of a data block. This would only match a subset of the
# final items. We do check against ordinary items, however.
#
# Note the following situations:
# (1) new_dict is empty -> we have just added a loop; do no checking
# (2) new_dict is not empty -> we have some new key-value pairs
#
def cif_update(old_dict,new_dict,loops):
    # use a list, not a map iterator, so the membership test below can be
    # repeated for every new key under Python 3
    old_keys = [a.lower() for a in old_dict.keys()]
    if new_dict != {}:    # otherwise we have a new loop
        #print 'Comparing %s to %s' % (repr(old_keys),repr(new_dict.keys()))
        for new_key in new_dict.keys():
            if new_key.lower() in old_keys:
                raise CifError("Duplicate dataname or blockname %s in input file" % new_key)
            old_dict[new_key] = new_dict[new_key]
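# Illustrative sketch (comments only, not executed): the collision check is
# case-insensitive, so if old_dict already holds '_cell_length_a', an update
# containing '_CELL_length_A' would raise the duplicate-name error, while a
# genuinely new name is simply copied across.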
#
# this takes two lines, so we couldn't fit it into a one line execution statement...
def order_update(order_array,new_name):
    order_array.append(new_name)
    return new_name

# and finally...turn a sequence into a python dict (thanks to Stackoverflow)
def pairwise(iterable):
    it = iter(iterable)
    while True:
        try:
            yield next(it), next(it)
        except StopIteration:   # PEP 479: don't let StopIteration escape a generator
            return
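# For example (comments only, not executed):
#     list(pairwise(['a', 1, 'b', 2]))   # -> [('a', 1), ('b', 2)]
# A trailing unpaired element is silently dropped.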


# Begin -- grammar generated by Yapps
import sys, re
from . import yapps3_compiled_rt as yappsrt

class StarParserScanner(yappsrt.Scanner):
    def __init__(self, *args,**kwargs):
        patterns = [
            ('":"', ':'),
            ('([ \t\n\r](?!;))|[ \t]', '([ \t\n\r](?!;))|[ \t]'),
            ('(#.*[\n\r](?!;))|(#.*)', '(#.*[\n\r](?!;))|(#.*)'),
            ('LBLOCK', '(L|l)(O|o)(O|o)(P|p)_'),
            ('GLOBAL', '(G|g)(L|l)(O|o)(B|b)(A|a)(L|l)_'),
            ('STOP', '(S|s)(T|t)(O|o)(P|p)_'),
            ('save_heading', u'(S|s)(A|a)(V|v)(E|e)_[][!%&\\(\\)*+,./:<=>?@0-9A-Za-z\\\\^`{}\\|~"#$\';_\xa0-\ud7ff\ue000-\ufdcf\ufdf0-\ufffd\U00010000-\U0001fffd\U00020000-\U0002fffd\U00030000-\U0003fffd\U00040000-\U0004fffd\U00050000-\U0005fffd\U00060000-\U0006fffd\U00070000-\U0007fffd\U00080000-\U0008fffd\U00090000-\U0009fffd\U000a0000-\U000afffd\U000b0000-\U000bfffd\U000c0000-\U000cfffd\U000d0000-\U000dfffd\U000e0000-\U000efffd\U000f0000-\U000ffffd\U00100000-\U0010fffd-]+'),
            ('save_end', '(S|s)(A|a)(V|v)(E|e)_'),
            ('data_name', u'_[][!%&\\(\\)*+,./:<=>?@0-9A-Za-z\\\\^`{}\\|~"#$\';_\xa0-\ud7ff\ue000-\ufdcf\ufdf0-\ufffd\U00010000-\U0001fffd\U00020000-\U0002fffd\U00030000-\U0003fffd\U00040000-\U0004fffd\U00050000-\U0005fffd\U00060000-\U0006fffd\U00070000-\U0007fffd\U00080000-\U0008fffd\U00090000-\U0009fffd\U000a0000-\U000afffd\U000b0000-\U000bfffd\U000c0000-\U000cfffd\U000d0000-\U000dfffd\U000e0000-\U000efffd\U000f0000-\U000ffffd\U00100000-\U0010fffd-]+'),
            ('data_heading', u'(D|d)(A|a)(T|t)(A|a)_[][!%&\\(\\)*+,./:<=>?@0-9A-Za-z\\\\^`{}\\|~"#$\';_\xa0-\ud7ff\ue000-\ufdcf\ufdf0-\ufffd\U00010000-\U0001fffd\U00020000-\U0002fffd\U00030000-\U0003fffd\U00040000-\U0004fffd\U00050000-\U0005fffd\U00060000-\U0006fffd\U00070000-\U0007fffd\U00080000-\U0008fffd\U00090000-\U0009fffd\U000a0000-\U000afffd\U000b0000-\U000bfffd\U000c0000-\U000cfffd\U000d0000-\U000dfffd\U000e0000-\U000efffd\U000f0000-\U000ffffd\U00100000-\U0010fffd-]+'),
            ('start_sc_line', '(\n|\r\n);([^\n\r])*(\r\n|\r|\n)+'),
            ('sc_line_of_text', '[^;\r\n]([^\r\n])*(\r\n|\r|\n)+'),
            ('end_sc_line', ';'),
            ('c_c_b', '\\}'),
            ('o_c_b', '\\{'),
            ('c_s_b', '\\]'),
            ('o_s_b', '\\['),
            ('dat_val_internal_sq', '\\[([^\\s\\[\\]]*)\\]'),
            ('triple_quote_data_value', '(?s)\'\'\'.*?\'\'\'|""".*?"""'),
            ('single_quote_data_value', '\'([^\n\r\x0c\'])*\'+|"([^\n\r"])*"+'),
            ('data_value_1', '((?!(((S|s)(A|a)(V|v)(E|e)_[^\\s]*)|((G|g)(L|l)(O|o)(B|b)(A|a)(L|l)_[^\\s]*)|((S|s)(T|t)(O|o)(P|p)_[^\\s]*)|((D|d)(A|a)(T|t)(A|a)_[^\\s]*)))[^\\s"#$\'_\\{\\}\\[\\]][^\\s\\{\\}\\[\\]]*)'),
            ('END', '$'),
        ]
        yappsrt.Scanner.__init__(self,patterns,['([ \t\n\r](?!;))|[ \t]', '(#.*[\n\r](?!;))|(#.*)'],*args,**kwargs)

class StarParser(yappsrt.Parser):
    Context = yappsrt.Context
    def input(self, prepared, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'input', [prepared])
        _token = self._peek('END', 'data_heading')
        if _token == 'data_heading':
            dblock = self.dblock(prepared, _context)
            allblocks = prepared; allblocks.merge_fast(dblock)
            while self._peek('END', 'data_heading') == 'data_heading':
                dblock = self.dblock(prepared, _context)
                allblocks.merge_fast(dblock)
            if self._peek() not in ['END', 'data_heading']:
                raise yappsrt.SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['END', 'data_heading']))
            END = self._scan('END')
        else: # == 'END'
            END = self._scan('END')
            allblocks = prepared
        return allblocks

    def dblock(self, prepared, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'dblock', [prepared])
        data_heading = self._scan('data_heading')
        heading = data_heading[5:];thisbc=StarFile(characterset='unicode',standard=prepared.standard);act_heading = thisbc.NewBlock(heading,StarBlock(overwrite=False));stored_block = thisbc[act_heading]
        while self._peek('save_heading', 'LBLOCK', 'data_name', 'save_end', 'END', 'data_heading') in ['save_heading', 'LBLOCK', 'data_name']:
            _token = self._peek('save_heading', 'LBLOCK', 'data_name')
            if _token != 'save_heading':
                dataseq = self.dataseq(stored_block, _context)
            else: # == 'save_heading'
                save_frame = self.save_frame(_context)
                thisbc.merge_fast(save_frame,parent=stored_block)
        if self._peek() not in ['save_heading', 'LBLOCK', 'data_name', 'save_end', 'END', 'data_heading']:
            raise yappsrt.SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['save_heading', 'LBLOCK', 'data_name', 'save_end', 'END', 'data_heading']))
        stored_block.setmaxnamelength(stored_block.maxnamelength);return (monitor('dblock',thisbc))

    def dataseq(self, starblock, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'dataseq', [starblock])
        data = self.data(starblock, _context)
        while self._peek('LBLOCK', 'data_name', 'save_heading', 'save_end', 'END', 'data_heading') in ['LBLOCK', 'data_name']:
            data = self.data(starblock, _context)
        if self._peek() not in ['LBLOCK', 'data_name', 'save_heading', 'save_end', 'END', 'data_heading']:
            raise yappsrt.SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['LBLOCK', 'data_name', 'save_heading', 'save_end', 'END', 'data_heading']))

    def data(self, currentblock, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'data', [currentblock])
        _token = self._peek('LBLOCK', 'data_name')
        if _token == 'LBLOCK':
            top_loop = self.top_loop(_context)
            makeloop(currentblock,top_loop)
        else: # == 'data_name'
            datakvpair = self.datakvpair(_context)
            currentblock.AddItem(datakvpair[0],datakvpair[1],precheck=False)

    def datakvpair(self, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'datakvpair', [])
        data_name = self._scan('data_name')
        data_value = self.data_value(_context)
        return [data_name,data_value]

    def data_value(self, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'data_value', [])
        _token = self._peek('data_value_1', 'triple_quote_data_value', 'single_quote_data_value', 'start_sc_line', 'o_s_b', 'o_c_b')
        if _token == 'data_value_1':
            data_value_1 = self._scan('data_value_1')
            thisval = data_value_1
        elif _token not in ['start_sc_line', 'o_s_b', 'o_c_b']:
            delimited_data_value = self.delimited_data_value(_context)
            thisval = delimited_data_value
        elif _token == 'start_sc_line':
            sc_lines_of_text = self.sc_lines_of_text(_context)
            thisval = stripextras(sc_lines_of_text)
        else: # in ['o_s_b', 'o_c_b']
            bracket_expression = self.bracket_expression(_context)
            thisval = bracket_expression
        return monitor('data_value',thisval)

    def delimited_data_value(self, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'delimited_data_value', [])
        _token = self._peek('triple_quote_data_value', 'single_quote_data_value')
        if _token == 'triple_quote_data_value':
            triple_quote_data_value = self._scan('triple_quote_data_value')
            thisval = striptriple(triple_quote_data_value)
        else: # == 'single_quote_data_value'
            single_quote_data_value = self._scan('single_quote_data_value')
            thisval = stripstring(single_quote_data_value)
        return thisval

    def sc_lines_of_text(self, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'sc_lines_of_text', [])
        start_sc_line = self._scan('start_sc_line')
        lines = StringIO();lines.write(start_sc_line)
        while self._peek('end_sc_line', 'sc_line_of_text') == 'sc_line_of_text':
            sc_line_of_text = self._scan('sc_line_of_text')
            lines.write(sc_line_of_text)
        if self._peek() not in ['end_sc_line', 'sc_line_of_text']:
            raise yappsrt.SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['sc_line_of_text', 'end_sc_line']))
        end_sc_line = self._scan('end_sc_line')
        lines.write(end_sc_line);return monitor('sc_line_of_text',lines.getvalue())

    def bracket_expression(self, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'bracket_expression', [])
        _token = self._peek('o_s_b', 'o_c_b')
        if _token == 'o_s_b':
            square_bracket_expr = self.square_bracket_expr(_context)
            return square_bracket_expr
        else: # == 'o_c_b'
            curly_bracket_expr = self.curly_bracket_expr(_context)
            return curly_bracket_expr

    def top_loop(self, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'top_loop', [])
        LBLOCK = self._scan('LBLOCK')
        loopfield = self.loopfield(_context)
        loopvalues = self.loopvalues(_context)
        return loopfield,loopvalues

    def loopfield(self, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'loopfield', [])
        loop_seq=[]
        while self._peek('data_name', 'data_value_1', 'triple_quote_data_value', 'single_quote_data_value', 'start_sc_line', 'o_s_b', 'o_c_b') == 'data_name':
            data_name = self._scan('data_name')
            loop_seq.append(data_name)
        if self._peek() not in ['data_name', 'data_value_1', 'triple_quote_data_value', 'single_quote_data_value', 'start_sc_line', 'o_s_b', 'o_c_b']:
            raise yappsrt.SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['data_name', 'data_value_1', 'triple_quote_data_value', 'single_quote_data_value', 'start_sc_line', 'o_s_b', 'o_c_b']))
        return loop_seq

    def loopvalues(self, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'loopvalues', [])
        data_value = self.data_value(_context)
        dataloop=[data_value]
        while self._peek('data_value_1', 'triple_quote_data_value', 'single_quote_data_value', 'start_sc_line', 'o_s_b', 'o_c_b', 'LBLOCK', 'data_name', 'save_heading', 'save_end', 'END', 'data_heading') in ['data_value_1', 'triple_quote_data_value', 'single_quote_data_value', 'start_sc_line', 'o_s_b', 'o_c_b']:
            data_value = self.data_value(_context)
            dataloop.append(monitor('loopval',data_value))
        if self._peek() not in ['data_value_1', 'triple_quote_data_value', 'single_quote_data_value', 'start_sc_line', 'o_s_b', 'o_c_b', 'LBLOCK', 'data_name', 'save_heading', 'save_end', 'END', 'data_heading']:
            raise yappsrt.SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['data_value_1', 'triple_quote_data_value', 'single_quote_data_value', 'start_sc_line', 'o_s_b', 'o_c_b', 'LBLOCK', 'data_name', 'save_heading', 'save_end', 'END', 'data_heading']))
        return dataloop

    def save_frame(self, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'save_frame', [])
        save_heading = self._scan('save_heading')
        savehead = save_heading[5:];savebc = StarFile();newname = savebc.NewBlock(savehead,StarBlock(overwrite=False));stored_block = savebc[newname]
        while self._peek('save_end', 'save_heading', 'LBLOCK', 'data_name', 'END', 'data_heading') in ['save_heading', 'LBLOCK', 'data_name']:
            _token = self._peek('save_heading', 'LBLOCK', 'data_name')
            if _token != 'save_heading':
                dataseq = self.dataseq(savebc[savehead], _context)
            else: # == 'save_heading'
                save_frame = self.save_frame(_context)
                savebc.merge_fast(save_frame,parent=stored_block)
        if self._peek() not in ['save_end', 'save_heading', 'LBLOCK', 'data_name', 'END', 'data_heading']:
            raise yappsrt.SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['save_end', 'save_heading', 'LBLOCK', 'data_name', 'END', 'data_heading']))
        save_end = self._scan('save_end')
        return monitor('save_frame',savebc)

    def square_bracket_expr(self, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'square_bracket_expr', [])
        o_s_b = self._scan('o_s_b')
        this_list = []
        while self._peek('c_s_b', 'data_value_1', 'triple_quote_data_value', 'single_quote_data_value', 'start_sc_line', 'o_s_b', 'o_c_b') != 'c_s_b':
            data_value = self.data_value(_context)
            this_list.append(data_value)
            while self._peek('data_value_1', 'triple_quote_data_value', 'single_quote_data_value', 'start_sc_line', 'c_s_b', 'o_s_b', 'o_c_b') != 'c_s_b':
                data_value = self.data_value(_context)
                this_list.append(data_value)
            if self._peek() not in ['data_value_1', 'triple_quote_data_value', 'single_quote_data_value', 'start_sc_line', 'c_s_b', 'o_s_b', 'o_c_b']:
                raise yappsrt.SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['data_value_1', 'c_s_b', 'triple_quote_data_value', 'single_quote_data_value', 'start_sc_line', 'o_s_b', 'o_c_b']))
        if self._peek() not in ['c_s_b', 'data_value_1', 'triple_quote_data_value', 'single_quote_data_value', 'start_sc_line', 'o_s_b', 'o_c_b']:
            raise yappsrt.SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['data_value_1', 'c_s_b', 'triple_quote_data_value', 'single_quote_data_value', 'start_sc_line', 'o_s_b', 'o_c_b']))
        c_s_b = self._scan('c_s_b')
        return StarList(this_list)

    def curly_bracket_expr(self, _parent=None):
        _context = self.Context(_parent, self._scanner, self._pos, 'curly_bracket_expr', [])
        o_c_b = self._scan('o_c_b')
        table_as_list = []
        while self._peek('c_c_b', 'triple_quote_data_value', 'single_quote_data_value') != 'c_c_b':
            delimited_data_value = self.delimited_data_value(_context)
            table_as_list = [delimited_data_value]
            self._scan('":"')
            data_value = self.data_value(_context)
            table_as_list.append(data_value)
            while self._peek('triple_quote_data_value', 'single_quote_data_value', 'c_c_b') != 'c_c_b':
                delimited_data_value = self.delimited_data_value(_context)
                table_as_list.append(delimited_data_value)
                self._scan('":"')
                data_value = self.data_value(_context)
                table_as_list.append(data_value)
            if self._peek() not in ['triple_quote_data_value', 'single_quote_data_value', 'c_c_b']:
                raise yappsrt.SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['triple_quote_data_value', 'single_quote_data_value', 'c_c_b']))
        if self._peek() not in ['c_c_b', 'triple_quote_data_value', 'single_quote_data_value']:
            raise yappsrt.SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['triple_quote_data_value', 'single_quote_data_value', 'c_c_b']))
        c_c_b = self._scan('c_c_b')
        return StarDict(pairwise(table_as_list))


def parse(rule, text):
    P = StarParser(StarParserScanner(text))
    return yappsrt.wrap_error_reporter(P, rule)

# End -- grammar generated by Yapps

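# Illustrative sketch (comments only, not executed) of how the generated
# parser is typically driven by the calling module; the variable names here
# are hypothetical:
#
#     scanner = StarParserScanner(cif_text)   # cif_text: the CIF source as unicode
#     parser = StarParser(scanner)
#     result = parser.input(prepared)         # 'prepared' is a StarFile-like object
#                                             # providing .standard and merge_fast(),
#                                             # as the input/dblock rules expect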