Changeset 350
- Timestamp:
- Mar 12, 2011 12:48:04 AM (14 years ago)
- Location:
- topdoc/src/TopDoc
- Files:
- 4 edited
Legend:
- Unmodified
- Added
- Removed
-
TabularUnified topdoc/src/TopDoc/EpicsDatabase.py ¶
r348 r350 50 50 self.filename = dbFilename 51 51 self.absolute_filename = os.path.abspath(dbFilename) 52 self. buf = utilities.tokenizeTextFile(dbFilename)52 self.tokenLog, self.buf = utilities.tokenizeTextFile(dbFilename) 53 53 self._parse() 54 54 … … 124 124 into the internal memory structure. 125 125 ''' 126 # p_tkn, tkn = self.tokenLog.next() 127 # p_tkn, tkn = self.tokenLog.next(p_tkn) 126 128 if self.buf is not None: 127 129 pvDict = {} -
TabularUnified topdoc/src/TopDoc/EpicsTemplate.py ¶
r343 r350 37 37 self.filename = templateFilename 38 38 self.absolute_filename = os.path.abspath(templateFilename) 39 self. buf = utilities.tokenizeTextFile(templateFilename)39 self.tokenLog, self.buf = utilities.tokenizeTextFile(templateFilename) 40 40 self._parse_() 41 41 … … 54 54 return db.getFull(macros) 55 55 56 def nextUsefulToken(self, ptr = -1):57 '''58 walk through the tokens and find the next actionable token59 @param ptr: current buffer pointer, integer [0 .. len(self.buf)-1]60 @return: tuple (ptr, self.buf[ptr]), where ptr points to an actionable token or (None, None)61 '''62 # TODO Should this be part of the TokenLog module?63 ptr += 164 while ptr < len(self.buf):65 tkn = self.buf[ptr]66 if tkn['tokName'] not in ('COMMENT', 'NEWLINE', 'ENDMARKER'):67 return ptr, tkn68 ptr += 169 return None, None70 71 56 def _parse_(self): 72 57 ''' 73 58 walk through the tokens and find the databases, then macros 74 59 ''' 75 p_tkn, tkn = self. nextUsefulToken()60 p_tkn, tkn = self.tokenLog.next() 76 61 while p_tkn is not None: 77 62 if tkn['tokName'] == 'NAME' and tkn['tokStr'] == 'file': 78 p_tkn, tkn = self. nextUsefulToken(p_tkn)63 p_tkn, tkn = self.tokenLog.next(p_tkn) 79 64 dbFile = utilities.strip_quotes( tkn['tokStr'] ) 80 65 # … … 83 68 # TODO macro expansion of dbFile 84 69 # 85 p_tkn, tkn = self. nextUsefulToken(p_tkn)70 p_tkn, tkn = self.tokenLog.next(p_tkn) 86 71 if tkn['tokStr'] != '{': 87 72 raise Exception, "problem token: " + str(tkn) 88 73 else: 89 p_tkn, tkn = self. nextUsefulToken(p_tkn)74 p_tkn, tkn = self.tokenLog.next(p_tkn) 90 75 if tkn['tokStr'] == 'pattern': 91 76 # get the macro names, then … … 98 83 raise Exception, "problem token: " + str(tkn) 99 84 # start processing the macro set(s) 100 p_tkn, tkn = self. nextUsefulToken(p_tkn)85 p_tkn, tkn = self.tokenLog.next(p_tkn) 101 86 102 87 def _gather_inline_macros_(self, dbFile, p_tkn): … … 113 98 buf = "" 114 99 while tkn['tokStr'] != '}': 115 p_tkn, tkn = self. 
nextUsefulToken(p_tkn)100 p_tkn, tkn = self.tokenLog.next(p_tkn) 116 101 s = tkn['tokStr'] 117 102 if s == '=': … … 126 111 # TODO do something with db 127 112 print dbFile, macros 128 p_tkn, tkn = self. nextUsefulToken(p_tkn)113 p_tkn, tkn = self.tokenLog.next(p_tkn) 129 114 return p_tkn, tkn 130 115 … … 139 124 tkn = self.buf[p_tkn] 140 125 keyList = [] 141 p_tkn, tkn = self. nextUsefulToken(p_tkn)126 p_tkn, tkn = self.tokenLog.next(p_tkn) 142 127 if tkn['tokStr'] != '{': 143 128 raise Exception, "problem token: " + str(tkn) 144 129 else: 145 p_tkn, tkn = self. nextUsefulToken(p_tkn)130 p_tkn, tkn = self.tokenLog.next(p_tkn) 146 131 # gather the macro keys in a list 147 132 while tkn['tokStr'] != '}': … … 152 137 if key != ',': 153 138 keyList.append( key ) 154 p_tkn, tkn = self. nextUsefulToken(p_tkn)155 p_tkn, tkn = self. nextUsefulToken(p_tkn)139 p_tkn, tkn = self.tokenLog.next(p_tkn) 140 p_tkn, tkn = self.tokenLog.next(p_tkn) 156 141 # loop through each set of macro definitions 157 142 while tkn['tokStr'] == '{': 158 p_tkn, tkn = self. nextUsefulToken(p_tkn)143 p_tkn, tkn = self.tokenLog.next(p_tkn) 159 144 buf = "" 160 145 liszt = [] … … 169 154 buf += tkn['tokStr'] 170 155 lastend = tkn['end'][1] 171 p_tkn, tkn = self. nextUsefulToken(p_tkn)156 p_tkn, tkn = self.tokenLog.next(p_tkn) 172 157 liszt.append( buf ) 173 158 if len(keyList) != len(liszt): … … 184 169 # TODO do something with db 185 170 print dbFile, macros 186 p_tkn, tkn = self. nextUsefulToken(p_tkn)171 p_tkn, tkn = self.tokenLog.next(p_tkn) 187 172 return p_tkn, tkn 188 173 -
TabularUnified topdoc/src/TopDoc/TokenLog.py ¶
r259 r350 169 169 return lines 170 170 171 def next(self, ptr = -1): 172 ''' 173 walk through the tokens and find the next actionable token 174 @param ptr: current buffer pointer, integer [0 .. len(self.tokenList)-1] 175 @return: tuple (ptr, self.tokenList[ptr]), where ptr points to an actionable token or (None, None) 176 ''' 177 ptr += 1 178 while ptr < len(self.tokenList): 179 tkn = self.tokenList[ptr] 180 if tkn['tokName'] not in ('COMMENT', 'NEWLINE', 'ENDMARKER'): 181 return ptr, tkn 182 ptr += 1 183 return None, None 184 171 185 172 186 ###################################################################### -
TabularUnified topdoc/src/TopDoc/utilities.py ¶
r338 r350 16 16 import re 17 17 import TokenLog 18 19 18 20 19 … … 128 127 ''' 129 128 @param filename: string name of file to be read 130 @return: structure from TokenLog.lineAnalysis()129 @return: tuple of (TokenLog object, structure from TokenLog.lineAnalysis()) 131 130 ''' 132 131 obj = TokenLog.TokenLog() 133 132 obj.processFile(filename) 134 return obj .getTokenList()133 return obj, obj.getTokenList() 135 134 136 135
Note: See TracChangeset for help on using the changeset viewer.