[Python-checkins] CVS: python/dist/src/Lib shlex.py,1.5,1.6
Guido van Rossum
python-dev@python.org
Mon, 1 May 2000 16:08:49 -0400 (EDT)
Update of /projects/cvsroot/python/dist/src/Lib
In directory eric:/projects/python/develop/guido/src/Lib
Modified Files:
shlex.py
Log Message:
Eric Raymond:
Added and documented the capability for shlex to handle lexical-level
inclusion and a stack of input sources. Also, the input stream member
is now documented, and the constructor takes an optional source-filename.
The class provides facilities to generate error messages that track
file and line number.
[GvR: I changed the __main__ code so that it actually stops at EOF, as
Eric surely intended -- however, get_token() returns '' at EOF rather
than the None he was testing for.]
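
A minimal usage sketch of the new facilities (not part of the checkin;
the file name "main.txt" and the choice of "source" as the inclusion
keyword are assumptions for illustration).  Inclusion is off by default
(self.source is None) and is enabled by setting the source attribute to
the keyword that should trigger it; when that keyword is seen, the next
token is taken as a filename and handed to sourcehook():

    import shlex

    fp = open("main.txt", "r")
    lexer = shlex.shlex(fp, "main.txt")   # new optional source-filename argument
    lexer.source = "source"               # enable lexical-level inclusion
    while 1:
        tok = lexer.get_token()
        if not tok:                       # get_token() returns '' at EOF
            break
        print "%s, line %d: %s" % (lexer.infile, lexer.lineno, `tok`)

With inclusion enabled, an input line such as

    source "other.txt"

is replaced by the token stream of other.txt, and the infile and lineno
members follow the nesting as files are pushed and popped.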
Index: shlex.py
===================================================================
RCS file: /projects/cvsroot/python/dist/src/Lib/shlex.py,v
retrieving revision 1.5
retrieving revision 1.6
diff -C2 -r1.5 -r1.6
*** shlex.py	2000/02/04 15:28:40	1.5
--- shlex.py	2000/05/01 20:08:46	1.6
***************
*** 2,5 ****
--- 2,6 ----
# Module and documentation by Eric S. Raymond, 21 Dec 1998
+ # Input stacking and error message cleanup added by ESR, March 2000
import sys
***************
*** 7,15 ****
class shlex:
"A lexical analyzer class for simple shell-like syntaxes."
! def __init__(self, instream=None):
if instream:
self.instream = instream
else:
self.instream = sys.stdin
self.commenters = '#'
self.wordchars = 'abcdfeghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
--- 8,18 ----
class shlex:
"A lexical analyzer class for simple shell-like syntaxes."
! def __init__(self, instream=None, infile=None):
if instream:
self.instream = instream
+ self.infile = infile
else:
self.instream = sys.stdin
+ self.infile = None
self.commenters = '#'
self.wordchars = 'abcdfeghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
***************
*** 21,39 ****
self.debug = 0
self.token = ''
def push_token(self, tok):
"Push a token onto the stack popped by the get_token method"
! if (self.debug >= 1):
! print "Pushing " + tok
self.pushback = [tok] + self.pushback;
def get_token(self):
! "Get a token from the input stream (or from stack if it's monempty)"
if self.pushback:
tok = self.pushback[0]
self.pushback = self.pushback[1:]
! if (self.debug >= 1):
! print "Popping " + tok
return tok
tok = ''
while 1:
--- 24,80 ----
self.debug = 0
self.token = ''
+ self.filestack = []
+ self.source = None
+ if self.debug:
+ print 'shlex: reading from %s, line %d' % (self.instream,self.lineno)
def push_token(self, tok):
"Push a token onto the stack popped by the get_token method"
! if self.debug >= 1:
! print "shlex: pushing token " + `tok`
self.pushback = [tok] + self.pushback;
def get_token(self):
! "Get a token from the input stream (or from stack if it's nonempty)"
if self.pushback:
tok = self.pushback[0]
self.pushback = self.pushback[1:]
! if self.debug >= 1:
! print "shlex: popping token " + `tok`
return tok
+ # No pushback. Get a token.
+ raw = self.read_token()
+ # Handle inclusions
+ while raw == self.source:
+ (newfile, newstream) = self.sourcehook(self.read_token())
+ self.filestack = [(self.infile,self.instream,self.lineno)] + self.filestack
+ self.infile = newfile
+ self.instream = newstream
+ self.lineno = 1
+ if self.debug:
+ print 'shlex: pushing to file %s' % (self.infile,)
+ raw = self.get_token()
+ # Maybe we got EOF instead?
+ while raw == "":
+ if len(self.filestack) == 0:
+ return ""
+ else:
+ self.instream.close()
+ (self.infile, self.instream, self.lineno) = self.filestack[0]
+ self.filestack = self.filestack[1:]
+ if self.debug:
+ print 'shlex: popping to %s, line %d' % (self.instream, self.lineno)
+ self.state = ' '
+ raw = self.get_token()
+ # Neither inclusion nor EOF
+ if self.debug >= 1:
+ if raw:
+ print "shlex: token=" + `raw`
+ else:
+ print "shlex: token=EOF"
+ return raw
+
+ def read_token(self):
+ "Read a token from the input stream (no pushback or inclusions)"
tok = ''
while 1:
***************
*** 42,48 ****
self.lineno = self.lineno + 1
if self.debug >= 3:
! print "In state " + repr(self.state) + " I see character: " + repr(nextchar)
if self.state == None:
! return ''
elif self.state == ' ':
if not nextchar:
--- 83,90 ----
self.lineno = self.lineno + 1
if self.debug >= 3:
! print "shlex: in state " + repr(self.state) + " I see character: " + repr(nextchar)
if self.state == None:
! self.token = ''; # past end of file
! break
elif self.state == ' ':
if not nextchar:
***************
*** 51,55 ****
elif nextchar in self.whitespace:
if self.debug >= 2:
! print "I see whitespace in whitespace state"
if self.token:
break # emit current token
--- 93,97 ----
elif nextchar in self.whitespace:
if self.debug >= 2:
! print "shlex: I see whitespace in whitespace state"
if self.token:
break # emit current token
***************
*** 82,86 ****
elif nextchar in self.whitespace:
if self.debug >= 2:
! print "I see whitespace in word state"
self.state = ' '
if self.token:
--- 124,128 ----
elif nextchar in self.whitespace:
if self.debug >= 2:
! print "shlex: I see whitespace in word state"
self.state = ' '
if self.token:
***************
*** 96,100 ****
self.pushback = [nextchar] + self.pushback
if self.debug >= 2:
! print "I see punctuation in word state"
self.state = ' '
if self.token:
--- 138,142 ----
self.pushback = [nextchar] + self.pushback
if self.debug >= 2:
! print "shlex: I see punctuation in word state"
self.state = ' '
if self.token:
***************
*** 102,112 ****
else:
continue
-
result = self.token
self.token = ''
! if self.debug >= 1:
! print "Token: " + result
return result
if __name__ == '__main__':
--- 144,162 ----
else:
continue
result = self.token
self.token = ''
! if self.debug > 1:
! if result:
! print "shlex: raw token=" + `result`
! else:
! print "shlex: raw token=EOF"
return result
+ def sourcehook(self, newfile):
+ "Hook called on a filename to be sourced."
+ if newfile[0] == '"':
+ newfile = newfile[1:-1]
+ return (newfile, open(newfile, "r"))
+
if __name__ == '__main__':
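
The default sourcehook() above strips surrounding double quotes and
opens the named file as given.  A subclass can override it; the sketch
below is an illustration, not part of the checkin -- the base-directory
handling is an assumption:

    import os.path
    import shlex

    class IncludingLexer(shlex.shlex):
        "Resolve sourced filenames against a base directory."
        def __init__(self, instream=None, infile=None, basedir="."):
            shlex.shlex.__init__(self, instream, infile)
            self.basedir = basedir
            self.source = "source"
        def sourcehook(self, newfile):
            # Same quote-stripping as the default hook, then prepend basedir.
            if newfile[0] == '"':
                newfile = newfile[1:-1]
            newfile = os.path.join(self.basedir, newfile)
            return (newfile, open(newfile, "r"))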
***************
*** 114,120 ****
while 1:
tt = lexer.get_token()
! if tt != None:
! print "Token: " + repr(tt)
! else:
break
--- 164,169 ----
while 1:
tt = lexer.get_token()
! print "Token: " + repr(tt)
! if not tt:
break
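
As the log message notes, the now-documented infile and lineno members
let callers report errors with file and line context.  A hedged sketch
(the message format and helper name are assumptions, not part of the
checkin):

    import sys

    def complain(lexer, msg):
        # Compiler-style diagnostic built from the lexer's current position.
        where = lexer.infile or "<stdin>"
        sys.stderr.write('"%s", line %d: %s\n' % (where, lexer.lineno, msg))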