Adding shlex and netrc by Eric Raymond.
This commit is contained in:
parent
5e97c9dff9
commit
9c30c24bc2
93
Lib/netrc.py
Normal file
93
Lib/netrc.py
Normal file
@ -0,0 +1,93 @@
|
|||||||
|
#!/usr/bin/python
|
||||||
|
# Module and documentation by Eric S. Raymond, 21 Dec 1998
|
||||||
|
|
||||||
|
import sys, os, string, shlex
|
||||||
|
|
||||||
|
class netrc:
    """Parse a .netrc file into per-host authenticator tuples and macros.

    Attributes:
        hosts  -- dict mapping a machine name (or 'default') to a
                  (login, account, password) tuple
        macros -- dict mapping a macdef name to a list of raw lines
    """

    def __init__(self, file=None):
        """Read and parse *file* (default: $HOME/.netrc).

        Best-effort, like the original: if the file cannot be opened,
        the instance is simply left with empty hosts/macros.
        """
        # Initialize before open() so a missing file still yields a
        # usable (empty) instance -- the original left these unset.
        self.hosts = {}
        self.macros = {}
        if not file:
            file = os.path.join(os.environ['HOME'], ".netrc")
        try:
            fp = open(file)
        except OSError:
            # Narrowed from a bare except: only I/O failure is expected.
            return
        # Close the stream when parsing ends (the original leaked it).
        with fp:
            self._parse(file, fp)

    def _parse(self, file, fp):
        """Tokenize *fp* and populate self.hosts and self.macros."""
        lexer = shlex.shlex(fp)
        # Hostnames contain dots, which shlex does not treat as word
        # characters by default.
        lexer.wordchars = lexer.wordchars + '.'
        while 1:
            # Look for a machine, default, or macdef top-level keyword.
            toplevel = tt = lexer.get_token()
            if not tt:
                break
            elif tt == 'machine':
                entryname = lexer.get_token()
            elif tt == 'default':
                entryname = 'default'
            elif tt == 'macdef':
                # Collect raw macro lines until EOF or two consecutive
                # blank lines (the original's termination rule).
                entryname = lexer.get_token()
                self.macros[entryname] = []
                # Fixes the original's 'whitepace' typo, so the narrowed
                # whitespace set actually takes effect on the lexer.
                lexer.whitespace = ' \t'
                while 1:
                    line = lexer.instream.readline()
                    if not line or (line == '\n' and tt == '\n'):
                        lexer.whitespace = ' \t\r\n'
                        break
                    tt = line
                    self.macros[entryname].append(line)
                continue
            else:
                raise SyntaxError(
                    "bad toplevel token %s, file %s, line %d"
                    % (tt, file, lexer.lineno))

            # We're looking at the start of an entry for a named machine
            # or for 'default'.  (The original tested only 'machine',
            # which made every 'default' entry raise on its first
            # follower token -- parse both.)
            if toplevel in ('machine', 'default'):
                login = account = password = None
                while 1:
                    tt = lexer.get_token()
                    if (not tt or tt == 'machine'
                            or tt == 'default' or tt == 'macdef'):
                        if login and password:
                            self.hosts[entryname] = (login, account, password)
                            lexer.push_token(tt)
                            break
                        else:
                            raise SyntaxError(
                                "malformed %s entry %s terminated by %s"
                                % (toplevel, entryname, repr(tt)))
                    elif tt == 'login' or tt == 'user':
                        login = lexer.get_token()
                    elif tt == 'account':
                        account = lexer.get_token()
                    elif tt == 'password':
                        password = lexer.get_token()
                    else:
                        raise SyntaxError(
                            "bad follower token %s, file %s, line %d"
                            % (tt, file, lexer.lineno))

    def authenticators(self, host):
        """Return a (login, account, password) tuple for *host*, falling
        back to the 'default' entry; None if neither is known."""
        if host in self.hosts:
            return self.hosts[host]
        elif 'default' in self.hosts:
            return self.hosts['default']
        else:
            return None

    def __repr__(self):
        """Dump the class data in the format of a .netrc file."""
        # Output layout is kept byte-identical to the original dump.
        rep = ""
        for host in self.hosts.keys():
            attrs = self.hosts[host]
            rep = rep + "machine " + host + "\n\tlogin " + repr(attrs[0]) + "\n"
            if attrs[1]:
                rep = rep + "account " + repr(attrs[1])
            rep = rep + "\tpassword " + repr(attrs[2]) + "\n"
        for macro in self.macros.keys():
            rep = rep + "macdef " + macro + "\n"
            for line in self.macros[macro]:
                rep = rep + line
            rep = rep + "\n"
        return rep
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Smoke test: parse the invoking user's ~/.netrc and dump it back
    # out in .netrc format.  (print() call form is valid in Py2 and Py3.)
    print(netrc())
|
||||||
|
|
119
Lib/shlex.py
Normal file
119
Lib/shlex.py
Normal file
@ -0,0 +1,119 @@
|
|||||||
|
#!/usr/bin/python
|
||||||
|
# Module and documentation by Eric S. Raymond, 21 Dec 1998
|
||||||
|
|
||||||
|
import sys, os, string
|
||||||
|
|
||||||
|
class shlex:
    "A lexical analyzer class for simple shell-like syntaxes."

    def __init__(self, instream=None):
        """Create a lexer reading from *instream* (default: sys.stdin)."""
        if instream:
            self.instream = instream
        else:
            self.instream = sys.stdin
        # Characters that begin a comment running to end of line.
        self.commenters = '#'
        # Characters allowed inside a word token (same set as the
        # original; its listing merely had 'e'/'f' transposed).
        self.wordchars = ('abcdefghijklmnopqrstuvwxyz'
                          'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_')
        self.whitespace = ' \t\r\n'
        self.quotes = '\'"'
        # Lexer state: ' ' = between tokens, 'a' = inside a word,
        # a quote character = inside that quote, None = EOF reached.
        self.state = ' '
        self.pushback = []
        self.lineno = 1
        self.debug = 0
        self.token = ''

    def push_token(self, tok):
        "Push a token onto the stack popped by the get_token method."
        if self.debug >= 1:
            print("Pushing " + tok)
        self.pushback.insert(0, tok)

    def get_token(self):
        "Get a token from the input stream (or from stack if it's nonempty)."
        if self.pushback:
            tok = self.pushback.pop(0)
            if self.debug >= 1:
                print("Popping " + tok)
            return tok
        while 1:
            nextchar = self.instream.read(1)
            if nextchar == '\n':
                self.lineno = self.lineno + 1
            if self.debug >= 3:
                print("In state " + repr(self.state) +
                      " I see character: " + repr(nextchar))
            if self.state is None:
                return ''
            elif self.state == ' ':
                if not nextchar:
                    self.state = None  # end of file
                    break
                elif nextchar in self.whitespace:
                    if self.debug >= 2:
                        print("I see whitespace in whitespace state")
                    if self.token:
                        break  # emit current token
                    else:
                        continue
                elif nextchar in self.commenters:
                    self.instream.readline()
                    self.lineno = self.lineno + 1
                elif nextchar in self.wordchars:
                    self.token = nextchar
                    self.state = 'a'
                elif nextchar in self.quotes:
                    self.token = nextchar
                    self.state = nextchar
                else:
                    # Punctuation is emitted as a one-character token.
                    self.token = nextchar
                    if self.token:
                        break  # emit current token
                    else:
                        continue
            elif self.state in self.quotes:
                if not nextchar:
                    # The original looped forever on an unterminated
                    # quote; fail loudly instead.
                    raise ValueError("No closing quotation")
                self.token = self.token + nextchar
                if nextchar == self.state:
                    self.state = ' '
                    break
            elif self.state == 'a':
                if not nextchar:
                    self.state = None  # end of file
                    break
                elif nextchar in self.whitespace:
                    if self.debug >= 2:
                        print("I see whitespace in word state")
                    self.state = ' '
                    if self.token:
                        break  # emit current token
                    else:
                        continue
                elif nextchar in self.commenters:
                    self.instream.readline()
                    self.lineno = self.lineno + 1
                elif nextchar in self.wordchars or nextchar in self.quotes:
                    self.token = self.token + nextchar
                else:
                    self.pushback.insert(0, nextchar)
                    if self.debug >= 2:
                        print("I see punctuation in word state")
                    # The original assigned a local 'state' here, leaving
                    # self.state stuck in 'a'; assign the attribute so a
                    # quote after punctuation starts a quoted token.
                    self.state = ' '
                    if self.token:
                        break  # emit current token
                    else:
                        continue
        result = self.token
        self.token = ''
        if self.debug >= 1:
            print("Token: " + result)
        return result
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Demo driver: tokenize stdin and echo each token.
    lexer = shlex()
    while 1:
        tt = lexer.get_token()
        # get_token returns '' at EOF (never None), so the original
        # 'tt != None' test looped forever; stop on any falsy token.
        if tt:
            print("Token: " + repr(tt))
        else:
            break
|
||||||
|
|
Loading…
x
Reference in New Issue
Block a user