# shlex.py
# Source Generated with Decompyle++
# File: shlex.pyc (Python 3.13)
import os
import re
import sys
from collections import deque
from io import StringIO
__all__ = [
'shlex',
'split',
'quote',
'join']
class shlex:
    "A lexical analyzer class for simple shell-like syntaxes."

    def __init__(self, instream=None, infile=None, posix=False,
                 punctuation_chars=False):
        # A string input is wrapped so everything downstream sees a stream.
        if isinstance(instream, str):
            instream = StringIO(instream)
        if instream is not None:
            self.instream = instream
            self.infile = infile
        else:
            self.instream = sys.stdin
            self.infile = None
        self.posix = posix
        # In POSIX mode EOF is None so empty-string tokens stay distinguishable.
        if posix:
            self.eof = None
        else:
            self.eof = ''
        self.commenters = '#'
        self.wordchars = ('abcdfeghijklmnopqrstuvwxyz'
                          'ABCDFEGHIJKLMNOPQRSTUVWXYZ0123456789_')
        if self.posix:
            self.wordchars += ('ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ'
                               'ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ')
        self.whitespace = ' \t\r\n'
        self.whitespace_split = False
        self.quotes = '\'"'
        self.escape = '\\'
        self.escapedquotes = '"'
        self.state = ' '
        self.pushback = deque()
        self.lineno = 1
        self.debug = 0
        self.token = ''
        self.filestack = deque()
        self.source = None
        if not punctuation_chars:
            punctuation_chars = ''
        elif punctuation_chars is True:
            punctuation_chars = '();<>|&'
        self._punctuation_chars = punctuation_chars
        if punctuation_chars:
            # _pushback_chars is a push back queue used by lookahead logic
            self._pushback_chars = deque()
            # these chars added because allowed in file names, args, wildcards
            self.wordchars += '~-./*?='
            # remove any punctuation chars from wordchars
            t = self.wordchars.maketrans(dict.fromkeys(punctuation_chars))
            self.wordchars = self.wordchars.translate(t)

    @property
    def punctuation_chars(self):
        # Read-only: mutating it after construction would desync wordchars.
        return self._punctuation_chars

    def push_token(self, tok):
        "Push a token onto the stack popped by the get_token method"
        if self.debug >= 1:
            print("shlex: pushing token " + repr(tok))
        self.pushback.appendleft(tok)

    def push_source(self, newstream, newfile=None):
        "Push an input source onto the lexer's input source stack."
        if isinstance(newstream, str):
            newstream = StringIO(newstream)
        self.filestack.appendleft((self.infile, self.instream, self.lineno))
        self.infile = newfile
        self.instream = newstream
        self.lineno = 1
        if self.debug:
            if newfile is not None:
                print('shlex: pushing to file %s' % (self.infile,))
            else:
                print('shlex: pushing to stream %s' % (self.instream,))

    def pop_source(self):
        "Pop the input source stack."
        self.instream.close()
        (self.infile, self.instream, self.lineno) = self.filestack.popleft()
        if self.debug:
            print('shlex: popping to %s, line %d'
                  % (self.instream, self.lineno))
        self.state = ' '

    def get_token(self):
        "Get a token from the input stream (or from stack if it's nonempty)"
        if self.pushback:
            tok = self.pushback.popleft()
            if self.debug >= 1:
                print("shlex: popping token " + repr(tok))
            return tok
        # No pushback.  Get a token.
        raw = self.read_token()
        # Handle inclusions
        if self.source is not None:
            while raw == self.source:
                spec = self.sourcehook(self.read_token())
                if spec:
                    (newfile, newstream) = spec
                    self.push_source(newstream, newfile)
                raw = self.get_token()
        # Maybe we got EOF instead?
        while raw == self.eof:
            if not self.filestack:
                return self.eof
            else:
                self.pop_source()
                raw = self.get_token()
        # Neither inclusion nor EOF
        if self.debug >= 1:
            if raw != self.eof:
                print("shlex: token=" + repr(raw))
            else:
                print("shlex: token=EOF")
        return raw

    def read_token(self):
        "Read a raw token from the input stream (no source/EOF handling)."
        quoted = False
        escapedstate = ' '
        while True:
            if self.punctuation_chars and self._pushback_chars:
                nextchar = self._pushback_chars.pop()
            else:
                nextchar = self.instream.read(1)
            if nextchar == '\n':
                self.lineno += 1
            if self.debug >= 3:
                print("shlex: in state %r I see character: %r" % (self.state,
                                                                  nextchar))
            if self.state is None:
                self.token = ''        # past end of file
                break
            elif self.state == ' ':
                if not nextchar:
                    self.state = None  # end of file
                    break
                elif nextchar in self.whitespace:
                    if self.debug >= 2:
                        print("shlex: I see whitespace in whitespace state")
                    if self.token or (self.posix and quoted):
                        break   # emit current token
                    else:
                        continue
                elif nextchar in self.commenters:
                    self.instream.readline()
                    self.lineno += 1
                elif self.posix and nextchar in self.escape:
                    escapedstate = 'a'
                    self.state = nextchar
                elif nextchar in self.wordchars:
                    self.token = nextchar
                    self.state = 'a'
                elif nextchar in self.punctuation_chars:
                    self.token = nextchar
                    self.state = 'c'
                elif nextchar in self.quotes:
                    if not self.posix:
                        self.token = nextchar
                    self.state = nextchar
                elif self.whitespace_split:
                    self.token = nextchar
                    self.state = 'a'
                else:
                    self.token = nextchar
                    if self.token or (self.posix and quoted):
                        break   # emit current token
                    else:
                        continue
            elif self.state in self.quotes:
                quoted = True
                if not nextchar:      # end of file
                    if self.debug >= 2:
                        print("shlex: I see EOF in quotes state")
                    # XXX what error should be raised here?
                    raise ValueError("No closing quotation")
                if nextchar == self.state:
                    if not self.posix:
                        self.token += nextchar
                        self.state = ' '
                        break
                    else:
                        self.state = 'a'
                elif (self.posix and nextchar in self.escape and
                        self.state in self.escapedquotes):
                    escapedstate = self.state
                    self.state = nextchar
                else:
                    self.token += nextchar
            elif self.state in self.escape:
                if not nextchar:      # end of file
                    if self.debug >= 2:
                        print("shlex: I see EOF in escape state")
                    # XXX what error should be raised here?
                    raise ValueError("No escaped character")
                # In posix shells, only the quote itself or the escape
                # character may be escaped by it.
                if (escapedstate in self.quotes and
                        nextchar != self.state and nextchar != escapedstate):
                    self.token += self.state
                self.token += nextchar
                self.state = escapedstate
            elif self.state in ('a', 'c'):
                if not nextchar:
                    self.state = None   # end of file
                    break
                elif nextchar in self.whitespace:
                    if self.debug >= 2:
                        print("shlex: I see whitespace in word state")
                    self.state = ' '
                    if self.token or (self.posix and quoted):
                        break   # emit current token
                    else:
                        continue
                elif nextchar in self.commenters:
                    self.instream.readline()
                    self.lineno += 1
                    if self.posix:
                        self.state = ' '
                        if self.token or (self.posix and quoted):
                            break   # emit current token
                        else:
                            continue
                elif self.state == 'c':
                    # Punctuation runs are emitted as their own tokens.
                    if nextchar in self.punctuation_chars:
                        self.token += nextchar
                    else:
                        if nextchar not in self.whitespace:
                            self._pushback_chars.append(nextchar)
                        self.state = ' '
                        break
                elif self.posix and nextchar in self.quotes:
                    self.state = nextchar
                elif self.posix and nextchar in self.escape:
                    escapedstate = 'a'
                    self.state = nextchar
                elif (nextchar in self.wordchars or nextchar in self.quotes
                        or (self.whitespace_split and
                            nextchar not in self.punctuation_chars)):
                    self.token += nextchar
                else:
                    if self.punctuation_chars:
                        self._pushback_chars.append(nextchar)
                    else:
                        self.pushback.appendleft(nextchar)
                    if self.debug >= 2:
                        print("shlex: I see punctuation in word state")
                    self.state = ' '
                    if self.token or (self.posix and quoted):
                        break   # emit current token
                    else:
                        continue
        result = self.token
        self.token = ''
        # POSIX: an unquoted empty result means EOF, signalled as None.
        if self.posix and not quoted and result == '':
            result = None
        if self.debug > 1:
            if result:
                print("shlex: raw token=" + repr(result))
            else:
                print("shlex: raw token=EOF")
        return result

    def sourcehook(self, newfile):
        "Hook called on a filename to be sourced."
        if newfile[0] == '"':
            newfile = newfile[1:-1]
        # This implements cpp-like semantics for relative-path inclusion.
        if isinstance(self.infile, str) and not os.path.isabs(newfile):
            newfile = os.path.join(os.path.dirname(self.infile), newfile)
        return (newfile, open(newfile, "r"))

    def error_leader(self, infile=None, lineno=None):
        "Emit a C-compiler-like, Emacs-friendly error-message leader."
        if infile is None:
            infile = self.infile
        if lineno is None:
            lineno = self.lineno
        return "\"%s\", line %d: " % (infile, lineno)

    def __iter__(self):
        return self

    def __next__(self):
        token = self.get_token()
        if token == self.eof:
            raise StopIteration
        return token
def split(s, comments=False, posix=True):
    """Split the string *s* using shell-like syntax.

    If *comments* is false (default), '#' loses its comment meaning.
    Passing None for *s* is deprecated (it would read from stdin).
    """
    if s is None:
        import warnings
        warnings.warn("Passing None for 's' to shlex.split() is deprecated.",
                      DeprecationWarning, stacklevel=2)
    lex = shlex(s, posix=posix)
    lex.whitespace_split = True
    if not comments:
        lex.commenters = ''
    return list(lex)
def join(split_command):
    """Return a shell-escaped string from *split_command*."""
    return ' '.join(quote(arg) for arg in split_command)
# Characters outside this set force quoting; ASCII-only so locale-specific
# word characters in \w never slip through unquoted.
_find_unsafe = re.compile(r'[^\w@%+=:,./-]', re.ASCII).search

def quote(s):
    """Return a shell-escaped version of the string *s*."""
    if not s:
        return "''"
    if _find_unsafe(s) is None:
        return s

    # use single quotes, and put single quotes into double quotes
    # the string $'b is then quoted as '$'"'"'b'
    return "'" + s.replace("'", "'\"'\"'") + "'"
def _print_tokens(lexer):
tt = lexer.get_token()
if not tt:
return None
None('Token: ' + repr(tt))
continue
if __name__ == '__main__':
    # CLI smoke test: tokenize stdin, or the file named as first argument.
    if len(sys.argv) == 1:
        _print_tokens(shlex())
    else:
        fn = sys.argv[1]
        # Context manager ensures the file is closed even on lexing errors.
        with open(fn) as f:
            _print_tokens(shlex(f, fn))