77import sys
88import six
99import inspect
10+ import textwrap
1011import tokenize
1112import py
1213
@@ -23,7 +24,6 @@ class Source(object):
2324 def __init__ (self , * parts , ** kwargs ):
2425 self .lines = lines = []
2526 de = kwargs .get ("deindent" , True )
26- rstrip = kwargs .get ("rstrip" , True )
2727 for part in parts :
2828 if not part :
2929 partlines = []
@@ -33,11 +33,6 @@ def __init__(self, *parts, **kwargs):
3333 partlines = [x .rstrip ("\n " ) for x in part ]
3434 elif isinstance (part , six .string_types ):
3535 partlines = part .split ("\n " )
36- if rstrip :
37- while partlines :
38- if partlines [- 1 ].strip ():
39- break
40- partlines .pop ()
4136 else :
4237 partlines = getsource (part , deindent = de ).lines
4338 if de :
@@ -115,17 +110,10 @@ def getstatementrange(self, lineno):
115110 ast , start , end = getstatementrange_ast (lineno , self )
116111 return start , end
117112
118- def deindent (self , offset = None ):
119- """ return a new source object deindented by offset.
120- If offset is None then guess an indentation offset from
121- the first non-blank line. Subsequent lines which have a
122- lower indentation offset will be copied verbatim as
123- they are assumed to be part of multilines.
124- """
125- # XXX maybe use the tokenizer to properly handle multiline
126- # strings etc.pp?
113+ def deindent (self ):
114+ """return a new source object deindented."""
127115 newsource = Source ()
128- newsource .lines [:] = deindent (self .lines , offset )
116+ newsource .lines [:] = deindent (self .lines )
129117 return newsource
130118
131119 def isparseable (self , deindent = True ):
@@ -268,47 +256,8 @@ def getsource(obj, **kwargs):
268256 return Source (strsrc , ** kwargs )
269257
270258
271- def deindent (lines , offset = None ):
272- if offset is None :
273- for line in lines :
274- line = line .expandtabs ()
275- s = line .lstrip ()
276- if s :
277- offset = len (line ) - len (s )
278- break
279- else :
280- offset = 0
281- if offset == 0 :
282- return list (lines )
283- newlines = []
284-
285- def readline_generator (lines ):
286- for line in lines :
287- yield line + "\n "
288-
289- it = readline_generator (lines )
290-
291- try :
292- for _ , _ , (sline , _ ), (eline , _ ), _ in tokenize .generate_tokens (
293- lambda : next (it )
294- ):
295- if sline > len (lines ):
296- break # End of input reached
297- if sline > len (newlines ):
298- line = lines [sline - 1 ].expandtabs ()
299- if line .lstrip () and line [:offset ].isspace ():
300- line = line [offset :] # Deindent
301- newlines .append (line )
302-
303- for i in range (sline , eline ):
304- # Don't deindent continuing lines of
305- # multiline tokens (i.e. multiline strings)
306- newlines .append (lines [i ])
307- except (IndentationError , tokenize .TokenError ):
308- pass
309- # Add any lines we didn't see. E.g. if an exception was raised.
310- newlines .extend (lines [len (newlines ) :])
311- return newlines
def deindent(lines):
    """Remove any common leading whitespace from the given lines.

    Joining on "\n" lets textwrap.dedent compute the shared margin
    (whitespace-only lines are ignored when determining it).  Returns a
    new list with exactly as many entries as *lines*.
    """
    if not lines:
        return []
    # Use str.split("\n") rather than str.splitlines(): splitlines()
    # drops a trailing empty line (changing the line count callers rely
    # on for line-number bookkeeping) and also breaks on embedded
    # Unicode line boundaries (e.g. "\x85", "\u2028") that are not real
    # source-line separators.
    return textwrap.dedent("\n".join(lines)).split("\n")
312261
313262
314263def get_statement_startend2 (lineno , node ):
0 commit comments