@@ -46,7 +46,7 @@ def __init__(self, fromdir, todir, replacements=None):
         for key, val in (replacements or {}).items():
             self.token_replacements[key] = val
 
-    def match(self, filepath):
+    def _match(self, filepath):
         """Determines if a Rule matches a given filepath and if so
         returns a higher comparable value if the match is more specific.
         """
@@ -70,15 +70,15 @@ def unasync_file(self, filepath):
                 encoding, _ = std_tokenize.detect_encoding(f.readline)
                 write_kwargs["encoding"] = encoding
                 f.seek(0)
-            tokens = tokenize(f)
-            tokens = self.unasync_tokens(tokens)
-            result = untokenize(tokens)
+            tokens = _tokenize(f)
+            tokens = self._unasync_tokens(tokens)
+            result = _untokenize(tokens)
             outfilepath = filepath.replace(self.fromdir, self.todir)
-            makedirs_existok(os.path.dirname(outfilepath))
+            _makedirs_existok(os.path.dirname(outfilepath))
             with open(outfilepath, "w", **write_kwargs) as f:
                 print(result, file=f, end="")
 
-    def unasync_tokens(self, tokens):
+    def _unasync_tokens(self, tokens):
         # TODO __await__, ...?
         used_space = None
         for space, toknum, tokval in tokens:
@@ -90,16 +90,16 @@ def unasync_tokens(self, tokens):
                 used_space = space
             else:
                 if toknum == std_tokenize.NAME:
-                    tokval = self.unasync_name(tokval)
+                    tokval = self._unasync_name(tokval)
                 elif toknum == std_tokenize.STRING:
                     left_quote, name, right_quote = tokval[0], tokval[1:-1], tokval[-1]
-                    tokval = left_quote + self.unasync_name(name) + right_quote
+                    tokval = left_quote + self._unasync_name(name) + right_quote
                 if used_space is None:
                     used_space = space
             yield (used_space, tokval)
             used_space = None
 
-    def unasync_name(self, name):
+    def _unasync_name(self, name):
         if name in self.token_replacements:
             return self.token_replacements[name]
         # Convert classes prefixed with 'Async' into 'Sync'
@@ -111,7 +111,7 @@ def unasync_name(self, name):
 Token = collections.namedtuple("Token", ["type", "string", "start", "end", "line"])
 
 
-def get_tokens(f):
+def _get_tokens(f):
     if sys.version_info[0] == 2:
         for tok in std_tokenize.generate_tokens(f.readline):
             type_, string, start, end, line = tok
@@ -123,9 +123,9 @@ def get_tokens(f):
             yield tok
 
 
-def tokenize(f):
+def _tokenize(f):
     last_end = (1, 0)
-    for tok in get_tokens(f):
+    for tok in _get_tokens(f):
         if last_end[0] < tok.start[0]:
             yield ("", std_tokenize.STRING, " \\\n")
             last_end = (tok.start[0], 0)
@@ -141,11 +141,11 @@ def tokenize(f):
             last_end = (tok.end[0] + 1, 0)
 
 
-def untokenize(tokens):
+def _untokenize(tokens):
     return "".join(space + tokval for space, tokval in tokens)
 
 
-def makedirs_existok(dir):
+def _makedirs_existok(dir):
     try:
         os.makedirs(dir)
     except OSError as e:
@@ -184,7 +184,7 @@ def run(self):
             found_weight = None
 
             for rule in rules:
-                weight = rule.match(f)
+                weight = rule._match(f)
                 if weight and (found_weight is None or weight > found_weight):
                     found_rule = rule
                     found_weight = weight
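
For context, a minimal sketch of how a Rule is still driven through its public surface after this rename. This is illustrative only: it assumes the module is importable as `unasync`, and the paths and replacement mapping are made-up values, not part of this commit.

    import unasync

    # A Rule maps one directory tree onto another and carries extra token
    # replacements on top of the built-in Async* -> Sync* renaming.
    rule = unasync.Rule(
        fromdir="/src/pkg/_async/",        # hypothetical input tree
        todir="/src/pkg/_sync/",           # hypothetical output tree
        replacements={"aiofiles": "io"},   # hypothetical extra token mapping
    )

    path = "/src/pkg/_async/client.py"     # hypothetical source file

    # _match() (now private) returns a comparable weight when the file lives
    # under fromdir; the build command keeps the most specific matching rule.
    if rule._match(path):
        # unasync_file() re-tokenizes the file, drops async/await, applies the
        # name replacements, and writes the result to the mirrored path under todir.
        rule.unasync_file(path)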