     SemanticTokenTypes,
 )
 from ...common.text_document import TextDocument
-from ..diagnostics.namespace import Namespace
 from ..utils.ast import HasTokens, Token, iter_nodes, token_in_range, tokenize_variables
 
 if TYPE_CHECKING:
@@ -258,9 +257,7 @@ async def generate_sem_sub_tokens(
         else:
             yield SemTokenInfo.from_token(token, sem_type, sem_mod, col_offset, length)
 
-    async def generate_sem_tokens(
-        self, namespace: Namespace, token: Token, node: ast.AST
-    ) -> AsyncGenerator[SemTokenInfo, None]:
+    async def generate_sem_tokens(self, token: Token, node: ast.AST) -> AsyncGenerator[SemTokenInfo, None]:
         from robot.parsing.lexer.tokens import Token as RobotToken
 
         if token.type in {*RobotToken.ALLOW_VARIABLES, RobotToken.KEYWORD}:
@@ -273,18 +270,19 @@ async def generate_sem_tokens(
 
         elif token.type == RobotToken.KEYWORD:
             is_builtin = False
-            if namespace.initialized:
-                try:
-                    libdoc = await namespace.find_keyword(token.value)
-                    if (
-                        libdoc is not None
-                        and libdoc.libname is not None
-                        and libdoc.libname.casefold() == "builtin".casefold()
-                    ):
-
-                        is_builtin = True
-                except BaseException:
-                    pass
+            # TODO tag builtin keywords
+            # if namespace.initialized:
+            #     try:
+            #         libdoc = await namespace.find_keyword(token.value)
+            #         if (
+            #             libdoc is not None
+            #             and libdoc.libname is not None
+            #             and libdoc.libname.casefold() == "builtin".casefold()
+            #         ):
+
+            #             is_builtin = True
+            #     except BaseException:
+            #         pass
 
             async for e in self.generate_sem_sub_tokens(token, node):
                 if is_builtin:
@@ -297,7 +295,7 @@ async def generate_sem_tokens(
                 yield e
 
     async def collect(
-        self, namespace: Namespace, model: ast.AST, range: Optional[Range], cancel_token: CancelationToken
+        self, model: ast.AST, range: Optional[Range], cancel_token: CancelationToken
     ) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
 
         data = []
@@ -321,7 +319,7 @@ def get_tokens() -> Generator[Tuple[Token, ast.AST], None, None]:
         ):
             cancel_token.throw_if_canceled()
 
-            async for token in self.generate_sem_tokens(namespace, robot_token, robot_node):
+            async for token in self.generate_sem_tokens(robot_token, robot_node):
                 current_line = token.lineno - 1
 
                 data.append(current_line - last_line)
@@ -356,15 +354,14 @@ async def collect_threading(
     ) -> Union[SemanticTokens, SemanticTokensPartialResult, None]:
         try:
             model = await self.parent.documents_cache.get_model(document)
-            namespace = await self.parent.documents_cache.get_namespace(document)
-            await namespace.ensure_initialized()
+            # namespace = await self.parent.documents_cache.get_namespace(document)
+            # await namespace.ensure_initialized()
 
             cancel_token = CancelationToken()
             return await asyncio.get_running_loop().run_in_executor(
                 None,
                 asyncio.run,
                 self.collect(
-                    namespace,
                     model,
                     range,
                     cancel_token,