Skip to content

Commit

Permalink
perf(analyzer): speed up the creation of libdocs from resource files
Browse files Browse the repository at this point in the history
  • Loading branch information
d-biehl committed Oct 26, 2024
1 parent 7613bb2 commit 2dfc91b
Show file tree
Hide file tree
Showing 7 changed files with 38 additions and 99 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -70,11 +70,10 @@ def resolve(self, sender: Any, code_lens: CodeLens) -> Optional[CodeLens]:

namespace = self.parent.documents_cache.get_namespace(document)

name = code_lens.data["name"]
line = code_lens.data["line"]

if self.parent.diagnostics.workspace_loaded_event.is_set():
kw_doc = self.get_keyword_definition_at_line(namespace.get_library_doc(), name, line)
kw_doc = self.get_keyword_definition_at_line(namespace.get_library_doc(), line)

if kw_doc is not None and not kw_doc.is_error_handler:
if not self.parent.robot_references.has_cached_keyword_references(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
from concurrent.futures import CancelledError
from logging import CRITICAL
from pathlib import Path
from threading import Event
from typing import TYPE_CHECKING, Any, List, Optional
from typing import TYPE_CHECKING, Any, Optional

from robotcode.core.ignore_spec import DEFAULT_SPEC_RULES, GIT_IGNORE_FILE, ROBOT_IGNORE_FILE, IgnoreSpec, iter_files
from robotcode.core.language import language_id
Expand Down Expand Up @@ -60,8 +59,6 @@ def on_get_analysis_progress_mode(self, sender: Any, uri: Uri) -> Optional[Analy
def load_workspace_documents(self, sender: Any) -> None:
with self._logger.measure_time(lambda: "loading workspace documents", context_name="load_workspace_documents"):
try:
result: List[Path] = []

for folder in self.parent.workspace.workspace_folders:
config = self.parent.workspace.get_configuration(RobotCodeConfig, folder.uri)

Expand All @@ -84,9 +81,10 @@ def load_workspace_documents(self, sender: Any) -> None:
)
)

result.extend(files)

canceled = False
self._logger.debug(
lambda: f"Loading {len(files)} workspace documents", context_name="load_workspace_documents"
)
with self.parent.window.progress(
"Load workspace", current=0, max=len(files), start=False, cancellable=False
) as progress:
Expand Down
64 changes: 29 additions & 35 deletions packages/robot/src/robotcode/robot/diagnostics/library_doc.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,12 +76,12 @@
from robotcode.robot.utils.ast import (
cached_isinstance,
get_variable_token,
iter_nodes,
range_from_token,
strip_variable_token,
)
from robotcode.robot.utils.markdownformatter import MarkDownFormatter
from robotcode.robot.utils.match import normalize, normalize_namespace
from robotcode.robot.utils.stubs import HasError, HasErrors

from ..utils.variables import contains_variable

Expand Down Expand Up @@ -239,7 +239,6 @@ def __init__(
self._can_have_embedded = can_have_embedded and not is_namespace
self._is_namespace = is_namespace
self._normalized_name: Optional[str] = None
self._embedded_arguments: Any = None

@property
def normalized_name(self) -> str:
Expand All @@ -248,15 +247,12 @@ def normalized_name(self) -> str:

return self._normalized_name

@property
@functools.cached_property
def embedded_arguments(self) -> Any:
if self._embedded_arguments is None:
if self._can_have_embedded:
self._embedded_arguments = _get_embedded_arguments(self.name)
else:
self._embedded_arguments = ()
if self._can_have_embedded:
return _get_embedded_arguments(self.name) or ()

return self._embedded_arguments
return ()

if get_robot_version() >= (6, 0):

Expand All @@ -269,7 +265,7 @@ def __match_embedded(self, name: str) -> bool:
return self.embedded_arguments.name.match(name) is not None

def __eq__(self, o: object) -> bool:
if cached_isinstance(o, KeywordMatcher):
if type(o) is KeywordMatcher:
if self._is_namespace != o._is_namespace:
return False

Expand Down Expand Up @@ -667,13 +663,11 @@ def __post_init__(self) -> None:
def __str__(self) -> str:
return f"{self.name}({', '.join(str(arg) for arg in self.arguments)})"

@property
@functools.cached_property
def matcher(self) -> KeywordMatcher:
if not hasattr(self, "__matcher"):
self.__matcher = KeywordMatcher(self.name)
return self.__matcher
return KeywordMatcher(self.name)

@property
@functools.cached_property
def is_deprecated(self) -> bool:
return self.deprecated or DEPRECATED_PATTERN.match(self.doc) is not None

Expand All @@ -685,13 +679,13 @@ def is_resource_keyword(self) -> bool:
def is_library_keyword(self) -> bool:
return self.libtype == "LIBRARY"

@property
@functools.cached_property
def deprecated_message(self) -> str:
if (m := DEPRECATED_PATTERN.match(self.doc)) is not None:
return m.group("message").strip()
return ""

@property
@functools.cached_property
def name_range(self) -> Range:
if self.name_token is not None:
return range_from_token(self.name_token)
Expand All @@ -709,7 +703,7 @@ def is_private(self) -> bool:

return "robot:private" in self.normalized_tags()

@property
@functools.cached_property
def range(self) -> Range:
if self.name_token is not None:
return range_from_token(self.name_token)
Expand Down Expand Up @@ -820,7 +814,7 @@ def escape_pipe(s: str) -> str:

return result

@property
@functools.cached_property
def signature(self) -> str:
return (
f'({self.type}) "{self.name}": ('
Expand Down Expand Up @@ -2716,15 +2710,16 @@ def get_model_doc(
append_model_errors: bool = True,
) -> LibraryDoc:
errors: List[Error] = []
keyword_name_nodes: List[KeywordName] = []
keywords_nodes: List[Keyword] = []
for node in ast.walk(model):
if isinstance(node, Keyword):
keywords_nodes.append(node)
if isinstance(node, KeywordName):
keyword_name_nodes.append(node)

error = node.error if isinstance(node, HasError) else None
keyword_name_nodes: Dict[int, KeywordName] = {}
keywords_nodes: Dict[int, Keyword] = {}
for node in iter_nodes(model):
if cached_isinstance(node, Keyword):
node.lineno
keywords_nodes[node.lineno] = node
if cached_isinstance(node, KeywordName):
keyword_name_nodes[node.lineno] = node

error = getattr(node, "error", None)
if error is not None:
errors.append(
Error(
Expand All @@ -2735,7 +2730,7 @@ def get_model_doc(
)
)
if append_model_errors:
node_errors = node.errors if isinstance(node, HasErrors) else None
node_errors = getattr(node, "errors", None)
if node_errors is not None:
for e in node_errors:
errors.append(
Expand All @@ -2748,16 +2743,15 @@ def get_model_doc(
)

def get_keyword_name_token_from_line(line: int) -> Optional[Token]:
for keyword_name in keyword_name_nodes:
if keyword_name.lineno == line:
return cast(Token, keyword_name.get_token(RobotToken.KEYWORD_NAME))

return None
keyword_name = keyword_name_nodes.get(line, None)
if keyword_name is None:
return None
return cast(Token, keyword_name.get_token(RobotToken.KEYWORD_NAME))

def get_argument_definitions_from_line(
line: int,
) -> List[ArgumentDefinition]:
keyword_node = next((k for k in keywords_nodes if k.lineno == line), None)
keyword_node = keywords_nodes.get(line, None)
if keyword_node is None:
return []

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -665,12 +665,12 @@ def is_bdd_token(cls, namespace: "Namespace", token: Token) -> bool:

@classmethod
def get_keyword_definition_at_token(cls, library_doc: LibraryDoc, token: Token) -> Optional[KeywordDoc]:
return cls.get_keyword_definition_at_line(library_doc, token.value, token.lineno)
return cls.get_keyword_definition_at_line(library_doc, token.lineno)

@classmethod
def get_keyword_definition_at_line(cls, library_doc: LibraryDoc, value: str, line: int) -> Optional[KeywordDoc]:
def get_keyword_definition_at_line(cls, library_doc: LibraryDoc, line: int) -> Optional[KeywordDoc]:
return next(
(k for k in library_doc.keywords.iter_all(value) if k.line_no == line),
(k for k in library_doc.keywords.keywords if k.line_no == line),
None,
)

Expand Down
7 changes: 0 additions & 7 deletions packages/robot/src/robotcode/robot/utils/ast.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,13 +38,6 @@ def cached_isinstance(obj: Any, *expected_types: Type[_T]) -> TypeGuard[Union[_T
return False


# def cached_isinstance(obj: Any, *expected_types: type) -> bool:
# try:
# return isinstance(obj, expected_types)
# except TypeError:
# return False


def iter_nodes(node: ast.AST, descendants: bool = True) -> Iterator[ast.AST]:
for _field, value in ast.iter_fields(node):
if cached_isinstance(value, list):
Expand Down
20 changes: 1 addition & 19 deletions packages/robot/src/robotcode/robot/utils/stubs.py
Original file line number Diff line number Diff line change
@@ -1,22 +1,4 @@
from __future__ import annotations

from typing import Any, Dict, Iterator, List, Optional, Protocol, Set, runtime_checkable


@runtime_checkable
class HasError(Protocol):
error: Optional[str]


@runtime_checkable
class HasErrors(Protocol):
errors: Optional[List[str]]


@runtime_checkable
class HeaderAndBodyBlock(Protocol):
header: Any
body: List[Any]
from typing import Any, Dict, Iterator, List, Protocol, Set, runtime_checkable


@runtime_checkable
Expand Down
27 changes: 0 additions & 27 deletions packages/robot/src/robotcode/robot/utils/visitor.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
from abc import ABC
from typing import (
Any,
AsyncIterator,
Callable,
Dict,
Iterator,
Expand Down Expand Up @@ -37,32 +36,6 @@ def iter_field_values(node: ast.AST) -> Iterator[Any]:
pass


def iter_child_nodes(node: ast.AST) -> Iterator[ast.AST]:
    """Yield every direct child AST node of *node*.

    Each field of the node is inspected in declaration order: a field that
    is itself an AST node is yielded directly, while list-valued fields are
    scanned for AST items. Non-AST values (strings, numbers, None) are
    silently skipped. Only immediate children are produced — no recursion.
    """
    for _unused_name, value in iter_fields(node):
        if isinstance(value, list):
            for element in value:
                if isinstance(element, ast.AST):
                    yield element
        elif isinstance(value, ast.AST):
            yield value


async def iter_nodes(node: ast.AST) -> AsyncIterator[ast.AST]:
    """Asynchronously yield all descendant AST nodes of *node* in pre-order.

    Fields are visited in declaration order; each AST child is yielded
    first, immediately followed by its own descendants (depth-first).
    Non-AST field values are ignored.
    """
    for _field_name, field_value in iter_fields(node):
        if isinstance(field_value, ast.AST):
            yield field_value
            async for descendant in iter_nodes(field_value):
                yield descendant
        elif isinstance(field_value, list):
            for entry in field_value:
                if not isinstance(entry, ast.AST):
                    continue
                yield entry
                async for descendant in iter_nodes(entry):
                    yield descendant


class VisitorFinder(ABC):
__cls_finder_cache__: Dict[Type[Any], Optional[Callable[..., Any]]]

Expand Down

0 comments on commit 2dfc91b

Please sign in to comment.