Skip to content

Commit

Permalink
refactor: some performance tweaks
Browse files Browse the repository at this point in the history
  • Loading branch information
d-biehl committed Oct 2, 2024
1 parent 21f9910 commit d3b39be
Show file tree
Hide file tree
Showing 7 changed files with 162 additions and 132 deletions.
6 changes: 6 additions & 0 deletions packages/core/src/robotcode/core/ignore_spec.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,6 +288,7 @@ def iter_files(
include_hidden: bool = True,
parent_spec: Optional[IgnoreSpec] = None,
verbose_callback: Optional[Callable[[str], None]] = None,
verbose_trace: bool = False,
) -> Iterator[Path]:
if isinstance(paths, Path):
paths = [paths]
Expand All @@ -300,6 +301,7 @@ def iter_files(
include_hidden=include_hidden,
parent_spec=parent_spec,
verbose_callback=verbose_callback,
verbose_trace=verbose_trace,
)


Expand All @@ -310,7 +312,10 @@ def _iter_files(
include_hidden: bool = True,
parent_spec: Optional[IgnoreSpec] = None,
verbose_callback: Optional[Callable[[str], None]] = None,
verbose_trace: bool = False,
) -> Iterator[Path]:
if verbose_callback is not None and verbose_trace:
verbose_callback(f"iter_files: {path}")

if root is None:
root = path if path.is_dir() else path.parent
Expand Down Expand Up @@ -368,6 +373,7 @@ def _iter_files(
include_hidden=include_hidden,
parent_spec=spec,
verbose_callback=verbose_callback,
verbose_trace=verbose_trace,
)
elif p.is_file():
yield p
Original file line number Diff line number Diff line change
Expand Up @@ -65,12 +65,6 @@ class DiagnosticsResult:
skipped: bool = False


@dataclass
class WorkspaceDocumentsResult:
    """One document loaded from the workspace, paired with an optional display name."""

    # Optional human-readable label for the document — NOTE(review): exact
    # semantics not visible in this excerpt; confirm against callers.
    name: Optional[str]
    # The loaded text document instance.
    document: TextDocument


@dataclass
class DiagnosticsData:
lock: RLock
Expand Down Expand Up @@ -158,7 +152,7 @@ def collect(
@event
# Event hook: fired to trigger loading of all workspace documents.
# Fix: the pasted text contained BOTH the old return-annotation line
# (`) -> Optional[List[WorkspaceDocumentsResult]]: ...`) and the new one,
# which is not valid Python — only the current `None`-returning signature
# is kept. Handlers perform loading as a side effect and return nothing.
def load_workspace_documents(
    sender,
) -> None: ...

@event
# Event hook: fired once after the workspace documents have been loaded.
def on_workspace_loaded(sender: Any) -> None: ...
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -329,8 +329,7 @@ def handle_LibraryImport( # noqa: N802

except (SystemExit, KeyboardInterrupt):
raise
except BaseException as e:
self._logger.exception(e)
except BaseException:
return None

arguments = library_node.get_tokens(RobotToken.ARGUMENT)
Expand Down Expand Up @@ -373,8 +372,7 @@ def handle_VariablesImport( # noqa: N802

except (SystemExit, KeyboardInterrupt):
raise
except BaseException as e:
self._logger.exception(e)
except BaseException:
return None

arguments = library_node.get_tokens(RobotToken.ARGUMENT)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import time
from concurrent.futures import CancelledError
from logging import CRITICAL
from pathlib import Path
from threading import Event
from typing import TYPE_CHECKING, Any, List, Optional

Expand All @@ -12,7 +13,6 @@
from robotcode.language_server.common.parts.diagnostics import (
AnalysisProgressMode,
DiagnosticsMode,
WorkspaceDocumentsResult,
)
from robotcode.language_server.robotframework.configuration import AnalysisConfig
from robotcode.robot.diagnostics.library_doc import (
Expand Down Expand Up @@ -58,10 +58,10 @@ def on_get_analysis_progress_mode(self, sender: Any, uri: Uri) -> Optional[Analy
config = self.parent.workspace.get_configuration(AnalysisConfig, uri)
return config.progress_mode

def load_workspace_documents(self, sender: Any) -> List[WorkspaceDocumentsResult]:
def load_workspace_documents(self, sender: Any) -> None:
start = time.monotonic()
try:
result: List[WorkspaceDocumentsResult] = []
result: List[Path] = []

for folder in self.parent.workspace.workspace_folders:
config = self.parent.workspace.get_configuration(RobotCodeConfig, folder.uri)
Expand All @@ -79,10 +79,14 @@ def load_workspace_documents(self, sender: Any) -> List[WorkspaceDocumentsResult
[*DEFAULT_SPEC_RULES, *(config.workspace.exclude_patterns or [])],
folder.uri.to_path(),
),
verbose_callback=self._logger.debug,
verbose_trace=False,
),
)
)

result.extend(files)

canceled = False
with self.parent.window.progress(
"Load workspace", current=0, max=len(files), start=False, cancellable=False
Expand Down Expand Up @@ -111,13 +115,11 @@ def load_workspace_documents(self, sender: Any) -> List[WorkspaceDocumentsResult
except BaseException as e:
ex = e
self._logger.exception(lambda: f"Can't load document {f}: {ex}", level=CRITICAL)

if canceled:
return []

return result
finally:
self._logger.info(lambda: f"Workspace loaded {len(result)} documents in {time.monotonic() - start}s")
if canceled:
self._logger.info(lambda: "Workspace loading canceled")
else:
self._logger.info(lambda: f"Workspace loaded {len(result)} documents in {time.monotonic() - start}s")

@rpc_method(name="robot/cache/clear", threaded=True)
def robot_cache_clear(self) -> None:
Expand Down
4 changes: 2 additions & 2 deletions packages/robot/src/robotcode/robot/diagnostics/entities.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ def __eq__(self, o: object) -> bool:
return False

def __hash__(self) -> int:
    # Fix: the pasted text contained two consecutive `return` statements
    # (a merged diff artifact); the first — `return hash(self.name)` — was
    # unreachable dead code and has been removed.
    # Hash on the normalized name so that hashing stays consistent with
    # equality — NOTE(review): assumes __eq__ compares `normalized_name`;
    # confirm against the full class definition.
    return hash(self.normalized_name)

def __str__(self) -> str:
    """Render this definition as its plain (non-normalized) name."""
    display_name = self.name
    return display_name
Expand Down Expand Up @@ -252,7 +252,7 @@ class BuiltInVariableDefinition(VariableDefinition):

@single_call
def __hash__(self) -> int:
    # Fix: the pasted text contained two consecutive `return` statements
    # (a merged diff artifact); the first, un-padded tuple hash was
    # unreachable dead code and has been removed.
    # The trailing `(None, None)` entries pad the key tuple —
    # NOTE(review): presumably to keep the layout compatible with the
    # base class's hash key; confirm against VariableDefinition.__hash__.
    return hash((type(self), self.name, self.type, None, None))


@dataclass
Expand Down
141 changes: 73 additions & 68 deletions packages/robot/src/robotcode/robot/diagnostics/imports_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import shutil
import sys
import threading
import time
import weakref
import zlib
from abc import ABC, abstractmethod
Expand Down Expand Up @@ -1322,82 +1323,86 @@ def _get_variables_libdoc(
)

self._logger.debug(lambda: f"Load variables {source}{args!r}")
if meta is not None:
meta_file = Path(
self.variables_doc_cache_path,
meta.filepath_base + ".meta.json",
)
if meta_file.exists():
try:
spec_path = None
try:
saved_meta = from_json(meta_file.read_text("utf-8"), LibraryMetaData)
if saved_meta == meta:
spec_path = Path(
self.variables_doc_cache_path,
meta.filepath_base + ".spec.json",
)
return from_json(spec_path.read_text("utf-8"), VariablesDoc)
except (SystemExit, KeyboardInterrupt):
raise
except BaseException as e:
raise RuntimeError(
f"Failed to load library meta data for library {name} from {spec_path}"
) from e
except (SystemExit, KeyboardInterrupt):
raise
except BaseException as e:
self._logger.exception(e)

executor = ProcessPoolExecutor(max_workers=1, mp_context=mp.get_context("spawn"))
try:
result = executor.submit(
get_variables_doc,
name,
args,
working_dir,
base_dir,
self.get_resolvable_command_line_variables() if resolve_command_line_vars else None,
variables,
).result(LOAD_LIBRARY_TIME_OUT)
except (SystemExit, KeyboardInterrupt):
raise
except BaseException as e:
self._logger.exception(e)
raise
finally:
executor.shutdown(True)

if result.stdout:
self._logger.warning(lambda: f"stdout captured at loading variables {name}{args!r}:\n{result.stdout}")

start_time = time.monotonic()
try:
if meta is not None:
meta_file = Path(
self.variables_doc_cache_path,
meta.filepath_base + ".meta.json",
)
spec_file = Path(
self.variables_doc_cache_path,
meta.filepath_base + ".spec.json",
)
spec_file.parent.mkdir(parents=True, exist_ok=True)
if meta_file.exists():
try:
spec_path = None
try:
saved_meta = from_json(meta_file.read_text("utf-8"), LibraryMetaData)
if saved_meta == meta:
spec_path = Path(
self.variables_doc_cache_path,
meta.filepath_base + ".spec.json",
)
return from_json(spec_path.read_text("utf-8"), VariablesDoc)
except (SystemExit, KeyboardInterrupt):
raise
except BaseException as e:
raise RuntimeError(
f"Failed to load library meta data for library {name} from {spec_path}"
) from e
except (SystemExit, KeyboardInterrupt):
raise
except BaseException as e:
self._logger.exception(e)

try:
spec_file.write_text(as_json(result), "utf-8")
except (SystemExit, KeyboardInterrupt):
raise
except BaseException as e:
raise RuntimeError(f"Cannot write spec file for variables '{name}' to '{spec_file}'") from e
meta_file.write_text(as_json(meta), "utf-8")
else:
self._logger.debug(lambda: f"Skip caching variables {name}{args!r}")
except (SystemExit, KeyboardInterrupt):
raise
except BaseException as e:
self._logger.exception(e)
executor = ProcessPoolExecutor(max_workers=1, mp_context=mp.get_context("spawn"))
try:
result = executor.submit(
get_variables_doc,
name,
args,
working_dir,
base_dir,
self.get_resolvable_command_line_variables() if resolve_command_line_vars else None,
variables,
).result(LOAD_LIBRARY_TIME_OUT)
except (SystemExit, KeyboardInterrupt):
raise
except BaseException as e:
self._logger.exception(e)
raise
finally:
executor.shutdown(True)

if result.stdout:
self._logger.warning(lambda: f"stdout captured at loading variables {name}{args!r}:\n{result.stdout}")

try:
if meta is not None:
meta_file = Path(
self.variables_doc_cache_path,
meta.filepath_base + ".meta.json",
)
spec_file = Path(
self.variables_doc_cache_path,
meta.filepath_base + ".spec.json",
)
spec_file.parent.mkdir(parents=True, exist_ok=True)

return result
try:
spec_file.write_text(as_json(result), "utf-8")
except (SystemExit, KeyboardInterrupt):
raise
except BaseException as e:
raise RuntimeError(f"Cannot write spec file for variables '{name}' to '{spec_file}'") from e
meta_file.write_text(as_json(meta), "utf-8")
else:
self._logger.debug(lambda: f"Skip caching variables {name}{args!r}")
except (SystemExit, KeyboardInterrupt):
raise
except BaseException as e:
self._logger.exception(e)

return result
finally:
self._logger.debug(lambda: f"Load variables {source}{args!r} took {time.monotonic() - start_time} seconds")

@_logger.call
def get_libdoc_for_variables_import(
Expand Down
Loading

0 comments on commit d3b39be

Please sign in to comment.