diff --git a/.actions/assistant.py b/.actions/assistant.py
index f54d828f5dd21..1956a17a16c9a 100644
--- a/.actions/assistant.py
+++ b/.actions/assistant.py
@@ -234,7 +234,7 @@ def _download_frontend(pkg_path: str, version: str = "v0.0.0"):
         response = urllib.request.urlopen(frontend_release_url)
         file = tarfile.open(fileobj=response, mode="r|gz")
-        file.extractall(path=download_dir)
+        file.extractall(path=download_dir)  # noqa: S202
         shutil.move(download_dir, frontend_dir)
         print("The Lightning UI has successfully been downloaded!")
@@ -457,7 +457,7 @@ def pull_docs_files(
             raise RuntimeError(f"Requesting file '{zip_url}' does not exist or it is just unavailable.")
         with zipfile.ZipFile(zip_file, "r") as zip_ref:
-            zip_ref.extractall(tmp)
+            zip_ref.extractall(tmp)  # noqa: S202
         zip_dirs = [d for d in glob.glob(os.path.join(tmp, "*")) if os.path.isdir(d)]
         # check that the extracted archive has only repo folder
diff --git a/docs/source-pytorch/extensions/strategy.rst b/docs/source-pytorch/extensions/strategy.rst
index 2dd69859dd1a1..b62013dfbc9b1 100644
--- a/docs/source-pytorch/extensions/strategy.rst
+++ b/docs/source-pytorch/extensions/strategy.rst
@@ -77,7 +77,7 @@ The below table lists all relevant strategies available in Lightning with their
      - Strategy for multi-process single-device training on one or multiple nodes. :ref:`Learn more. `
    * - ddp_spawn
      - :class:`~lightning.pytorch.strategies.DDPStrategy`
-     - Same as "ddp" but launches processes using :func:`torch.multiprocessing.spawn` method and joins processes after training finishes. :ref:`Learn more. `
+     - Same as "ddp" but launches processes using ``torch.multiprocessing.spawn`` method and joins processes after training finishes. :ref:`Learn more. `
    * - deepspeed
      - :class:`~lightning.pytorch.strategies.DeepSpeedStrategy`
      - Provides capabilities to run training using the DeepSpeed library, with training optimizations for large billion parameter models. :doc:`Learn more. <../advanced/model_parallel/deepspeed>`
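
For readers of the strategy table touched above, a minimal usage sketch of selecting these strategies through the Trainer API (assumptions: Lightning 2.x and a 2-GPU machine; this snippet is illustrative and not part of the diff):

from lightning.pytorch import Trainer

# "ddp": one process per device, launched as subprocesses of the main script.
trainer = Trainer(accelerator="gpu", devices=2, strategy="ddp")

# "ddp_spawn": same training scheme, but worker processes are created with
# torch.multiprocessing.spawn and joined once fit() returns.
trainer = Trainer(accelerator="gpu", devices=2, strategy="ddp_spawn")
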
"S113", # todo: Probable use of requests call without timeout @@ -103,12 +105,14 @@ ignore-init-module-imports = true "RET503", ] "tests/**" = [ + "F841", # Local variable is assigned to but never used "S101", # Use of `assert` detected "S105", "S106", # todo: Possible hardcoded password: ... "S301", # `pickle` and modules that wrap it can be unsafe when used to deserialize untrusted data, possible security issue "S113", # todo: Probable use of requests call without timeout "S311", # todo: Standard pseudo-random generators are not suitable for cryptographic purposes "S108", # todo: Probable insecure usage of temporary file or directory: "/tmp/sys-customizations-sync" + "S202", # Uses of `tarfile.extractall()` "S403", # `pickle`, `cPickle`, `dill`, and `shelve` modules are possibly insecure "S404", # `subprocess` module is possibly insecure "S602", # todo: `subprocess` call with `shell=True` identified, security issue diff --git a/src/lightning/app/CHANGELOG.md b/src/lightning/app/CHANGELOG.md index a06951c327c29..3bad6f982032b 100644 --- a/src/lightning/app/CHANGELOG.md +++ b/src/lightning/app/CHANGELOG.md @@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). +## [2.1.4] - 2024-01-31 + +### Changed + +- Remove torch distributed for the Dataset Optimizer ([#19182](https://github.com/Lightning-AI/lightning/pull/19182)) + + ## [2.1.3] - 2023-12-21 ### Changed diff --git a/src/lightning/app/cli/cmd_pl_init.py b/src/lightning/app/cli/cmd_pl_init.py index b59ac93280e6c..2436c28179ef2 100644 --- a/src/lightning/app/cli/cmd_pl_init.py +++ b/src/lightning/app/cli/cmd_pl_init.py @@ -121,7 +121,7 @@ def download_frontend(destination: Path) -> None: with TemporaryDirectory() as download_dir: response = urllib.request.urlopen(url) # noqa: S310 file = tarfile.open(fileobj=response, mode="r|gz") - file.extractall(path=download_dir) + file.extractall(path=download_dir) # noqa: S202 shutil.move(str(Path(download_dir, build_dir_name)), destination) diff --git a/src/lightning/app/plugin/plugin.py b/src/lightning/app/plugin/plugin.py index 6f5d7fce72434..68bcec0ed22e3 100644 --- a/src/lightning/app/plugin/plugin.py +++ b/src/lightning/app/plugin/plugin.py @@ -154,7 +154,7 @@ def _run_plugin(run: _Run) -> Dict[str, Any]: logger.info("Extracting plugin source.") with tarfile.open(download_path, "r:gz") as tf: - tf.extractall(source_path) + tf.extractall(source_path) # noqa: S202 except Exception as ex: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, diff --git a/src/lightning/app/storage/drive.py b/src/lightning/app/storage/drive.py index 892b0b1c7f91e..b3f5f8f56726c 100644 --- a/src/lightning/app/storage/drive.py +++ b/src/lightning/app/storage/drive.py @@ -334,7 +334,7 @@ def __str__(self) -> str: def _maybe_create_drive(component_name: str, state: Dict) -> Union[Dict, Drive]: - if state.get("type", None) == Drive.__IDENTIFIER__: + if state.get("type") == Drive.__IDENTIFIER__: drive = Drive.from_dict(state) drive.component_name = component_name return drive diff --git a/src/lightning/app/utilities/packaging/cloud_compute.py b/src/lightning/app/utilities/packaging/cloud_compute.py index ab6b05e4004b4..e4f30aee14a63 100644 --- a/src/lightning/app/utilities/packaging/cloud_compute.py +++ b/src/lightning/app/utilities/packaging/cloud_compute.py @@ -156,7 +156,7 @@ def from_dict(cls, d: dict) -> "CloudCompute": f"mounts argument must be one of [None, Mount, List[Mount]], " 
f"received {mounts} of type {type(mounts)}" ) - _verify_mount_root_dirs_are_unique(d.get("mounts", None)) + _verify_mount_root_dirs_are_unique(d.get("mounts")) return cls(**d) @property @@ -183,6 +183,6 @@ def _verify_mount_root_dirs_are_unique(mounts: Union[None, Mount, List[Mount], T def _maybe_create_cloud_compute(state: Dict) -> Union[CloudCompute, Dict]: - if state and state.get("type", None) == __CLOUD_COMPUTE_IDENTIFIER__: + if state and state.get("type") == __CLOUD_COMPUTE_IDENTIFIER__: return CloudCompute.from_dict(state) return state diff --git a/src/lightning/app/utilities/packaging/lightning_utils.py b/src/lightning/app/utilities/packaging/lightning_utils.py index e8846b382e49e..4dbba499625eb 100644 --- a/src/lightning/app/utilities/packaging/lightning_utils.py +++ b/src/lightning/app/utilities/packaging/lightning_utils.py @@ -52,7 +52,7 @@ def download_frontend(root: str = _PROJECT_ROOT): response = urllib.request.urlopen(LIGHTNING_FRONTEND_RELEASE_URL) # noqa: S310 file = tarfile.open(fileobj=response, mode="r|gz") - file.extractall(path=download_dir) + file.extractall(path=download_dir) # noqa: S202 shutil.move(os.path.join(download_dir, build_dir), frontend_dir) print("The Lightning UI has successfully been downloaded!") diff --git a/src/lightning/data/CHANGELOG.md b/src/lightning/data/CHANGELOG.md index e76152c358297..cb560ea1a5002 100644 --- a/src/lightning/data/CHANGELOG.md +++ b/src/lightning/data/CHANGELOG.md @@ -5,6 +5,34 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/). +## [2.1.4] - 2024-01-31 + +### Added + +- Added support for nested folders in map operator ([#19366](https://github.com/Lightning-AI/lightning/pull/19366)) +- Added support for weights to evenly distributed works among workers for map operator ([#19365](https://github.com/Lightning-AI/lightning/pull/19365)) +- Added profiling support to StreamingDataloader ([#19338](https://github.com/Lightning-AI/lightning/pull/19338)) +- Allow any AWS authentication method in studios ([#19336](https://github.com/Lightning-AI/lightning/pull/19336)) +- Added walk operator #19333 +- Added intra node shuffling to accelerate second epoch in StreamingDataset ([#19296](https://github.com/Lightning-AI/lightning/pull/19296)) +- Enabled map over inputs without files input ([#19285](https://github.com/Lightning-AI/lightning/pull/19285)) +- Added Fault Tolerance v2 ([#19196](https://github.com/Lightning-AI/lightning/pull/19196), [#19201](https://github.com/Lightning-AI/lightning/pull/19201)) + +### Changed + +- Switched map operator arguments order ([#19345](https://github.com/Lightning-AI/lightning/pull/19345)) +- Removed torch distributed for the Dataset Optimizer ([#19182](https://github.com/Lightning-AI/lightning/pull/19182)) +- Remove `__len__` from CombinedStreamingDataset ([#19321](https://github.com/Lightning-AI/lightning/pull/19321)) + +### Fixed + +- Fixed race condition in downloader ([#19348](https://github.com/Lightning-AI/lightning/pull/19348)) +- Fixed serializer `io.bytes` image in JPEGSerializer ([#19369](https://github.com/Lightning-AI/lightning/pull/19369)) +- Fixed several bugs found in Studio Data Processor ([#19309](https://github.com/Lightning-AI/lightning/pull/19309)) +- Fixed handling queue errors in streaming dataset reader ([#19167](https://github.com/Lightning-AI/lightning/pull/19167)) +- Fixed chunks eviction in StreamingDataset ([#19214](https://github.com/Lightning-AI/lightning/pull/19214)) + + 
 ## [2.1.3] - 2023-12-21
 
 ### Added
diff --git a/src/lightning/fabric/CHANGELOG.md b/src/lightning/fabric/CHANGELOG.md
index fa63512b13f82..0db98ed2f2ee2 100644
--- a/src/lightning/fabric/CHANGELOG.md
+++ b/src/lightning/fabric/CHANGELOG.md
@@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
+## [2.1.4] - 2024-01-31
+
+### Fixed
+
+- Fixed an issue preventing Fabric from running on CPU when the system's CUDA driver is outdated or broken ([#19234](https://github.com/Lightning-AI/lightning/pull/19234))
+- Fixed typo in kwarg in SpikeDetection ([#19282](https://github.com/Lightning-AI/lightning/pull/19282))
+
+
 ## [2.1.3] - 2023-12-21
 
 ### Fixed
diff --git a/src/lightning/fabric/strategies/launchers/subprocess_script.py b/src/lightning/fabric/strategies/launchers/subprocess_script.py
index 5ce37e1212033..f925460162234 100644
--- a/src/lightning/fabric/strategies/launchers/subprocess_script.py
+++ b/src/lightning/fabric/strategies/launchers/subprocess_script.py
@@ -160,10 +160,11 @@ def _basic_subprocess_cmd() -> Sequence[str]:
 
 
 def _hydra_subprocess_cmd(local_rank: int) -> Tuple[Sequence[str], str]:
-    import __main__  # local import to avoid https://github.com/Lightning-AI/lightning/issues/15218
     from hydra.core.hydra_config import HydraConfig
     from hydra.utils import get_original_cwd, to_absolute_path
 
+    import __main__  # local import to avoid https://github.com/Lightning-AI/lightning/issues/15218
+
     # when user is using hydra find the absolute path
     if __main__.__spec__ is None:  # pragma: no-cover
         command = [sys.executable, to_absolute_path(sys.argv[0])]
diff --git a/src/lightning/pytorch/CHANGELOG.md b/src/lightning/pytorch/CHANGELOG.md
index 4506a82c0114d..cfc766a5916f5 100644
--- a/src/lightning/pytorch/CHANGELOG.md
+++ b/src/lightning/pytorch/CHANGELOG.md
@@ -5,6 +5,19 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
+## [2.1.4] - 2024-01-31
+
+### Fixed
+
+- Fixed `Trainer` not expanding the `default_root_dir` if it has the `~` (home) prefix ([#19179](https://github.com/Lightning-AI/lightning/pull/19179))
+- Fixed warning for Dataloader if `num_workers=1` and CPU count is 1 ([#19224](https://github.com/Lightning-AI/lightning/pull/19224))
+- Fixed `WandbLogger.watch()` method annotation to accept `None` for the log parameter ([#19237](https://github.com/Lightning-AI/lightning/pull/19237))
+- Fixed an issue preventing the Trainer from running on CPU when the system's CUDA driver is outdated or broken ([#19234](https://github.com/Lightning-AI/lightning/pull/19234))
+- Fixed an issue with the ModelCheckpoint callback not saving relative symlinks with `ModelCheckpoint(save_last="link")` ([#19303](https://github.com/Lightning-AI/lightning/pull/19303))
+- Fixed issue where the `_restricted_classmethod_impl` would incorrectly raise a TypeError on inspection rather than on call ([#19332](https://github.com/Lightning-AI/lightning/pull/19332))
+- Fixed exporting `__version__` in `__init__` ([#19221](https://github.com/Lightning-AI/lightning/pull/19221))
+
+
 ## [2.1.3] - 2023-12-21
 
 ### Changed
@@ -23,9 +36,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Fixed the tensor conversion in `self.log` to respect the default dtype ([#19046](https://github.com/Lightning-AI/lightning/issues/19046))
 
-- Fixed `Trainer` not expanding the `default_root_dir` if it has the `~` (home) prefix ([#19179](https://github.com/Lightning-AI/lightning/pull/19179))
-
-
 ## [2.1.2] - 2023-11-15
 
 ### Fixed
diff --git a/src/lightning/pytorch/utilities/model_helpers.py b/src/lightning/pytorch/utilities/model_helpers.py
index 928f899ddfbc7..4b455451891c5 100644
--- a/src/lightning/pytorch/utilities/model_helpers.py
+++ b/src/lightning/pytorch/utilities/model_helpers.py
@@ -14,7 +14,7 @@
 import functools
 import inspect
 import os
-from typing import TYPE_CHECKING, Any, Callable, Dict, Generic, Optional, Type, TypeVar
+from typing import TYPE_CHECKING, Any, Callable, Generic, Optional, Type, TypeVar
 
 from lightning_utilities.core.imports import RequirementCache
 from torch import nn
diff --git a/src/version.info b/src/version.info
index ac2cdeba0137a..7d2ed7c702057 100644
--- a/src/version.info
+++ b/src/version.info
@@ -1 +1 @@
-2.1.3
+2.1.4
diff --git a/tests/tests_app/core/test_lightning_app.py b/tests/tests_app/core/test_lightning_app.py
index d529b373e4df0..452e66793cf18 100644
--- a/tests/tests_app/core/test_lightning_app.py
+++ b/tests/tests_app/core/test_lightning_app.py
@@ -1115,7 +1115,7 @@ def __init__(self, flow):
 def test_cloud_compute_binding():
     cloud_compute.ENABLE_MULTIPLE_WORKS_IN_NON_DEFAULT_CONTAINER = True
 
-    assert {} == cloud_compute._CLOUD_COMPUTE_STORE
+    assert cloud_compute._CLOUD_COMPUTE_STORE == {}
     flow = FlowCC()
     assert len(cloud_compute._CLOUD_COMPUTE_STORE) == 2
     assert cloud_compute._CLOUD_COMPUTE_STORE["default"].component_names == ["root.work_c"]
diff --git a/tests/tests_pytorch/strategies/test_fsdp.py b/tests/tests_pytorch/strategies/test_fsdp.py
index c21c11d499492..a5fe13f505e94 100644
--- a/tests/tests_pytorch/strategies/test_fsdp.py
+++ b/tests/tests_pytorch/strategies/test_fsdp.py
@@ -114,7 +114,7 @@ def __init__(self, wrap_min_params: int = 2):
 
         self.save_hyperparameters()
         self.layer = torch.nn.Sequential(torch.nn.Linear(32, 32), torch.nn.ReLU(), torch.nn.Linear(32, 2))
-        self.should_be_wrapped = [(32 * 32 + 32) > wrap_min_params, None, (32 * 2 + 2) > wrap_min_params]
+        self.should_be_wrapped = [wrap_min_params < (32 * 32 + 32), None, wrap_min_params < (32 * 2 + 2)]
 
     def configure_optimizers(self):
         parameters = self.parameters() if _TORCH_GREATER_EQUAL_2_0 else self.trainer.model.parameters()
diff --git a/tests/tests_pytorch/utilities/test_model_helpers.py b/tests/tests_pytorch/utilities/test_model_helpers.py
index 5d7321fe324a3..99dd814e5de87 100644
--- a/tests/tests_pytorch/utilities/test_model_helpers.py
+++ b/tests/tests_pytorch/utilities/test_model_helpers.py
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import inspect
-import logging
 
 import pytest
 from lightning.pytorch import LightningDataModule
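
For context on the recurring `# noqa: S202` suppressions added above (Ruff's S202 rule flags uses of `tarfile.extractall()`, as the pyproject.toml comment notes), a minimal, hypothetical sketch of the member validation that rule asks for when an archive is not trusted; the archive name and destination below are illustrative only and not part of this PR:

import os
import tarfile


def safe_extractall(tar: tarfile.TarFile, destination: str) -> None:
    base = os.path.realpath(destination)
    for member in tar.getmembers():
        target = os.path.realpath(os.path.join(destination, member.name))
        # Reject members that would land outside the destination (path traversal).
        if os.path.commonpath([base, target]) != base:
            raise RuntimeError(f"Refusing to extract unsafe member: {member.name}")
    tar.extractall(path=destination)


with tarfile.open("frontend.tar.gz", mode="r:gz") as tar:
    safe_extractall(tar, "./ui")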