Skip to content

Commit

Permalink
[Fix] Fix a bug when a module is missing in older versions of bitsandbytes (
Browse files Browse the repository at this point in the history
  • Loading branch information
Ben-Louis authored Oct 31, 2023
1 parent b0c701a commit e0cf958
Showing 1 changed file with 12 additions and 10 deletions.
22 changes: 12 additions & 10 deletions mmengine/optim/optimizer/builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,10 @@ def register_sophia_optimizers() -> List[str]:
def register_bitsandbytes_optimizers() -> List[str]:
"""Register optimizers in ``bitsandbytes`` to the ``OPTIMIZERS`` registry.
In the `bitsandbytes` library, optimizers that have the same name as the
default optimizers in PyTorch are prefixed with ``bnb_``. For example,
``bnb_Adagrad``.
Returns:
List[str]: A list of registered optimizers' name.
"""
Expand All @@ -141,16 +145,14 @@ def register_bitsandbytes_optimizers() -> List[str]:
except ImportError:
pass
else:
for module_name in [
'AdamW8bit', 'Adam8bit', 'Adagrad8bit', 'PagedAdam8bit',
'PagedAdamW8bit', 'LAMB8bit', 'LARS8bit', 'RMSprop8bit',
'Lion8bit', 'PagedLion8bit', 'SGD8bit'
]:
_optim = getattr(bnb.optim, module_name)
if inspect.isclass(_optim) and issubclass(_optim,
torch.optim.Optimizer):
OPTIMIZERS.register_module(module=_optim)
dadaptation_optimizers.append(module_name)
optim_classes = inspect.getmembers(
bnb.optim, lambda _optim: (inspect.isclass(_optim) and issubclass(
_optim, torch.optim.Optimizer)))
for name, optim_cls in optim_classes:
if name in OPTIMIZERS:
name = f'bnb_{name}'
OPTIMIZERS.register_module(module=optim_cls, name=name)
dadaptation_optimizers.append(name)
return dadaptation_optimizers


Expand Down

0 comments on commit e0cf958

Please sign in to comment.