Downgrade lightning to before bitsandbytes upgrade (Lightning-AI#1104)
carmocca authored and awaelchli committed Mar 15, 2024
1 parent c80d260 commit e120da1
Showing 3 changed files with 7 additions and 16 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -10,7 +10,7 @@ license = { file = "LICENSE" }

 dependencies = [
     "torch>=2.2.0",
-    "lightning @ git+https://github.com/Lightning-AI/lightning@f23b3b1e7fdab1d325f79f69a28706d33144f27e",
+    "lightning @ git+https://github.com/Lightning-AI/lightning@b19c3a961c79028d7c39a4f1ff1c2df991406d1d",
     # TODO: install from PyPI when https://github.com/omni-us/jsonargparse/pull/466 is released
     "jsonargparse[signatures] @ git+https://github.com/omni-us/jsonargparse",
 ]
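For context, the change above swaps one exact-commit pin for another; pip resolves a "lightning @ git+https://...@<sha>" requirement at install time and records the resolved commit in PEP 610 metadata. A minimal sketch (not part of this commit) of how one could confirm which commit an installed pin actually resolved to:

    # Sketch: check which git commit a VCS-pinned dependency resolved to.
    # Assumes lightning was installed from a git URL, so pip wrote PEP 610
    # direct_url.json metadata; a plain PyPI install returns None here.
    import json
    from importlib.metadata import distribution

    raw = distribution("lightning").read_text("direct_url.json")
    if raw is None:
        print("lightning was not installed from a direct URL")
    else:
        info = json.loads(raw)
        print(info["url"], info.get("vcs_info", {}).get("commit_id"))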
17 changes: 4 additions & 13 deletions tests/test_ci.py
@@ -1,19 +1,10 @@
 # Copyright Lightning AI. Licensed under the Apache License 2.0, see LICENSE file.

 # this file is just to validate on the CI logs that these tests were run
+from lightning.fabric.plugins.precision.bitsandbytes import _BITSANDBYTES_AVAILABLE

-from conftest import RunIf
-
-
-@RunIf(min_cuda_gpus=1)
-def test_runif_min_cuda_gpus():
-    assert True
-
-
-@RunIf(min_cuda_gpus=1, standalone=True)
-def test_runif_min_cuda_gpus_standalone():
-    assert True
-
-
-@RunIf(standalone=True)
-def test_runif_standalone():
-    assert True
+def test_gpu_ci_installs_bitsandbytes():
+    assert _BITSANDBYTES_AVAILABLE, str(_BITSANDBYTES_AVAILABLE)
4 changes: 2 additions & 2 deletions tests/test_lora.py
@@ -689,8 +689,8 @@ def test_lora_bitsandbytes(monkeypatch, tmp_path, fake_checkpoint_dir, alpaca_pa
         },
     }

-    assert {p.name for p in tmp_path.rglob("*.pth")} == {"lit_model.pth"}
-    state_dict = torch.load(tmp_path / "final" / "lit_model.pth")
+    assert {p.name for p in tmp_path.rglob("*.lora")} == {"lit_model.pth.lora"}
+    state_dict = torch.load(tmp_path / "final" / "lit_model.pth.lora")
     assert len(state_dict) == 1
     dtype_to_name = {"torch.float16": set()}
     for name, layer in state_dict["model"].items():
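The updated assertions load the renamed .lora checkpoint and then bucket parameter names by dtype. A self-contained sketch of that bookkeeping (the helper name and the full loop body are assumptions extrapolated from the visible lines):

    # Sketch: group parameter names in a saved LoRA checkpoint by dtype,
    # mirroring the assertions above; lora_dtypes is a hypothetical helper.
    from pathlib import Path

    import torch


    def lora_dtypes(out_dir: Path) -> dict:
        state_dict = torch.load(out_dir / "final" / "lit_model.pth.lora")
        assert len(state_dict) == 1  # only the "model" entry is saved
        dtype_to_name: dict = {}
        for name, layer in state_dict["model"].items():
            dtype_to_name.setdefault(str(layer.dtype), set()).add(name)
        return dtype_to_name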
