forked from Lightning-AI/litgpt
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
10 changed files
with
195 additions
and
45 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -2,6 +2,7 @@ __pycache__ | |
.idea | ||
.DS_Store | ||
*.egg-info | ||
build | ||
|
||
# data | ||
data | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,119 @@ | ||
import functools | ||
import subprocess | ||
import sys | ||
from contextlib import redirect_stdout | ||
from io import StringIO | ||
from pathlib import Path | ||
from unittest import mock | ||
from unittest.mock import Mock, PropertyMock, call, ANY | ||
|
||
import pytest | ||
import torch | ||
|
||
wd = Path(__file__).parent.parent.absolute() | ||
|
||
|
||
@functools.lru_cache(maxsize=1)
def load_generate_script():
    """Import and return the top-level ``generate`` module exactly once.

    The repo root is appended to ``sys.path`` so the bare ``import
    generate`` resolves; ``lru_cache`` guarantees subsequent calls reuse
    the same module object without touching ``sys.path`` again.
    """
    sys.path.append(str(wd))

    import generate

    return generate
|
||
|
||
@pytest.mark.parametrize("B", (1, 2))
def test_generate(B):
    """generate() must return the prompt followed by exactly the tokens
    that ``torch.multinomial`` sampled, for batch sizes 1 and 2."""
    generate = load_generate_script()

    T, C = 5, 3
    logits = torch.randn(B, T, C)
    input_idx = torch.randint(10, size=(B, T))

    model = Mock(return_value=logits)
    max_new_tokens = 20

    # Record every token torch.multinomial produces so we can rebuild
    # the expected output sequence afterwards.
    sampled_tokens = []
    real_multinomial = torch.multinomial

    def recording_multinomial(*args, **kwargs):
        token = real_multinomial(*args, **kwargs)
        sampled_tokens.append(token)
        return token

    with mock.patch("torch.multinomial", recording_multinomial):
        out = generate.generate(model, input_idx, max_new_tokens, max_seq_length=10)

    assert out.shape == (B, T + max_new_tokens)
    stacked = torch.hstack(sampled_tokens)
    expected = torch.cat((input_idx, stacked), dim=1)
    assert out.shape == expected.shape
    torch.testing.assert_close(out, expected)
|
||
|
||
def test_main(tmp_path, monkeypatch):
    """End-to-end check of generate.main() with every collaborator mocked:
    verifies model construction, checkpoint/tokenizer loading, the Fabric
    call sequence, the generate() invocations, and the printed output."""
    generate = load_generate_script()

    # main() only needs the files to exist; empty placeholders suffice.
    checkpoint_path = tmp_path / "ckpt"
    checkpoint_path.touch()
    tokenizer_path = tmp_path / "tokenizer"
    tokenizer_path.touch()

    class FabricMock(PropertyMock):
        # main() uses `fabric.device` as a context manager; expose a real
        # device object so the mock records __enter__/__exit__ calls.
        @property
        def device(self):
            return torch.device("cpu")

    fabric_mock = FabricMock()
    monkeypatch.setattr(generate.L, "Fabric", fabric_mock)

    model_mock = Mock()
    monkeypatch.setattr(generate.LLaMA, "from_name", model_mock)

    load_mock = Mock()
    monkeypatch.setattr(generate.torch, "load", load_mock)

    tokenizer_mock = Mock()
    tokenizer_mock.return_value.encode.return_value = torch.tensor([[1, 2, 3]])
    tokenizer_mock.return_value.decode.return_value = "foo bar baz"
    monkeypatch.setattr(generate, "Tokenizer", tokenizer_mock)

    generate_mock = Mock()
    generate_mock.return_value = torch.tensor([[3, 2, 1]])
    monkeypatch.setattr(generate, "generate", generate_mock)

    num_samples = 2
    out = StringIO()
    with redirect_stdout(out):
        generate.main(
            checkpoint_path=checkpoint_path,
            tokenizer_path=tokenizer_path,
            model_size="1T",
            accelerator="litpu",
            temperature=2.0,
            top_k=2,
            num_samples=num_samples,
        )

    model_mock.assert_called_once_with("1T")
    load_mock.assert_called_once_with(checkpoint_path)
    tokenizer_mock.assert_called_once_with(tokenizer_path)
    decode = tokenizer_mock.return_value.decode
    assert len(decode.mock_calls) == num_samples
    assert torch.allclose(decode.call_args[0][0], generate_mock.return_value)

    model = model_mock.return_value
    assert fabric_mock.mock_calls == [
        call(accelerator="litpu", devices=1),
        call().device.__enter__(),
        call().device.__exit__(None, None, None),
        call().setup_module(model),
    ]
    # After setup, main() must use the Fabric-wrapped model for generation.
    model = fabric_mock.return_value.setup_module.return_value
    expected_call = call(model, ANY, 50, model.config.block_size, temperature=2.0, top_k=2)
    assert generate_mock.mock_calls == [expected_call] * num_samples
    # only the generated result is printed to stdout
    assert out.getvalue() == "foo bar baz\n" * num_samples
|
||
|
||
def test_cli():
    """The generate script must expose a CLI whose --help text describes it.

    Runs ``generate.py -h`` in a subprocess and checks the usage string.
    """
    cli_path = wd / "generate.py"
    output = subprocess.check_output([sys.executable, cli_path, "-h"])
    # fix: check_output returns bytes; .decode() already yields str, so the
    # original str(...) wrapper was redundant.
    output = output.decode()
    assert "Generates text samples" in output
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,23 @@ | ||
import os | ||
import subprocess | ||
import sys | ||
from pathlib import Path | ||
|
||
wd = (Path(__file__).parent.parent / "scripts").absolute() | ||
|
||
|
||
def test_prepare(tmp_path):
    """prepare() must produce the tokenizer artifacts plus train/val splits."""
    sys.path.append(str(wd))

    import prepare_shakespeare

    prepare_shakespeare.prepare(tmp_path)

    produced = set(os.listdir(tmp_path))
    expected = {"train.bin", "tokenizer.model", "tokenizer.vocab", "input.txt", "val.bin"}
    assert produced == expected
|
||
|
||
def test_cli():
    """The prepare_shakespeare script must expose a CLI with descriptive help.

    Runs ``prepare_shakespeare.py -h`` in a subprocess and checks the
    usage string.
    """
    cli_path = wd / "prepare_shakespeare.py"
    output = subprocess.check_output([sys.executable, cli_path, "-h"])
    # fix: check_output returns bytes; .decode() already yields str, so the
    # original str(...) wrapper was redundant.
    output = output.decode()
    assert 'Prepare the "Tiny Shakespeare"' in output