Skip to content

Commit

Permalink
Moved llama_cpp import from module level into LlamaCpp.__init__ (removed top-level import)
Browse files Browse the repository at this point in the history
  • Loading branch information
AbdulDridi committed Aug 2, 2024
1 parent ac94c15 commit 74ec6af
Show file tree
Hide file tree
Showing 2 changed files with 13 additions and 16 deletions.
28 changes: 13 additions & 15 deletions dsp/modules/llama.py
Original file line number Diff line number Diff line change
@@ -1,30 +1,28 @@
try:
import llama_cpp
from llama_cpp import Llama
except ImportError as exc:
raise ModuleNotFoundError(
"""You need to install llama_cpp library to use gguf models.
CPU - pip install llama-cpp-python
CUDA - pip install llama-cpp-python --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/<cuda-version> e.g. cu121 for CUDA 12.1
METAL(Mac) - pip install llama-cpp-python --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/metal
others: https://pypi.org/project/llama-cpp-python/""",
) from exc

from typing import Any, Literal

from dsp.modules.lm import LM


class LlamaCpp(LM):
def __init__(
self,
model: str, # "llama" or the actual model name
llama_model: Llama,
llama_model: Any = None,
model_type: Literal["chat", "text"] = None,
**kwargs,
):
super().__init__(model)

try:
import llama_cpp
from llama_cpp import Llama
except ImportError as exc:
raise ModuleNotFoundError(
"""You need to install the llama_cpp library to use gguf models.
CPU - pip install llama-cpp-python
CUDA - pip install llama-cpp-python --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/<cuda-version> e.g. cu121 for CUDA 12.1
METAL(Mac) - pip install llama-cpp-python --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/metal
others: https://pypi.org/project/llama-cpp-python/""",
) from exc

default_model_type = "text"
self.model_type = model_type if model_type else default_model_type
self.provider = "llama"
Expand Down
1 change: 0 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,6 @@ pgvector = { version = "^0.2.5", optional = true }
structlog = "^24.1.0"
llama-index = {version = "^0.10.30", optional = true}
snowflake-snowpark-python = { version = "*",optional=true, python = ">=3.9,<3.12" }
llama-cpp-python = { version = "^0.2.82", optional = true }
jinja2 = "^3.1.3"


Expand Down

0 comments on commit 74ec6af

Please sign in to comment.