forked from sinaptik-ai/pandas-ai
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathlangchain.py
60 lines (41 loc) · 1.57 KB
/
langchain.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
from __future__ import annotations
try:
from langchain_core.language_models import BaseLanguageModel
from langchain_core.language_models.chat_models import BaseChatModel
except ImportError:
from unittest.mock import Mock
# Fallback definitions if langchain_core is not installed
BaseLanguageModel = BaseChatModel = Mock
from typing import TYPE_CHECKING
from pandasai.prompts.base import BasePrompt
from .base import LLM
if TYPE_CHECKING:
from pandasai.pipelines.pipeline_context import PipelineContext
"""Langchain LLM
This module is to run LLM using LangChain framework.
Example:
Use below example to call LLM
>>> from pandasai.llm.langchain import LangchainLLM
"""
def is_langchain_llm(llm) -> bool:
    """Return True when *llm* is a LangChain language model instance."""
    return isinstance(llm, BaseLanguageModel)
class LangchainLLM(LLM):
    """Adapter that makes a LangChain language model usable as a PandasAI LLM.

    Wraps any LangChain ``BaseLanguageModel`` so PandasAI can drive it
    through its own ``LLM`` interface.
    """

    # The wrapped LangChain model instance.
    langchain_llm: BaseLanguageModel

    def __init__(self, langchain_llm: BaseLanguageModel):
        self.langchain_llm = langchain_llm

    def call(
        self, instruction: BasePrompt, context: PipelineContext = None, suffix: str = ""
    ) -> str:
        """Render *instruction* (plus *suffix*), invoke the wrapped model, return its text.

        The system prompt from *context*'s memory (if any) is prepended
        before invocation, and the final prompt is stored in ``last_prompt``.
        """
        memory = context.memory if context else None
        rendered = instruction.to_string() + suffix
        rendered = self.prepend_system_prompt(rendered, memory)
        self.last_prompt = rendered
        result = self.langchain_llm.invoke(rendered)
        # Chat models return a message object; plain LLMs return a raw string.
        if isinstance(self.langchain_llm, BaseChatModel):
            return result.content
        return result

    @property
    def type(self) -> str:
        """Identifier of the underlying model, prefixed with ``langchain_``."""
        return f"langchain_{self.langchain_llm._llm_type}"