Skip to content

Commit

Permalink
Fix r2ai_singleton, add r2.ai() method and update funcall example
Browse files Browse the repository at this point in the history
  • Loading branch information
radare committed May 24, 2024
1 parent 9a8996a commit 3b505d6
Show file tree
Hide file tree
Showing 4 changed files with 87 additions and 50 deletions.
57 changes: 31 additions & 26 deletions examples/funcall.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,3 @@
from llama_cpp import LlamaGrammar, Llama




# Define a python function and parse it into a grammar
def get_current_weather( location, unit, source):
Expand Down Expand Up @@ -83,27 +79,36 @@ def get_current_weather( location, unit, source):
}]
<</SYS>> [/INST]
[INST]
{
'''
leprompt += f' "prompt": "{question}"'
leprompt += r'''
}
[/INST]
'''
###[INST]
###{
###'''
###leprompt += f' "prompt": "{question}"'
###leprompt += r'''
###}
###[/INST]
###'''


# Collapse the multi-line prompt to a single line so it can be passed
# as one argument string to r2.ai().
p = leprompt.replace("\n", "")
# print(p)
# Route the prompt through the r2 pipe to the global r2ai instance
# (see FakeLang.ai in r2ai/pipe.py).
# NOTE(review): assumes "-r" selects raw-prompt mode on the r2ai side — confirm.
r2.ai(f"-r {p}")
# print(question)
# r2.ai(question)

#model_name = "llama-2-7b-chat-codeCherryPop.Q5_K_M.gguf"
model_name = "mistral-7b-instruct-v0.1.Q2_K.gguf"
# model_name = "dolphin-2_6-phi-2.Q5_K_M.gguf"
# model_name = "codellama-7b-instruct.Q4_K_M.gguf"
# model_name = "codellama-34b-instruct.Q4_K_M.gguf"
# model_name = "Wizard-Vicuna-7B-Uncensored.Q2_K.gguf"
model_path = f"/Users/pancake/Library/Application Support/r2ai/models/{model_name}"
# grammar = SchemaConverter.from_function(get_current_weather)
llm = Llama(model_path, max_tokens=4096, n_ctx=4096, max_length=4096, verbose=False, temperature=0.04) # , top_p=0)
print(leprompt)
# print(llm(prompt="### User: What is the weather in London today? ### Assistant:")["choices"][0]["text"])
res = llm(prompt=leprompt)
# print(res)
print(res["choices"][0]["text"])
# print(llm(prompt=leprompt)["choices"])
def old():
    """Legacy path: load a local GGUF model via llama.cpp and run `leprompt`.

    Kept for reference only; the live code path above sends the prompt
    through r2.ai() instead.
    """
    # Other local models that were tried:
    #   llama-2-7b-chat-codeCherryPop.Q5_K_M.gguf
    #   dolphin-2_6-phi-2.Q5_K_M.gguf
    #   codellama-7b-instruct.Q4_K_M.gguf
    #   codellama-34b-instruct.Q4_K_M.gguf
    #   Wizard-Vicuna-7B-Uncensored.Q2_K.gguf
    model_name = "mistral-7b-instruct-v0.1.Q2_K.gguf"
    model_path = f"/Users/pancake/Library/Application Support/r2ai/models/{model_name}"
    # grammar = SchemaConverter.from_function(get_current_weather)
    model = Llama(model_path, max_tokens=4096, n_ctx=4096, max_length=4096, verbose=False, temperature=0.04)  # , top_p=0)
    print(leprompt)
    # print(model(prompt="### User: What is the weather in London today? ### Assistant:")["choices"][0]["text"])
    completion = model(prompt=leprompt)
    # print(completion)
    print(completion["choices"][0]["text"])
    # print(model(prompt=leprompt)["choices"])
8 changes: 5 additions & 3 deletions r2ai/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import time
import builtins
import traceback
from r2ai.repl import r2ai_singleton

os.environ["TOKENIZERS_PARALLELISM"] = "false"

Expand Down Expand Up @@ -93,13 +94,14 @@ def run_rcfile():
for line in lines.split("\n"):
if line.strip() != "":
if ai is None:
ai = Interpreter()
ai = r2ai_singleton() # Interpreter()
runline(ai, line)
except:
pass
if ai is None:
from r2ai.interpreter import Interpreter
ai = Interpreter()
ai = r2ai_singleton() # Interpreter()
# from r2ai.interpreter import Interpreter
# ai = Interpreter()

rcfile_loaded = False
def run_rcfile_once():
Expand Down
12 changes: 12 additions & 0 deletions r2ai/pipe.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,21 @@
import traceback
have_rlang = False
r2lang = None

class FakeLang:
    def __init__(self, r2):
        # Store the r2 command pipe; cmd() forwards command strings to it.
        self.r2 = r2
def ai(self, x):
try:
from r2ai.repl import r2ai_singleton, runline2
ai = r2ai_singleton()
if ai is None:
print("No global r2ai instance found")
return ""
return runline2(ai, x)
except:
traceback.print_exc()
return None
def cmd(self, x):
r = self.r2.cmd(x)
return r
Expand Down
60 changes: 39 additions & 21 deletions r2ai/repl.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,19 @@
tab_init()

print_buffer = ""
ais = {}
ais = []
autoai = None
from .pipe import have_rlang, r2lang, r2singleton
r2 = r2singleton()

def r2ai_singleton():
    """Return the shared Interpreter, creating and caching it on first use."""
    if not ais:
        from r2ai.interpreter import Interpreter
        # Lazily build the first interpreter and register it in the pool.
        ais.append(R2AI(Interpreter()))
    return ais[0].ai

def r2_cmd(x):
global have_rlang, ai, r2, r2_file
have_rlang = True
Expand Down Expand Up @@ -71,7 +79,7 @@ def r2_cmd(x):
def myprint(msg, file=None):
    """Print *msg* and also append it to the global capture buffer.

    Drop-in replacement for builtins.print used to mirror output into
    `print_buffer`. The *file* argument is now forwarded to the real
    print (previously it was accepted but silently ignored, so messages
    intended for stderr went to stdout).
    """
    global print_buffer
    builtins.print(msg, file=file)
    # str() guards against non-string messages (e.g. exceptions, ints).
    print_buffer += str(msg)

def runline2(ai, usertext):
global print
Expand Down Expand Up @@ -140,11 +148,17 @@ def run_script(ai, script):
except:
pass

class R2AI:
    """Thin wrapper pairing an Interpreter instance with a command helper."""
    def __init__(self, ai):
        # The wrapped interpreter; exposed as .ai for callers like runline().
        self.ai = ai
    def cmd(self, x):
        """Run the r2ai command line *x* against the wrapped interpreter.

        Fixes the original definition, which omitted `self` and passed the
        unresolved name `cmd` (a NameError at call time) instead of `x`.
        """
        return runline2(self.ai, x)

def runline(ai, usertext):
global print
global autoai
if ai == None:
ai = ais[0];
ai = ais[0].ai
usertext = usertext.strip()
if usertext == "" or usertext.startswith("#"):
return
Expand Down Expand Up @@ -325,18 +339,23 @@ def runline(ai, usertext):
que = input("[Query]> ")
ai.chat(res)
elif usertext.startswith("-n"):
if len(ais.keys()) == 0:
ais[0] = ai
if len(ais) == 0:
ais.append(R2AI(ai))
if usertext == "-n":
for a in ais.keys():
model = ais[a].model
print(f"{a} - {model}")
pos = 0
for a in ais:
model = a.ai.model
print(f"{pos} - {model}")
pos += 1
else:
index = int(usertext[2:])
if index not in ais:
ais[index] = r2ai.Interpreter()
ais[index].model = ai.model
ai = ais[index]
if index < len(ais):
ai = ais[index].ai
else:
from r2ai.interpreter import Interpreter
ai0 = Interpreter()
ai0.model = ai.model
ais.append(R2AI(ai0))
elif usertext.startswith("-c"):
words = usertext[2:].strip().split(" ", 1)
res = r2_cmd(words[0])
Expand All @@ -354,20 +373,19 @@ def runline(ai, usertext):
elif usertext[0] == "!":
os.system(usertext[1:])
elif usertext[0] == ".":
if len(usertext) > 1 and usertext[1] == ".": # ".." - run user plugins
runplugin(ai, usertext[2:].strip())
return
#if len(usertext) > 1 and usertext[1] == ".": # ".." - run user plugins
# runplugin(ai, usertext[2:].strip())
# return
try:
filename = usertext[1:].strip()
file = slurp(filename)
if filename.endswith(".py"):
exec(file, globals())
exec(file, globals())
else:
for line in file.split("\n"):
runline(ai, line)
for line in file.split("\n"):
runline(ai, line)
except Exception as e:
# traceback.print_exc()
print(e)
traceback.print_exc()
pass
elif usertext.startswith("' "):
if not autoai:
Expand All @@ -378,7 +396,7 @@ def runline(ai, usertext):
if r2 is None:
print("r2 is not available", file=sys.stderr)
else:
print(r2_cmd(usertext[1:]))
builtins.print(r2_cmd(usertext[1:]))
elif usertext.startswith("-"):
print("Unknown flag. See 'r2ai -h' for help", file=sys.stderr)
else:
Expand Down

0 comments on commit 3b505d6

Please sign in to comment.