Skip to content

Commit

Permalink
Introducing Llama Stack distributions (meta-llama#37)
Browse files Browse the repository at this point in the history
* A single distro url to rule them all

* fix typo

* gut out the llama_agentic_system implementation and move it to toolchain

there is no `llama_agentic_system` sub-directory anymore

* custom tool naming

* updated var names to refer to distribution; updated requirements

* make test run consistently

* Updated README

* Remove FP8 section, since it is rolled in now

* Show safety configuration

* Shorter subtitle lol

* kill the dev dependencies

* added ollama distro details to readme

* Clarify Ollama instructions

---------

Co-authored-by: Hardik Shah <[email protected]>
Co-authored-by: dltn <[email protected]>
  • Loading branch information
3 people authored Aug 8, 2024
1 parent ce0c902 commit afa0630
Show file tree
Hide file tree
Showing 46 changed files with 290 additions and 3,494 deletions.
1 change: 0 additions & 1 deletion MANIFEST.in
Original file line number Diff line number Diff line change
@@ -1,2 +1 @@
include requirements.txt
include llama_agentic_system/data/*.yaml
411 changes: 243 additions & 168 deletions README.md

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions app/chat_moderation_with_llama_guard.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,15 +8,15 @@
from utils.chat import chat, State
from utils.client import ClientManager

from utils.common import DISABLE_SAFETY, INFERENCE_HOST, INFERENCE_PORT, on_attach
from utils.common import DISABLE_SAFETY, DISTRIBUTION_HOST, DISTRIBUTION_PORT, on_attach

from utils.transform import transform


client_manager = ClientManager()
client_manager.init_client(
inference_port=INFERENCE_PORT,
host=INFERENCE_HOST,
inference_port=DISTRIBUTION_PORT,
host=DISTRIBUTION_HOST,
custom_tools=[],
disable_safety=DISABLE_SAFETY,
)
Expand Down
6 changes: 3 additions & 3 deletions app/chat_with_custom_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,14 +9,14 @@
from examples.custom_tools.ticker_data import TickerDataTool
from utils.chat import chat, State
from utils.client import ClientManager
from utils.common import DISABLE_SAFETY, INFERENCE_HOST, INFERENCE_PORT, on_attach
from utils.common import DISABLE_SAFETY, DISTRIBUTION_HOST, DISTRIBUTION_PORT, on_attach
from utils.transform import transform


client_manager = ClientManager()
client_manager.init_client(
inference_port=INFERENCE_PORT,
host=INFERENCE_HOST,
inference_port=DISTRIBUTION_PORT,
host=DISTRIBUTION_HOST,
custom_tools=[TickerDataTool()],
disable_safety=DISABLE_SAFETY,
)
Expand Down
6 changes: 3 additions & 3 deletions app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,14 +8,14 @@

from utils.chat import chat, State
from utils.client import ClientManager
from utils.common import DISABLE_SAFETY, INFERENCE_HOST, INFERENCE_PORT, on_attach
from utils.common import DISABLE_SAFETY, DISTRIBUTION_HOST, DISTRIBUTION_PORT, on_attach
from utils.transform import transform


client_manager = ClientManager()
client_manager.init_client(
inference_port=INFERENCE_PORT,
host=INFERENCE_HOST,
inference_port=DISTRIBUTION_PORT,
host=DISTRIBUTION_HOST,
custom_tools=[],
disable_safety=DISABLE_SAFETY,
)
Expand Down
4 changes: 2 additions & 2 deletions app/utils/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

from llama_toolchain.inference.api import * # noqa: F403
from dotenv import load_dotenv
from llama_agentic_system.api.datatypes import StepType
from llama_toolchain.agentic_system.api.datatypes import StepType
from llama_toolchain.safety.api.datatypes import ShieldResponse

MAX_VIOLATIONS = 3
Expand Down Expand Up @@ -431,7 +431,7 @@ def render_tool(op_uuid: str, op: RenderableOutputType):
key=f"{len(state.output)}",
style=me.Style(
color=_COLOR_BUTTON,
)
),
)

me.slide_toggle(label="Debug Mode", on_change=on_debug_mode_change)
2 changes: 1 addition & 1 deletion app/utils/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

import asyncio

from llama_agentic_system.utils import get_agent_system_instance
from llama_toolchain.agentic_system.utils import get_agent_system_instance

global CLIENT
CLIENT = None
Expand Down
4 changes: 2 additions & 2 deletions app/utils/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@

UPLOADS_DIR = "app/uploads/"
CHUNK_SIZE = 1024
INFERENCE_PORT = os.environ.get("INFERENCE_PORT", 5000)
INFERENCE_HOST = os.environ.get("INFERENCE_HOST", "localhost")
DISTRIBUTION_PORT = os.environ.get("DISTRIBUTION_PORT", 5000)
DISTRIBUTION_HOST = os.environ.get("DISTRIBUTION_HOST", "localhost")
DISABLE_SAFETY = bool(int(os.environ.get("DISABLE_SAFETY", "0")))


Expand Down
7 changes: 2 additions & 5 deletions app/utils/transform.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@
import uuid

import mesop as me
from llama_agentic_system.api.datatypes import (
AgenticSystemTurnResponseEventType,
from llama_toolchain.agentic_system.api import (
AgenticSystemTurnResponseEventType as EventType,
StepType,
)

Expand All @@ -24,9 +24,6 @@
EVENT_LOOP = asyncio.new_event_loop()


EventType = AgenticSystemTurnResponseEventType


def transform(content: InterleavedTextAttachment):
state = me.state(State)

Expand Down
4 changes: 3 additions & 1 deletion examples/custom_tools/ticker_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,10 @@
from typing import Dict

import yfinance as yf
from llama_agentic_system.tools.custom import SingleMessageCustomTool
from llama_models.llama3_1.api.datatypes import ToolParamDefinition
from llama_toolchain.agentic_system.tools.custom.datatypes import (
SingleMessageCustomTool,
)


class TickerDataTool(SingleMessageCustomTool):
Expand Down
2 changes: 1 addition & 1 deletion examples/notebooks/Cybersecurity_demo.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
"metadata": {},
"outputs": [],
"source": [
"from llama_agentic_system.import CodeInterpreterTool, with_safety\n",
"from llama_toolchain.agentic_system import CodeInterpreterTool, with_safety\n",
"\n",
"from llama_models.llama3_1.api import Message\n",
"\n",
Expand Down
2 changes: 1 addition & 1 deletion examples/notebooks/LlamaGuard.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@
"\n",
"from llama_toolchain.spec import *\n",
"\n",
"from llama_agentic_system.utils import get_agent_system_instance\n",
"from llama_toolchain.agentic_system.utils import get_agent_system_instance\n",
"\n",
"\n",
"async def get_assistant(host: str, port: int, disable_safety: bool = False):\n",
Expand Down
14 changes: 8 additions & 6 deletions examples/scripts/multi_turn.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,16 @@

from dotenv import load_dotenv

from llama_agentic_system.client import execute_with_custom_tools
from llama_agentic_system.event_logger import EventLogger
from llama_models.llama3_1.api.datatypes import * # noqa: F403

from llama_agentic_system.tools.custom import CustomTool
from llama_agentic_system.utils import get_agent_system_instance
from llama_toolchain.agentic_system.event_logger import EventLogger
from llama_toolchain.agentic_system.tools.custom.datatypes import CustomTool

from llama_models.llama3_1.api.datatypes import * # noqa: F403
from llama_agentic_system.api import * # noqa: F403
from llama_toolchain.agentic_system.tools.custom.execute import (
execute_with_custom_tools,
)
from llama_toolchain.agentic_system.utils import get_agent_system_instance
from llama_toolchain.agentic_system.api import * # noqa: F403

from termcolor import cprint

Expand Down
5 changes: 0 additions & 5 deletions llama_agentic_system/__init__.py

This file was deleted.

Loading

0 comments on commit afa0630

Please sign in to comment.