From 204273c15a0a36e6b663c4146a876f6fbbc90034 Mon Sep 17 00:00:00 2001
From: sudoskys
Date: Fri, 24 Feb 2023 14:36:22 +0800
Subject: [PATCH] feat:docs

---
 README.md    | 63 +++++++++++++++++-------------------------------
 README_ZH.md | 68 ++++++++++++++++++----------------------------------
 2 files changed, 45 insertions(+), 86 deletions(-)

diff --git a/README.md b/README.md
index c268a504c..f195f50e8 100644
--- a/README.md
+++ b/README.md
@@ -59,30 +59,29 @@ curl -LO https://raw.kgithub.com/LLMKira/Openaibot/main/setup.sh && sh setup.sh
 
 ### 🍽 Configure
 
-- init
+- Set up Redis
 
 ```shell
-cp Config/app_exp.toml Config/app.toml
-
-nano Config/app.toml
+apt-get install redis
+systemctl enable redis.service --now
 ```
 
-- Data
+- Edit the bot config
 
 ```shell
-apt-get install redis
-systemctl enable redis.service --now
-```
+cp Config/app_exp.toml Config/app.toml
 
-- Config/app.toml
+nano Config/app.toml
+```
 
 ```toml
 # Comment out which part you don't want to start
 
 # QQ Bot
 [Controller.QQ]
-master = [114, 514] # master user id
-account = 0
+master = [114, 514] # Master user's QQ number
+account = 0 # The bot's QQ number
 http_host = 'http://localhost:8080' # Mirai http Server
 ws_host = 'http://localhost:8080' # Mirai Websocket Server
 verify_key = ""
@@ -90,49 +89,28 @@ trigger = false # Proactive response when appropriate
 INTRO = "POWER BY OPENAI" # Suffixes for replies
 ABOUT = "Created by github.com/LLMKira/Openaibot" # /about
 WHITE = "Group NOT in WHITE list" # Whitelist/Blacklist tips
-
 # Proxy set, but does not proxy openai api, only bot
 proxy = { status = false, url = "http://127.0.0.1:7890" }
 
 # Telegram Bot
 [Controller.Telegram]
-master = [114, 514] # master user id
+master = [114, 514] # Master user ID; get it from @JsonDumpBot
 botToken = '' # Bot Token @botfather
 trigger = false
 INTRO = "POWER BY OPENAI"
 ABOUT = "Created by github.com/LLMKira/Openaibot"
 WHITE = "Group NOT in WHITE list"
-
-# Proxy set, but does not proxy openai api, only bot
+# Proxy for the bot only; the openai api proxy is configured in service.json
 proxy = { status = false, url = "http://127.0.0.1:7890" }
 
-# Base conversation event server, for Web support or smart speakers
+# Base conversation event server, for Web support and the Voice Assistant
 [Controller.BaseServer]
 host = "127.0.0.1"
 port = 9559
 ```
 
-- Config/service.json
-
-```json5
-{
-  // ....other config
-
-  // ******Models
-  "backend": {
-    "type": "openai",
-    // TYPE!
-    "openai": {
-      "model": "text-davinci-003",
-      "token_limit": 4000
-    },
-    "chatgpt": {
-      "api": null,
-      "agree": false
-    }
-  },
-}
-```
+If you want to configure the backend or an OpenAI API proxy, please
+check the [Deploy Docs](https://llmkira.github.io/Docs/guide/service).
 
 ### 🪶 App Token
 
@@ -162,16 +140,19 @@ python3 main.py
 
 # run bot
 pm2 start pm.json
 
-# monitor bot status
-pm2 monit
+# check bot status
+pm2 status
 
 # stop bot
 pm2 stop pm2.json
-pm2 stop [id]
-
+pm2 stop <id>
+
+# restart bot
+pm2 restart <id>
 ```
 
+Once configured, send the bot a message and use the `/add_white_user` command to add the platform ID it returns to the
+whitelist; after that you can start chatting.
+Or use `/close_group_white_mode` to turn off the bot's *group whitelist* mode.
+
 ### 🎤 Or Run Voice Assistant
 
 In addition to the bot, we also have a voice assistant.
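Note on the Run Bot steps above: the patch swaps `pm2 monit` for `pm2 status`, but never shows how to confirm that Redis and the bot actually came up. Below is a minimal sketch of those checks, assuming Redis was installed from the distro package and the bot was started with `pm2 start pm.json` as in the README; the process name or id that pm2 reports depends on what pm.json defines.

```shell
# Check that the Redis instance the bot depends on is reachable
redis-cli ping                      # expected reply: PONG
systemctl status redis.service --no-pager

# Check that the bot process is registered and healthy under pm2
pm2 status                          # note the id/name column for the bot process
pm2 logs --lines 20                 # tail recent output to confirm a clean start
```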
diff --git a/README_ZH.md b/README_ZH.md
index b651fff6e..4570a0768 100644
--- a/README_ZH.md
+++ b/README_ZH.md
@@ -49,7 +49,7 @@
 curl -LO https://raw.githubusercontent.com/LLMKira/Openaibot/main/setup.sh && sh setup.sh
 ```
 
-For users in China
+If you cannot reach GitHub, use the mirror below
 
 ```shell
 curl -LO https://raw.kgithub.com/LLMKira/Openaibot/main/setup.sh && sh setup.sh
@@ -57,15 +57,7 @@ curl -LO https://raw.kgithub.com/LLMKira/Openaibot/main/setup.sh && sh setup.sh
 
 ### 🍽 Configure
 
-- Initialize
-
-```shell
-cp Config/app_exp.toml Config/app.toml
-
-nano Config/app.toml
-```
-
-- Data
+- Set up the data server (Redis)
 
 ```shell
 apt-get install redis
@@ -74,13 +66,19 @@ systemctl start redis.service
 
 - Config/app.toml
 
+```shell
+cp Config/app_exp.toml Config/app.toml
+
+nano Config/app.toml
+```
+
 ```toml
 # Comment out which part you don't want to start
 
 # QQ Bot
 [Controller.QQ]
-master = [114, 514] # master user id
-account = 0
+master = [114, 514] # Your QQ number
+account = 0 # The bot's QQ number
 http_host = 'http://localhost:8080' # Mirai http Server
 ws_host = 'http://localhost:8080' # Mirai Websocket Server
 verify_key = ""
@@ -88,19 +86,17 @@ trigger = false # Proactive response when appropriate
 INTRO = "POWER BY OPENAI" # Suffixes for replies
 ABOUT = "Created by github.com/LLMKira/Openaibot" # /about
 WHITE = "Group NOT in WHITE list" # Whitelist/Blacklist tips
-
 # Proxy set, but does not proxy openai api, only bot
 proxy = { status = false, url = "http://127.0.0.1:7890" }
 
 # Telegram Bot
 [Controller.Telegram]
-master = [114, 514] # master user id
+master = [114, 514] # Your user ID; message @JsonDumpBot and read message.from.id
 botToken = '' # Bot Token @botfather
 trigger = false
 INTRO = "POWER BY OPENAI"
 ABOUT = "Created by github.com/LLMKira/Openaibot"
 WHITE = "Group NOT in WHITE list"
-
 # Proxy for the bot only, does not proxy the openai api
 proxy = { status = false, url = "http://127.0.0.1:7890" }
 
@@ -110,27 +106,7 @@ host = "127.0.0.1"
 port = 9559
 ```
 
-- Config/service.json
-
-```json5
-{
-  // ....other config
-
-  // ******Models
-  "backend": {
-    "type": "openai",
-    // TYPE!
-    "openai": {
-      "model": "text-davinci-003",
-      "token_limit": 4000
-    },
-    "chatgpt": {
-      "api": null,
-      "agree": false
-    }
-  },
-}
-```
+If you want to configure other models or an OpenAI API proxy, see the [Deploy Docs](https://llmkira.github.io/Docs/guide/service).
 
 ### 🪶 App Token
 
@@ -146,7 +122,7 @@ port = 9559
 
 ### 🌻 Run Bot
 
-Our bot can run multi-threaded
+Our bot can run as multiple processes
 
 ```shell
 apt install npm
@@ -159,20 +135,22 @@ python3 main.py
 
 # run bot
 pm2 start pm.json
 
-```
-
-Check the bot's status
-```
-pm2 monit
-```
+# check the bot's status
+pm2 status
 
-Stop the bot
+# stop the bot
+pm2 stop pm2.json
 
-```
-pm2 stop pm2.json
-```
+# restart the bot
+pm2 restart <id>
+
 ```
 
+Once configured, send the bot a message and use the `/add_white_user` command to add the platform ID it returns to the whitelist; then you can start chatting.
+Or use `/close_group_white_mode` to turn off the bot's *group whitelist* mode.
+
 ### 🎤 Or Run Voice Assistant
 
 In addition to the bot, we also have a voice assistant.
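Both READMEs now tell the reader to restart the bot by its pm2 task id, but neither shows where that id comes from. The sketch below walks through that lifecycle, assuming the bot was started from the repository's pm2 config file as above; the `pm2 save` and `pm2 startup` step is an optional extra, not something this patch mentions.

```shell
# The numeric id comes from the "id" column of `pm2 status`
pm2 status
pm2 restart 0                       # example only: replace 0 with your bot's id
pm2 stop pm2.json                   # or stop every process declared in the pm2 config file
pm2 save && pm2 startup             # optional: persist the process list across reboots
```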