Merge pull request guangzhengli#29 from guangzhengli/dev
Add file size validation and good examples
guangzhengli authored Apr 11, 2023
2 parents 59164bd + d1ccaa2 commit eef42c7
Showing 20 changed files with 532 additions and 17 deletions.
47 changes: 47 additions & 0 deletions chatfiles-ui/.dockerignore
@@ -0,0 +1,47 @@
# flyctl launch added from .gitignore
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
node_modules
.pnp
**/.pnp.js

# testing
coverage

# next.js
.next
out
dist

# production
build

# misc
**/.DS_Store
**/*.pem

# debug
**/npm-debug.log*
**/yarn-debug.log*
**/yarn-error.log*
**/.pnpm-debug.log*

# local env files
**/.env*.local

# vercel
**/.vercel

# typescript
**/*.tsbuildinfo
**/next-env.d.ts
**/.idea

# flyctl launch added from .idea/.gitignore
# Default ignored files
.idea/shelf
.idea/workspace.xml
# Editor-based HTTP Client requests
.idea/httpRequests
fly.toml
44 changes: 43 additions & 1 deletion chatfiles-ui/components/Chat/Chat.tsx
@@ -9,6 +9,8 @@ import {ChatMessage} from './ChatMessage';
import {ErrorMessageDiv} from './ErrorMessageDiv';
import {ModelSelect} from './ModelSelect';
import {Upload} from "@/components/Chat/Upload";
import {CHAT_FILES_MAX_SIZE} from "@/utils/app/const";
import {humanFileSize} from "@/utils/app/files";

interface Props {
conversation: Conversation;
@@ -47,6 +49,8 @@ export const Chat: FC<Props> = memo(
const [autoScrollEnabled, setAutoScrollEnabled] = useState(true);
const [showSettings, setShowSettings] = useState<boolean>(false);
const [isUploading, setIsUploading] = useState<boolean>(false);
const [errorMsg, setErrorMsg] = useState<string>();
const [isUploadSuccess, setIsUploadSuccess] = useState(true);

const messagesEndRef = useRef<HTMLDivElement>(null);
const chatContainerRef = useRef<HTMLDivElement>(null);
@@ -60,6 +64,14 @@
setIsUploading(isUploading);
}

const handleIsUploadSuccess = (isUploadSuccess: boolean) => {
setIsUploadSuccess(isUploadSuccess);
}

const handleUploadError = (errorMsg: string) => {
setErrorMsg(errorMsg);
}

const onClearAll = () => {
if (confirm(t<string>('Are you sure you want to clear all messages?'))) {
onUpdateConversation(conversation, { key: 'messages', value: [] });
@@ -138,13 +150,43 @@
>
{(conversation.index?.indexName.length === 0) && (conversation.messages.length === 0) ? (
<>
{!isUploadSuccess ? (
<>
<div id="alert-2" className="flex p-4 mb-4 text-red-800 rounded-lg bg-red-50 dark:bg-gray-800 dark:text-red-400" role="alert">
<svg aria-hidden="true" className="flex-shrink-0 w-5 h-5" fill="currentColor" viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg"><path fillRule="evenodd" d="M18 10a8 8 0 11-16 0 8 8 0 0116 0zm-7-4a1 1 0 11-2 0 1 1 0 012 0zM9 9a1 1 0 000 2v3a1 1 0 001 1h1a1 1 0 100-2v-3a1 1 0 00-1-1H9z" clipRule="evenodd"></path></svg>
<span className="sr-only">Error</span>
<div className="ml-3 text-sm font-medium">
{errorMsg}.
</div>
<button type="button" onClick={() => handleIsUploadSuccess(true)} className="ml-auto -mx-1.5 -my-1.5 bg-red-50 text-red-500 rounded-lg focus:ring-2 focus:ring-red-400 p-1.5 hover:bg-red-200 inline-flex h-8 w-8 dark:bg-gray-800 dark:text-red-400 dark:hover:bg-gray-700" data-dismiss-target="#alert-2" aria-label="Close">
<span className="sr-only">Close</span>
<svg className="w-5 h-5" fill="currentColor" viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg"><path fillRule="evenodd" d="M4.293 4.293a1 1 0 011.414 0L10 8.586l4.293-4.293a1 1 0 111.414 1.414L11.414 10l4.293 4.293a1 1 0 01-1.414 1.414L10 11.414l-4.293 4.293a1 1 0 01-1.414-1.414L8.586 10 4.293 5.707a1 1 0 010-1.414z" clipRule="evenodd"></path></svg>
</button>
</div>
</>
): undefined}

<div className="mx-auto flex w-[350px] flex-col space-y-10 pt-12 sm:w-[600px]">
<div className="flex h-full flex-col space-y-4 rounded border border-neutral-200 p-4 dark:border-neutral-600">
<Upload onIndexChange={(index) =>
onUpdateConversation(conversation, {
key: 'index',
value: index,})}
handleIsUploading={handleIsUploading}/>
handleIsUploading={handleIsUploading}
handleIsUploadSuccess={handleIsUploadSuccess}
handleUploadError={handleUploadError}
/>
{CHAT_FILES_MAX_SIZE != 0 &&
<>
<p className="mt-2 px-8 text-xs text-gray-500 dark:text-gray-400">This environment is only for trial and supports a maximum file size of {humanFileSize(CHAT_FILES_MAX_SIZE)}.</p>
<p className="mt-2 px-8 text-xs text-gray-500 dark:text-gray-400">Here are some good starting questions:
<a className="text-xs text-gray-500 dark:text-gray-400 underline" href="https://github.com/guangzhengli/ChatFiles/blob/dev/doc/Example.md"> Good Examples .</a>
</p>
<p className="mt-2 px-8 text-xs text-gray-500 dark:text-gray-400">If you need to upload larger files, please deploy your own chatfiles by:
<a className="text-xs text-gray-500 dark:text-gray-400 underline" href="https://github.com/guangzhengli/ChatFiles"> Chatfiles .</a>
</p>
</>
}
</div>
</div>
{isUploading ? (
14 changes: 14 additions & 0 deletions chatfiles-ui/components/Chat/ChatInput.tsx
@@ -155,6 +155,20 @@ export const ChatInput: FC<Props> = ({
</button>
</div>
</div>
<div className="px-3 pt-2 pb-3 text-center text-[12px] text-black/50 dark:text-white/50 md:px-4 md:pt-3 md:pb-6">
<a
href="https://github.com/guangzhengli/ChatFiles"
target="_blank"
rel="noreferrer"
className="underline"
>
ChatFiles
</a>
{' '}
{t(
"aims to establish embeddings for ChatGPT and facilitate its ability to engage in document-based conversations.",
)}
</div>
</div>
);
};
26 changes: 23 additions & 3 deletions chatfiles-ui/components/Chat/Upload.tsx
@@ -1,12 +1,21 @@
import {LlamaIndex} from "@/types";
import { CHAT_FILES_MAX_SIZE } from "@/utils/app/const";
import {humanFileSize} from "@/utils/app/files";

interface Props {
onIndexChange: (index: LlamaIndex) => void;
handleIsUploading: (isUploading: boolean) => void;
handleIsUploadSuccess: (isUploadSuccess: boolean) => void;
handleUploadError: (error: string) => void;
}
export const Upload = ({onIndexChange, handleIsUploading}: Props) => {
export const Upload = ({onIndexChange, handleIsUploading, handleIsUploadSuccess, handleUploadError}: Props) => {

const handleFile = async (file: File) => {
if (!validateFile(file)) {
handleIsUploadSuccess(false);
return;
}

handleIsUploading(true);

try {
@@ -23,13 +32,24 @@ export const Upload = ({onIndexChange, handleIsUploading}: Props) => {
console.log("import file index json name:", indexName)
onIndexChange({ indexName: indexName });
handleIsUploading(false);
handleIsUploadSuccess(true);
} catch (e) {
console.error(e);
handleUploadError((e as Error).message);
handleIsUploading(false);
handleIsUploadSuccess(false);
}
};


const validateFile = (file: File) => {
console.log(`selected file size: ${humanFileSize(file.size)}`);
console.log(`max file size: ${humanFileSize(CHAT_FILES_MAX_SIZE)}`);
if (CHAT_FILES_MAX_SIZE != 0 && file.size > CHAT_FILES_MAX_SIZE) {
handleUploadError(`Please select a file smaller than ${humanFileSize(CHAT_FILES_MAX_SIZE)}`);
return false;
}
return true;
};

return (
<div className="flex items-center justify-center w-full">
@@ -43,7 +63,7 @@ export const Upload = ({onIndexChange, handleIsUploading}: Props) => {
</svg>
<p className="mb-2 text-sm text-gray-500 dark:text-gray-400"><span className="font-semibold">Click to upload</span> or
drag and drop</p>
<p className="text-xs text-gray-500 dark:text-gray-400">TXT, PDF, EPUB...</p>
<p className="text-xs text-gray-500 dark:text-gray-400">File supported types: TXT, PDF, EPUB, Markdown...</p>
</div>
<input id="dropzone-file" type="file" className="hidden" onChange={(e) => {
if (e.target.files && e.target.files[0]) {
26 changes: 26 additions & 0 deletions chatfiles-ui/fly.toml
@@ -0,0 +1,26 @@
# fly.toml file generated for chatfiles-ui on 2023-04-09T21:13:37+08:00

app = "chatfiles-ui"
kill_signal = "SIGINT"
kill_timeout = 5
mounts = []
primary_region = "lax"
processes = []

[[services]]
internal_port = 3000
processes = ["app"]
protocol = "tcp"
[services.concurrency]
hard_limit = 25
soft_limit = 20
type = "connections"

[[services.ports]]
force_https = true
handlers = ["http"]
port = 80

[[services.ports]]
handlers = ["tls", "http"]
port = 443
9 changes: 9 additions & 0 deletions chatfiles-ui/package-lock.json

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions chatfiles-ui/package.json
@@ -12,6 +12,7 @@
"dependencies": {
"@dqbd/tiktoken": "^1.0.2",
"@tabler/icons-react": "^2.9.0",
"dotenv": "^16.0.3",
"eventsource-parser": "^0.1.0",
"flowbite": "^1.6.4",
"formidable": "^2.1.1",
6 changes: 5 additions & 1 deletion chatfiles-ui/utils/app/const.ts
@@ -5,4 +5,8 @@ export const OPENAI_API_HOST =
process.env.OPENAI_API_HOST || 'https://api.openai.com';

export const CHAT_FILES_SERVER_HOST =
process.env.CHAT_FILES_SERVER_HOST || 'http://127.0.0.1:5000';
process.env.CHAT_FILES_SERVER_HOST || 'http://127.0.0.1:5000';

export const CHAT_FILES_MAX_SIZE =
parseInt(process.env.NEXT_PUBLIC_CHAT_FILES_MAX_SIZE || '') || 0;
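
The new CHAT_FILES_MAX_SIZE constant is driven by the NEXT_PUBLIC_CHAT_FILES_MAX_SIZE environment variable (a byte count that Next.js exposes to the client); an unset or unparseable value falls back to 0, which the Chat UI and the Upload validation treat as "no limit". A minimal sketch of that fallback behaviour, using a hypothetical parseMaxSize helper that mirrors the expression above:

// Sketch: how the parseInt fallback resolves for different env values
// (parseMaxSize is a hypothetical helper, not part of this change).
const parseMaxSize = (raw?: string): number => parseInt(raw || '') || 0;

console.log(parseMaxSize(undefined));   // 0         -> size check disabled
console.log(parseMaxSize('10485760')); // 10485760  -> 10 MB limit
console.log(parseMaxSize('abc'));      // 0         -> an invalid value also disables the check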

9 changes: 9 additions & 0 deletions chatfiles-ui/utils/app/files.ts
@@ -0,0 +1,9 @@
export const humanFileSize = (size: number): string => {
const units = ["Bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
let i = 0;
while (size >= 1024 && i < units.length - 1) {
size /= 1024;
i++;
}
return `${size.toFixed(0)} ${units[i]}`;
};
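
As a quick sanity check of the formatter above (a sketch assuming it is imported from the new utils module), sizes are divided by 1024 until they fit the largest whole unit and then rounded:

import { humanFileSize } from '@/utils/app/files';

// Sketch: expected output for a few representative sizes
console.log(humanFileSize(512));              // "512 Bytes"
console.log(humanFileSize(10 * 1024 * 1024)); // "10 MB"
console.log(humanFileSize(1536));             // "2 KB" (1.5 KB rounded up by toFixed(0))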
5 changes: 5 additions & 0 deletions chatfiles-ui/yarn.lock
@@ -882,6 +882,11 @@ domexception@^4.0.0:
dependencies:
webidl-conversions "^7.0.0"

dotenv@^16.0.3:
version "16.0.3"
resolved "https://registry.npmjs.org/dotenv/-/dotenv-16.0.3.tgz"
integrity sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==

electron-to-chromium@^1.4.284:
version "1.4.341"
resolved "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.341.tgz"
22 changes: 22 additions & 0 deletions chatfiles/.dockerignore
@@ -0,0 +1,22 @@
# flyctl launch added from .gitignore
**/__pycache__
**/*documents
**/*pure

# Environments
**/.env

# flyctl launch added from .idea/.gitignore
# Default ignored files
.idea/shelf
.idea/workspace.xml
# Editor-based HTTP Client requests
.idea/httpRequests
# Datasource local storage ignored files
.idea/dataSources
.idea/dataSources.local.xml

# flyctl launch added from pure/.gitignore
# created by virtualenv automatically
pure/**/*
fly.toml
5 changes: 2 additions & 3 deletions chatfiles/chat.py
@@ -1,7 +1,6 @@
from llama_index import GPTSimpleVectorIndex, SimpleDirectoryReader

from file import get_index_filepath, get_index_name_from_file_name, check_index_file_exists, \
get_index_name_without_json_extension
from file import get_index_filepath, get_index_name_from_file_name, check_index_file_exists
from llm import get_index_by_index_name
from prompt import get_prompt

@@ -16,7 +15,7 @@ def create_llama_index(filepath):
documents = SimpleDirectoryReader(input_files=[filepath]).load_data()
index = GPTSimpleVectorIndex.from_documents(documents)
index.save_to_disk(get_index_filepath(index_name))
return get_index_name_without_json_extension(index_name)
return index_name


def get_answer_from_llama_index(text, index_name):
26 changes: 26 additions & 0 deletions chatfiles/fly.toml
@@ -0,0 +1,26 @@
# fly.toml file generated for chatfiles-test on 2023-04-09T21:34:21+08:00

app = "chatfiles"
kill_signal = "SIGINT"
kill_timeout = 60
mounts = []
primary_region = "lax"
processes = []

[[services]]
internal_port = 5000
processes = ["app"]
protocol = "tcp"
[services.concurrency]
hard_limit = 25
soft_limit = 20
type = "connections"

[[services.ports]]
force_https = true
handlers = ["http"]
port = 80

[[services.ports]]
handlers = ["tls", "http"]
port = 443