Skip to content

Commit

Permalink
Merge remote-tracking branch 'connectai/feature-cache-storage' into feature-cache-storage
Browse files Browse the repository at this point in the history
  • Loading branch information
lloydzhou committed Jul 19, 2024
2 parents 1610b48 + 7237d33 commit ac470a6
Show file tree
Hide file tree
Showing 4 changed files with 11 additions and 10 deletions.
3 changes: 1 addition & 2 deletions app/client/platforms/anthropic.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ import { ChatOptions, getHeaders, LLMApi, MultimodalContent } from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { getClientConfig } from "@/app/config/client";
import { DEFAULT_API_HOST } from "@/app/constant";
import { RequestMessage } from "@/app/typing";
import {
EventStreamContentType,
fetchEventSource,
Expand Down Expand Up @@ -95,7 +94,7 @@ export class ClaudeApi implements LLMApi {
};

// try get base64image from local cache image_url
const messages = [];
const messages: ChatOptions["messages"] = [];
for (const v of options.messages) {
const content = await preProcessImageContent(v.content);
messages.push({ role: v.role, content });
Expand Down
2 changes: 1 addition & 1 deletion app/client/platforms/google.ts
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ export class GeminiProApi implements LLMApi {
let multimodal = false;

// try get base64image from local cache image_url
const _messages = [];
const _messages: ChatOptions["messages"] = [];
for (const v of options.messages) {
const content = await preProcessImageContent(v.content);
_messages.push({ role: v.role, content });
Expand Down
2 changes: 1 addition & 1 deletion app/client/platforms/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ export class ChatGPTApi implements LLMApi {

async chat(options: ChatOptions) {
const visionModel = isVisionModel(options.config.model);
const messages = [];
const messages: ChatOptions["messages"] = [];
for (const v of options.messages) {
const content = visionModel
? await preProcessImageContent(v.content)
Expand Down
14 changes: 8 additions & 6 deletions app/utils/chat.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import { CACHE_URL_PREFIX, UPLOAD_URL } from "@/app/constant";
// import heic2any from "heic2any";
import { RequestMessage } from "@/app/client/api";

export function compressImage(file: File, maxSize: number): Promise<string> {
export function compressImage(file: Blob, maxSize: number): Promise<string> {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = (readerEvent: any) => {
Expand Down Expand Up @@ -43,10 +44,10 @@ export function compressImage(file: File, maxSize: number): Promise<string> {
if (file.type.includes("heic")) {
const heic2any = require("heic2any");
heic2any({ blob: file, toType: "image/jpeg" })
.then((blob) => {
reader.readAsDataURL(blob as Blob);
.then((blob: Blob) => {
reader.readAsDataURL(blob);
})
.catch((e) => {
.catch((e: any) => {
reject(e);
});
}
Expand All @@ -73,7 +74,7 @@ export async function preProcessImageContent(
return result;
}

const imageCaches = {};
const imageCaches: Record<string, string> = {};
export function cacheImageToBase64Image(imageUrl: string) {
if (imageUrl.includes(CACHE_URL_PREFIX)) {
if (!imageCaches[imageUrl]) {
Expand All @@ -85,7 +86,8 @@ export function cacheImageToBase64Image(imageUrl: string) {
})
.then((res) => res.blob())
.then(
(blob) => (imageCaches[imageUrl] = compressImage(blob, 256 * 1024)),
async (blob) =>
(imageCaches[imageUrl] = await compressImage(blob, 256 * 1024)),
); // compressImage
}
return Promise.resolve(imageCaches[imageUrl]);
Expand Down

0 comments on commit ac470a6

Please sign in to comment.