generated from obsidianmd/obsidian-sample-plugin
-
Notifications
You must be signed in to change notification settings - Fork 65
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #124 from bramses/add-ollama
- Added model name to the assistant divider.
- Improved error messaging in chat.
- Fixed `inferTitle` to use local models.
- Introduced `AIService` and `IAIService` interface for simplification.
- Added `aiService` parameter.
- Made AI services more concise and readable.
- Fixed non-stream fetch for Ollama.
- Cleaned up role handling in messages for better readability.
- Added constants for roles and role identifiers.
- Introduced `OllamaService` prototype.
- Loading branch information
Showing 15 changed files with 811 additions and 603 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,23 +1,43 @@ | ||
export const HORIZONTAL_RULE_CLASS = "__chatgpt_plugin"; | ||
export const ROLE_USER = "user"; | ||
export const ROLE_ASSISTANT = "assistant"; | ||
export const ROLE_DEVELOPER = "developer"; | ||
export const ROLE_SYSTEM = "system"; | ||
export const CHAT_FOLDER_TYPE = "chatFolder"; | ||
export const CHAT_TEMPLATE_FOLDER_TYPE = "chatTemplateFolder"; | ||
export const HORIZONTAL_LINE_MD = `<hr class="${HORIZONTAL_RULE_CLASS}">`; | ||
export const COMMENT_BLOCK_START = `=begin-chatgpt-md-comment\n\n`; | ||
export const COMMENT_BLOCK_END = `=end-chatgpt-md-comment`; | ||
export const DEFAULT_HEADING_LEVEL = 0; | ||
export const CALL_CHATGPT_API_COMMAND_ID = "call-chatgpt-api"; | ||
export const ADD_HR_COMMAND_ID = "add-hr"; | ||
export const AI_SERVICE_OLLAMA = "ollama"; | ||
export const AI_SERVICE_OPENAI = "openai"; | ||
|
||
export const ADD_COMMENT_BLOCK_COMMAND_ID = "add-comment-block"; | ||
export const ADD_HR_COMMAND_ID = "add-hr"; | ||
export const CALL_CHATGPT_API_COMMAND_ID = "call-chatgpt-api"; | ||
export const STOP_STREAMING_COMMAND_ID = "stop-streaming"; | ||
export const INFER_TITLE_COMMAND_ID = "infer-title"; | ||
export const MOVE_TO_CHAT_COMMAND_ID = "move-to-chat"; | ||
export const INFER_TITLE_COMMAND_ID = "infer-title"; | ||
export const CHOOSE_CHAT_TEMPLATE_COMMAND_ID = "choose-chat-template"; | ||
export const CLEAR_CHAT_COMMAND_ID = "clear-chat"; | ||
export const ROLE_IDENTIFIER = "role::"; | ||
|
||
export const ROLE_HEADER = (headingPrefix: string, role: string) => | ||
`\n\n${HORIZONTAL_LINE_MD}\n\n${headingPrefix}${ROLE_IDENTIFIER}${role}\n\n`; | ||
export const CHAT_ERROR_MESSAGE_401 = | ||
"I am sorry. There was an authorization issue with the external API (Status 401).\nPlease check your API key in the settings"; | ||
export const CHAT_ERROR_MESSAGE_NO_CONNECTION = | ||
"I am sorry. There was an issue reaching the network.\nPlease check your network connection."; | ||
export const CHAT_ERROR_MESSAGE_404 = | ||
"I am sorry, your request looks wrong. Please check your URL or model name in the settings or frontmatter."; | ||
export const CHAT_ERROR_RESPONSE = | ||
"I am sorry, I could not answer your request because of an error, here is what went wrong:"; | ||
|
||
export const CHAT_FOLDER_TYPE = "chatFolder"; | ||
export const CHAT_TEMPLATE_FOLDER_TYPE = "chatTemplateFolder"; | ||
|
||
export const NEWLINE = "\n\n"; | ||
|
||
export const COMMENT_BLOCK_START = `=begin-chatgpt-md-comment${NEWLINE}`; | ||
export const COMMENT_BLOCK_END = `=end-chatgpt-md-comment`; | ||
|
||
export const DEFAULT_HEADING_LEVEL = 0; | ||
export const MAX_HEADING_LEVEL = 6; | ||
export const MIN_AUTO_INFER_MESSAGES = 4; | ||
|
||
export const ERROR_NO_CONNECTION = "Failed to fetch"; | ||
|
||
export const HORIZONTAL_LINE_CLASS = "__chatgpt_plugin"; | ||
export const HORIZONTAL_LINE_MD = `<hr class="${HORIZONTAL_LINE_CLASS}">`; | ||
|
||
export const ROLE_IDENTIFIER = "role::"; | ||
export const ROLE_ASSISTANT = "assistant"; | ||
export const ROLE_DEVELOPER = "developer"; | ||
export const ROLE_SYSTEM = "system"; | ||
export const ROLE_USER = "user"; |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,36 @@ | ||
import { Message } from "src/Models/Message"; | ||
import { Editor, MarkdownView } from "obsidian"; | ||
import { OpenAIConfig, OpenAiService } from "src/Services/OpenAiService"; | ||
import { StreamManager } from "src/stream"; | ||
import { AI_SERVICE_OLLAMA, AI_SERVICE_OPENAI } from "src/Constants"; | ||
import { OllamaConfig, OllamaService } from "src/Services/OllamaService"; | ||
import { EditorService } from "src/Services/EditorService"; | ||
|
||
/**
 * Common contract implemented by the concrete AI backends
 * (OpenAiService, OllamaService) so callers can talk to either
 * interchangeably.
 */
export interface IAiApiService {
  /**
   * Sends the chat messages to the backing AI API.
   *
   * @param messages      conversation messages to submit
   * @param options       backend-specific config overrides (OpenAI or Ollama)
   * @param headingPrefix markdown heading prefix used when writing output
   * @param editor        editor to write the response into, when provided
   * @param setAtCursor   presumably inserts at the cursor instead of appending — verify against implementations
   * @param apiKey        API key for backends that require one
   * @returns the API response; typed `any` — NOTE(review): tighten once both services agree on a shape
   */
  callAIAPI(
    messages: Message[],
    options: Partial<OpenAIConfig> | Partial<OllamaConfig>,
    headingPrefix: string,
    editor?: Editor,
    setAtCursor?: boolean,
    apiKey?: string
  ): Promise<any>;

  /**
   * Infers a note title from the given messages and applies it via the
   * editor service.
   *
   * @param view          active markdown view whose note is being titled
   * @param settings      backend-specific config (OpenAI or Ollama)
   * @param messages      message texts to infer the title from
   * @param editorService service used to update the note
   * @returns NOTE(review): untyped `any` — likely a Promise; confirm against implementations
   */
  inferTitle(
    view: MarkdownView,
    settings: Partial<OpenAIConfig> | Partial<OllamaConfig>,
    messages: string[],
    editorService: EditorService
  ): any;
}
|
||
export const getAiApiService = (streamManager: StreamManager, settings: any): IAiApiService => { | ||
switch (settings) { | ||
case AI_SERVICE_OPENAI: | ||
return new OpenAiService(streamManager); | ||
case AI_SERVICE_OLLAMA: | ||
return new OllamaService(streamManager); | ||
default: | ||
throw new Error("Unsupported API type"); | ||
} | ||
}; |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.