Commit
Merge pull request #124 from bramses/add-ollama
- Added model name to the assistant divider.
- Improved error messaging in chat.
- Fixed `inferTitle` to use local models.
- Introduced `AIService` and `IAIService` interface for simplification.
- Added `aiService` parameter.
- Made AI services more concise and readable.
- Fixed non-stream fetch for Ollama.
- Cleaned up role handling in messages for better readability.
- Added constants for roles and role identifiers.
- Introduced `OllamaService` prototype.
DenizOkcu authored Feb 3, 2025
2 parents a4145e9 + 8eb5992 · commit d028170
Showing 15 changed files with 811 additions and 603 deletions.
3 changes: 2 additions & 1 deletion .eslintrc
@@ -16,8 +16,9 @@
"rules": {
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": ["error", { "args": "none" }],
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/ban-ts-comment": "off",
"no-prototype-builtins": "off",
"@typescript-eslint/no-empty-function": "off"
}
}
}
2 changes: 1 addition & 1 deletion CHANELOG.md
@@ -6,7 +6,7 @@

- Centralized statusBar updates and improved response processing (@Deniz Okcu)
- Added command IDs and other important strings as constants (@Deniz Okcu)
- Moved helper methods to EditorService and OpenAIService (@Deniz Okcu)
- Moved helper methods to EditorService and OpenAiService (@Deniz Okcu)
- Improved code organization and imports (@Deniz Okcu)
- Added more configuration values (@Deniz Okcu)
- Extracted data structures to Models (@Deniz Okcu)
13 changes: 5 additions & 8 deletions package.json
@@ -12,18 +12,15 @@
"author": "",
"license": "MIT",
"devDependencies": {
"@types/node": "^22.10.2",
"@typescript-eslint/eslint-plugin": "8.18.1",
"@typescript-eslint/parser": "8.18.1",
"@types/node": "^22.10.7",
"@typescript-eslint/eslint-plugin": "8.20.0",
"@typescript-eslint/parser": "8.20.0",
"builtin-modules": "4.0.0",
"esbuild": "0.24.2",
"eslint": "^9.17.0",
"eslint": "^9.18.0",
"obsidian": "latest",
"prettier": "^3.4.2",
"tslib": "2.8.1",
"typescript": "5.7.2"
},
"dependencies": {
"sse": "github:mpetazzoni/sse.js#v2.5.0"
"typescript": "5.7.3"
}
}
54 changes: 37 additions & 17 deletions src/Constants.ts
@@ -1,23 +1,43 @@
export const HORIZONTAL_RULE_CLASS = "__chatgpt_plugin";
export const ROLE_USER = "user";
export const ROLE_ASSISTANT = "assistant";
export const ROLE_DEVELOPER = "developer";
export const ROLE_SYSTEM = "system";
export const CHAT_FOLDER_TYPE = "chatFolder";
export const CHAT_TEMPLATE_FOLDER_TYPE = "chatTemplateFolder";
export const HORIZONTAL_LINE_MD = `<hr class="${HORIZONTAL_RULE_CLASS}">`;
export const COMMENT_BLOCK_START = `=begin-chatgpt-md-comment\n\n`;
export const COMMENT_BLOCK_END = `=end-chatgpt-md-comment`;
export const DEFAULT_HEADING_LEVEL = 0;
export const CALL_CHATGPT_API_COMMAND_ID = "call-chatgpt-api";
export const ADD_HR_COMMAND_ID = "add-hr";
export const AI_SERVICE_OLLAMA = "ollama";
export const AI_SERVICE_OPENAI = "openai";

export const ADD_COMMENT_BLOCK_COMMAND_ID = "add-comment-block";
export const ADD_HR_COMMAND_ID = "add-hr";
export const CALL_CHATGPT_API_COMMAND_ID = "call-chatgpt-api";
export const STOP_STREAMING_COMMAND_ID = "stop-streaming";
export const INFER_TITLE_COMMAND_ID = "infer-title";
export const MOVE_TO_CHAT_COMMAND_ID = "move-to-chat";
export const INFER_TITLE_COMMAND_ID = "infer-title";
export const CHOOSE_CHAT_TEMPLATE_COMMAND_ID = "choose-chat-template";
export const CLEAR_CHAT_COMMAND_ID = "clear-chat";
export const ROLE_IDENTIFIER = "role::";

export const ROLE_HEADER = (headingPrefix: string, role: string) =>
`\n\n${HORIZONTAL_LINE_MD}\n\n${headingPrefix}${ROLE_IDENTIFIER}${role}\n\n`;
export const CHAT_ERROR_MESSAGE_401 =
"I am sorry. There was an authorization issue with the external API (Status 401).\nPlease check your API key in the settings";
export const CHAT_ERROR_MESSAGE_NO_CONNECTION =
"I am sorry. There was an issue reaching the network.\nPlease check your network connection.";
export const CHAT_ERROR_MESSAGE_404 =
"I am sorry, your request looks wrong. Please check your URL or model name in the settings or frontmatter.";
export const CHAT_ERROR_RESPONSE =
"I am sorry, I could not answer your request because of an error, here is what went wrong:";

export const CHAT_FOLDER_TYPE = "chatFolder";
export const CHAT_TEMPLATE_FOLDER_TYPE = "chatTemplateFolder";

export const NEWLINE = "\n\n";

export const COMMENT_BLOCK_START = `=begin-chatgpt-md-comment${NEWLINE}`;
export const COMMENT_BLOCK_END = `=end-chatgpt-md-comment`;

export const DEFAULT_HEADING_LEVEL = 0;
export const MAX_HEADING_LEVEL = 6;
export const MIN_AUTO_INFER_MESSAGES = 4;

export const ERROR_NO_CONNECTION = "Failed to fetch";

export const HORIZONTAL_LINE_CLASS = "__chatgpt_plugin";
export const HORIZONTAL_LINE_MD = `<hr class="${HORIZONTAL_LINE_CLASS}">`;

export const ROLE_IDENTIFIER = "role::";
export const ROLE_ASSISTANT = "assistant";
export const ROLE_DEVELOPER = "developer";
export const ROLE_SYSTEM = "system";
export const ROLE_USER = "user";
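The `CHAT_ERROR_*` strings above back the "Improved error messaging in chat" bullet from the commit message. The code that actually surfaces them lives in the service classes, which are not among the hunks shown here, so the following is only a hedged sketch of how such constants could map a failed request to a chat reply; the `chatErrorMessage` helper and its parameters are illustrative, not part of this PR.

```ts
// Illustrative only — this helper is not in the PR; it just shows one way the
// constants above could translate an HTTP failure into the text written to the note.
import {
  CHAT_ERROR_MESSAGE_401,
  CHAT_ERROR_MESSAGE_404,
  CHAT_ERROR_MESSAGE_NO_CONNECTION,
  CHAT_ERROR_RESPONSE,
  ERROR_NO_CONNECTION,
} from "src/Constants";

function chatErrorMessage(err: unknown, status?: number): string {
  if (status === 401) return CHAT_ERROR_MESSAGE_401;
  if (status === 404) return CHAT_ERROR_MESSAGE_404;
  // "Failed to fetch" is what fetch() rejects with when the host is unreachable.
  if (err instanceof Error && err.message === ERROR_NO_CONNECTION) {
    return CHAT_ERROR_MESSAGE_NO_CONNECTION;
  }
  return `${CHAT_ERROR_RESPONSE} ${String(err)}`;
}
```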
2 changes: 1 addition & 1 deletion src/Models/Config.ts
@@ -1,4 +1,4 @@
import { DEFAULT_OPENAI_CONFIG } from "src/Services/OpenAIService";
import { DEFAULT_OPENAI_CONFIG } from "src/Services/OpenAiService";

export const DEFAULT_CHAT_FRONT_MATTER = `---
system_commands: ['I am a helpful assistant.']
36 changes: 36 additions & 0 deletions src/Services/AiService.ts
@@ -0,0 +1,36 @@
import { Message } from "src/Models/Message";
import { Editor, MarkdownView } from "obsidian";
import { OpenAIConfig, OpenAiService } from "src/Services/OpenAiService";
import { StreamManager } from "src/stream";
import { AI_SERVICE_OLLAMA, AI_SERVICE_OPENAI } from "src/Constants";
import { OllamaConfig, OllamaService } from "src/Services/OllamaService";
import { EditorService } from "src/Services/EditorService";

export interface IAiApiService {
callAIAPI(
messages: Message[],
options: Partial<OpenAIConfig> | Partial<OllamaConfig>,
headingPrefix: string,
editor?: Editor,
setAtCursor?: boolean,
apiKey?: string
): Promise<any>;

inferTitle(
view: MarkdownView,
settings: Partial<OpenAIConfig> | Partial<OllamaConfig>,
messages: string[],
editorService: EditorService
): any;
}

export const getAiApiService = (streamManager: StreamManager, settings: any): IAiApiService => {
switch (settings) {
case AI_SERVICE_OPENAI:
return new OpenAiService(streamManager);
case AI_SERVICE_OLLAMA:
return new OllamaService(streamManager);
default:
throw new Error("Unsupported API type");
}
};
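A minimal sketch of how the factory above might be called from the plugin's chat command. Only `getAiApiService`, `IAiApiService`, `StreamManager`, and `Message` come from the code in this PR; the wrapper function and its arguments are placeholders, not actual plugin code.

```ts
import { Editor } from "obsidian";
import { Message } from "src/Models/Message";
import { StreamManager } from "src/stream";
import { getAiApiService, IAiApiService } from "src/Services/AiService";

// Placeholder wiring: `frontmatter` stands for the merged settings/frontmatter
// object, whose aiService field is either "openai" or "ollama".
async function runChat(
  streamManager: StreamManager,
  frontmatter: any,
  messages: Message[],
  headingPrefix: string,
  editor: Editor,
  apiKey: string
) {
  const aiService: IAiApiService = getAiApiService(streamManager, frontmatter.aiService);
  // The same call works for both providers; the concrete service hides the HTTP details.
  return aiService.callAIAPI(messages, frontmatter, headingPrefix, editor, false, apiKey);
}
```

Note that `getAiApiService` switches on the service string itself (`AI_SERVICE_OPENAI` / `AI_SERVICE_OLLAMA`), so its second argument is the `aiService` value computed in `EditorService.getFrontmatter`, not the whole settings object.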
93 changes: 51 additions & 42 deletions src/Services/EditorService.ts
@@ -2,6 +2,7 @@ import { App, Editor, MarkdownView, Notice } from "obsidian";
import { createFolderModal } from "src/Utilities/ModalHelpers";
import {
extractRoleAndMessage,
getHeaderRole,
getHeadingPrefix,
parseSettingsFrontmatter,
removeCommentsFromMessages,
@@ -11,18 +12,23 @@
} from "src/Utilities/TextHelpers";
import { ChatGPT_MDSettings } from "src/Models/Config";
import { ChatTemplates } from "src/Views/ChatTemplates";
import { DEFAULT_OPENAI_CONFIG, inferTitleFromMessages } from "src/Services/OpenAIService";
import { DEFAULT_OPENAI_CONFIG } from "src/Services/OpenAiService";
import {
AI_SERVICE_OLLAMA,
AI_SERVICE_OPENAI,
CHAT_FOLDER_TYPE,
CHAT_TEMPLATE_FOLDER_TYPE,
DEFAULT_HEADING_LEVEL,
HORIZONTAL_RULE_CLASS,
HORIZONTAL_LINE_CLASS,
MAX_HEADING_LEVEL,
NEWLINE,
ROLE_ASSISTANT,
ROLE_DEVELOPER,
ROLE_HEADER,
ROLE_IDENTIFIER,
ROLE_SYSTEM,
ROLE_USER,
} from "src/Constants";
import { DEFAULT_OLLAMA_API_CONFIG } from "src/Services/OllamaService";

export class EditorService {
constructor(private app: App) {}
@@ -71,11 +77,9 @@ export class EditorService {
}

addHorizontalRule(editor: Editor, role: string, headingLevel: number): void {
const NEWLINE = "\n\n";

const formattedContent = [
NEWLINE,
`<hr class="${HORIZONTAL_RULE_CLASS}">`,
`<hr class="${HORIZONTAL_LINE_CLASS}">`,
NEWLINE,
`${getHeadingPrefix(headingLevel)}${ROLE_IDENTIFIER}${role}`,
NEWLINE,
@@ -111,7 +115,7 @@

const newFile = await this.app.vault.create(
`${settings.chatFolder}/${this.getDate(new Date(), settings.dateFormat)}.md`,
`${settings.defaultChatFrontmatter}\n\n${selectedText}`
`${settings.defaultChatFrontmatter}${NEWLINE}${selectedText}`
);

// open new file
@@ -131,8 +135,8 @@
}
}

appendMessage(editor: Editor, role: string, message: string, headingLevel: number): void {
const newLine = `${ROLE_HEADER(getHeadingPrefix(headingLevel), role)}${message}${ROLE_HEADER(getHeadingPrefix(headingLevel), ROLE_USER)}`;
appendMessage(editor: Editor, message: string, headingLevel: number): void {
const newLine = `${getHeaderRole(getHeadingPrefix(headingLevel), ROLE_ASSISTANT)}${message}${getHeaderRole(getHeadingPrefix(headingLevel), ROLE_USER)}`;
editor.replaceRange(newLine, editor.getCursor());
}

@@ -204,7 +208,7 @@ export class EditorService {
messagesWithRole.unshift(
...systemCommands.map((command: string) => {
return {
role: ROLE_DEVELOPER,
role: frontmatter.aiService == AI_SERVICE_OPENAI ? ROLE_DEVELOPER : ROLE_SYSTEM,
content: command,
};
})
@@ -267,6 +271,23 @@ export class EditorService {
.replace("ss", paddedSecond);
}

aiProviderFromUrl(url?: string, model?: string): string {
const trimmedUrl = (url ?? "").trim().toLowerCase();
const trimmedModel = (model ?? "").trim().toLowerCase();

if (trimmedModel.includes("@")) {
const provider = trimmedModel.split("@")[0];
if (["local", AI_SERVICE_OLLAMA].includes(provider)) return AI_SERVICE_OLLAMA;
if (provider === AI_SERVICE_OPENAI) return AI_SERVICE_OPENAI;
}

if (trimmedUrl.startsWith("http://localhost") || trimmedUrl.startsWith("http://127.0.0.1")) {
return AI_SERVICE_OLLAMA;
}

return AI_SERVICE_OPENAI;
}

getFrontmatter(view: MarkdownView | null, settings: ChatGPT_MDSettings, app: App) {
const activeFile = view?.file || app.workspace.getActiveFile();
if (!activeFile) {
@@ -276,55 +297,42 @@ export class EditorService {
// get the settings frontmatter
const settingsFrontmatter = parseSettingsFrontmatter(settings.defaultChatFrontmatter);
// merge with frontmatter from current file
const noteFrontmatter = app.metadataCache.getFileCache(activeFile)?.frontmatter || {};
const metaMatter = {
...settingsFrontmatter,
...(app.metadataCache.getFileCache(activeFile)?.frontmatter || {}),
...noteFrontmatter,
};

if (!noteFrontmatter.url) {
delete metaMatter.url;
}

const aiService = this.aiProviderFromUrl(metaMatter.url, metaMatter.model);

const defaultConfig = aiService == AI_SERVICE_OPENAI ? DEFAULT_OPENAI_CONFIG : DEFAULT_OLLAMA_API_CONFIG;

return {
...DEFAULT_OPENAI_CONFIG,
...defaultConfig,
...metaMatter,
stream: metaMatter.stream ?? settings.stream ?? DEFAULT_OPENAI_CONFIG.stream,
title: view?.file?.basename ?? DEFAULT_OPENAI_CONFIG.title,
model: metaMatter.model.split("@")[1] || metaMatter.model,
aiService: aiService,
stream: metaMatter.stream ?? settings.stream ?? defaultConfig.stream,
title: view?.file?.basename ?? defaultConfig.title,
};
}

getHeadingPrefix(headingLevel: number): string {
if (headingLevel === DEFAULT_HEADING_LEVEL) {
return "";
} else if (headingLevel > 6) {
return "#".repeat(6) + " ";
} else if (headingLevel > MAX_HEADING_LEVEL) {
return "#".repeat(MAX_HEADING_LEVEL) + " ";
}
return "#".repeat(headingLevel) + " ";
}

async inferTitle(
editor: Editor,
view: MarkdownView,
settings: ChatGPT_MDSettings,
apiKey: string,
messages: string[]
): Promise<void> {
if (!view.file) {
throw new Error("No active file found");
}

console.log("[ChatGPT MD] auto inferring title from messages");

const inferredTitle = await inferTitleFromMessages(apiKey, messages, settings.inferTitleLanguage);
if (inferredTitle) {
console.log(`[ChatGPT MD] automatically inferred title: ${inferredTitle}. Changing file name...`);
await this.writeInferredTitle(view, settings.chatFolder, inferredTitle);
} else {
new Notice("[ChatGPT MD] Could not infer title", 5000);
}
}

async processResponse(editor: Editor, response: any, settings: ChatGPT_MDSettings) {
let responseStr = response;
if (response.mode === "streaming") {
responseStr = response.fullstr;
const newLine = ROLE_HEADER(this.getHeadingPrefix(settings.headingLevel), ROLE_USER);
const newLine = getHeaderRole(this.getHeadingPrefix(settings.headingLevel), ROLE_USER);
editor.replaceRange(newLine, editor.getCursor());

// move cursor to end of completion
Expand All @@ -335,11 +343,12 @@ export class EditorService {
};
editor.setCursor(newCursor);
} else {
let responseStr = response;
if (unfinishedCodeBlock(responseStr)) {
responseStr = responseStr + "\n```";
}

this.appendMessage(editor, ROLE_ASSISTANT, responseStr, settings.headingLevel);
this.appendMessage(editor, responseStr, settings.headingLevel);
}
}
}
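The provider detection added in `aiProviderFromUrl` keys off either a `provider@model` prefix in the model name or a localhost URL, and `getFrontmatter` then strips the prefix with `model.split("@")[1]`. A few illustrative calls, sketched under the assumption that an Obsidian `App` instance is at hand; the model names and the 11434 port are example values, not taken from this diff:

```ts
import { App } from "obsidian";
import { EditorService } from "src/Services/EditorService";

// Example values only — shows which backend each combination resolves to.
function detectProviders(app: App) {
  const editorService = new EditorService(app);

  editorService.aiProviderFromUrl(undefined, "local@llama3");            // "ollama"
  editorService.aiProviderFromUrl(undefined, "ollama@mistral");          // "ollama"
  editorService.aiProviderFromUrl("http://localhost:11434", "mistral");  // "ollama"
  editorService.aiProviderFromUrl("https://api.openai.com/v1", "gpt-4"); // "openai"
  return editorService.aiProviderFromUrl(undefined, "gpt-4");            // "openai" (default)
}
```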