fix: malicious redirections & security risk
louis-jan committed Dec 21, 2023
1 parent aeab8ed commit 8d5fce9
Showing 6 changed files with 6 additions and 9 deletions.
1 change: 1 addition & 0 deletions docs/src/containers/Banner/index.js
@@ -71,6 +71,7 @@ export default function AnnoncementBanner() {
key={i}
href={social.href}
target="_blank"
+ rel="noopener"
>
{social.icon}
</a>
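For context, a minimal self-contained sketch of the pattern being hardened in the docs components (component and prop names here are made up, not taken from this commit). A link opened with target="_blank" hands the new page a window.opener reference, which a malicious destination can use to navigate the original tab to a look-alike page, the "reverse tabnabbing" behind the malicious-redirection fix; rel="noopener" severs that reference.

    // Hypothetical TSX sketch of a hardened external-link list.
    import * as React from "react";

    type Social = { href: string; icon: React.ReactNode };

    export function SocialLinks({ socials }: { socials: Social[] }) {
      // rel="noopener" keeps the opened page from reaching back through
      // window.opener and redirecting this tab.
      return (
        <>
          {socials.map((social, i) => (
            <a key={i} href={social.href} target="_blank" rel="noopener">
              {social.icon}
            </a>
          ))}
        </>
      );
    }

A common stricter variant is rel="noopener noreferrer", which also suppresses the Referer header; this commit adds only noopener.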
1 change: 1 addition & 0 deletions docs/src/containers/Footer/index.js
@@ -109,6 +109,7 @@ export default function Footer() {
key={i}
href={social.href}
target="_blank"
+ rel="noopener"
>
{social.icon}
</a>
3 changes: 1 addition & 2 deletions extensions/inference-openai-extension/src/@types/global.d.ts
@@ -1,6 +1,5 @@
import { Model } from "@janhq/core";

declare const MODULE: string;
declare const OPENAI_DOMAIN: string;

declare interface EngineSettings {
full_url?: string;
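A note on the declaration above (my reading of the diff, not stated in it): OPENAI_DOMAIN is never imported at runtime. The declare const line is an ambient declaration that only gives the identifier a type; its value is supplied by webpack's DefinePlugin, which inlines the string literal during bundling (see the webpack.config.js change at the end of this commit). A minimal sketch of that pairing, with illustrative paths:

    // src/@types/global.d.ts — seen only by the type checker, emits no JavaScript
    declare const OPENAI_DOMAIN: string;

    // any source file in the extension — used like an ordinary constant
    const azureHost: string = OPENAI_DOMAIN;
    // after DefinePlugin runs, the bundled line is effectively:
    //   const azureHost = "openai.azure.com";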
3 changes: 1 addition & 2 deletions extensions/inference-openai-extension/src/helpers/sse.ts
@@ -1,5 +1,4 @@
import { Observable } from "rxjs";
import { EngineSettings, OpenAIModel } from "../@types/global";

/**
* Sends a request to the inference server to generate a response based on the recent messages.
@@ -16,7 +15,7 @@ export function requestInference(
): Observable<string> {
return new Observable((subscriber) => {
let model_id: string = model.id;
- if (engine.full_url.includes("openai.azure.com")) {
+ if (engine.full_url.includes(OPENAI_DOMAIN)) {
model_id = engine.full_url.split("/")[5];
}
const requestBody = JSON.stringify({
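As for what the replaced condition above does, a hedged sketch (the helper and URL are illustrative, not part of the diff): Azure OpenAI endpoints take the form https://{resource}.openai.azure.com/openai/deployments/{deployment}/..., so splitting full_url on "/" puts the deployment name at index 5, and that deployment name is what the extension then uses as the model id.

    // Illustrative helper mirroring the logic in requestInference.
    const OPENAI_DOMAIN = "openai.azure.com"; // injected by DefinePlugin in the real build

    function resolveModelId(fullUrl: string, defaultModelId: string): string {
      // split: ["https:", "", "{resource}.openai.azure.com", "openai", "deployments", "{deployment}", ...]
      if (fullUrl.includes(OPENAI_DOMAIN)) {
        return fullUrl.split("/")[5]; // the Azure deployment name stands in for the model id
      }
      return defaultModelId;
    }

    // resolveModelId(
    //   "https://my-res.openai.azure.com/openai/deployments/gpt-35-turbo/chat/completions",
    //   "gpt-3.5-turbo"
    // ) === "gpt-35-turbo"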
2 changes: 1 addition & 1 deletion extensions/inference-openai-extension/src/index.ts
@@ -22,7 +22,6 @@ import { InferenceExtension } from "@janhq/core";
import { requestInference } from "./helpers/sse";
import { ulid } from "ulid";
import { join } from "path";
import { EngineSettings, OpenAIModel } from "./@types/global";

/**
* A class that implements the InferenceExtension interface from the @janhq/core package.
@@ -108,6 +107,7 @@ export default class JanInferenceOpenAIExtension implements InferenceExtension {
*/
async inference(data: MessageRequest): Promise<ThreadMessage> {
const timestamp = Date.now();

const message: ThreadMessage = {
thread_id: data.threadId,
created: timestamp,
5 changes: 1 addition & 4 deletions extensions/inference-openai-extension/webpack.config.js
@@ -18,10 +18,7 @@ module.exports = {
plugins: [
new webpack.DefinePlugin({
MODULE: JSON.stringify(`${packageJson.name}/${packageJson.module}`),
- INFERENCE_URL: JSON.stringify(
-   process.env.INFERENCE_URL ||
-     "http://127.0.0.1:3928/inferences/llamacpp/chat_completion"
- ),
+ OPENAI_DOMAIN: JSON.stringify("openai.azure.com"),
}),
],
output: {
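The webpack change above drops the environment-overridable INFERENCE_URL constant from this extension and injects only a fixed OPENAI_DOMAIN string, which is presumably the "security risk" half of the commit message: the OpenAI extension no longer derives an endpoint from process.env at build time, and the Azure-detection domain is baked into the bundle as a literal. A sketch of the resulting plugin section (entry, loaders, and output omitted):

    // webpack.config.js (abridged); both values are inlined as string literals at build time.
    const webpack = require("webpack");
    const packageJson = require("./package.json");

    module.exports = {
      plugins: [
        new webpack.DefinePlugin({
          MODULE: JSON.stringify(`${packageJson.name}/${packageJson.module}`),
          OPENAI_DOMAIN: JSON.stringify("openai.azure.com"),
        }),
      ],
    };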
