fix: correct image upload field for ollama llama3.2-vision
- Fix image data not being passed to correct field in API request
- Enable proper image recognition functionality for llama3.2-vision model

fix lobehub#4642
Related lobehub#3888
samurai00 committed Nov 25, 2024
1 parent 35e228f commit 585971d
Showing 1 changed file with 15 additions and 9 deletions.
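
Background for the change: Ollama's chat API does not read OpenAI-style image_url content parts; vision models such as llama3.2-vision expect base64-encoded image data in a separate images field on each message. A minimal, self-contained sketch of that conversion follows (the type and helper names here are illustrative, not the repository's own utilities):

// Illustrative sketch only, not the project's implementation: turn an
// OpenAI-style image_url value (data URI or remote URL) into the base64
// string that Ollama expects in a message's `images` array.
interface OllamaVisionMessage {
  content: string;
  images?: string[]; // base64 payloads, without the `data:image/...;base64,` prefix
  role: 'assistant' | 'system' | 'user';
}

const toBase64Image = async (url: string): Promise<string> => {
  // A data URI already carries the base64 data; just strip the prefix.
  const dataUri = url.match(/^data:image\/[\w+.-]+;base64,(.+)$/);
  if (dataUri) return dataUri[1];

  // A remote URL has to be fetched and encoded (Node 18+ global fetch assumed).
  const response = await fetch(url);
  return Buffer.from(await response.arrayBuffer()).toString('base64');
};

// Usage: attach the encoded image to the message instead of leaving it in `content`.
const buildVisionMessage = async (text: string, imageUrl: string): Promise<OllamaVisionMessage> => ({
  content: text,
  images: [await toBase64Image(imageUrl)],
  role: 'user',
});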
24 changes: 15 additions & 9 deletions src/libs/agent-runtime/ollama/index.ts
@@ -3,6 +3,7 @@ import { ClientOptions } from 'openai';
 
 import { OpenAIChatMessage } from '@/libs/agent-runtime';
 import { ChatModelCard } from '@/types/llm';
+import { imageUrlToBase64 } from '@/utils/imageToBase64';
 
 import { LobeRuntimeAI } from '../BaseAI';
 import { AgentRuntimeErrorType } from '../error';
@@ -40,15 +41,12 @@ export class LobeOllamaAI implements LobeRuntimeAI {
       options?.signal?.addEventListener('abort', abort);
 
       const response = await this.client.chat({
-        messages: this.buildOllamaMessages(payload.messages),
+        messages: await this.buildOllamaMessages(payload.messages),
         model: payload.model,
         options: {
           frequency_penalty: payload.frequency_penalty,
           presence_penalty: payload.presence_penalty,
-          temperature:
-            payload.temperature !== undefined
-              ? payload.temperature / 2
-              : undefined,
+          temperature: payload.temperature !== undefined ? payload.temperature / 2 : undefined,
           top_p: payload.top_p,
         },
         stream: true,
@@ -75,11 +73,15 @@
     }));
   }
 
-  private buildOllamaMessages(messages: OpenAIChatMessage[]) {
-    return messages.map((message) => this.convertContentToOllamaMessage(message));
+  private async buildOllamaMessages(messages: OpenAIChatMessage[]): Promise<OllamaMessage[]> {
+    return await Promise.all(
+      messages.map(async (message) => this.convertContentToOllamaMessage(message)),
+    );
   }
 
-  private convertContentToOllamaMessage = (message: OpenAIChatMessage): OllamaMessage => {
+  private convertContentToOllamaMessage = async (
+    message: OpenAIChatMessage,
+  ): Promise<OllamaMessage> => {
     if (typeof message.content === 'string') {
       return { content: message.content, role: message.role };
     }
@@ -97,10 +99,14 @@
           break;
         }
         case 'image_url': {
-          const { base64 } = parseDataUri(content.image_url.url);
+          const { base64, type } = parseDataUri(content.image_url.url);
           if (base64) {
             ollamaMessage.images ??= [];
             ollamaMessage.images.push(base64);
+          } else if (type === 'url') {
+            const { base64 } = await imageUrlToBase64(content.image_url.url);
+            ollamaMessage.images ??= [];
+            ollamaMessage.images.push(base64);
           }
           break;
         }
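Design note: converting a remote image URL now involves an awaited fetch-and-encode step, so convertContentToOllamaMessage becomes async and buildOllamaMessages collects the per-message promises with Promise.all before the resulting array is passed to this.client.chat.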
