Commit fc810a4

feature #339 [Platform][Ollama] Support streaming output (logicalor)
This PR was squashed before being merged into the main branch.

Discussion
----------

[Platform][Ollama] Support streaming output

| Q             | A        |
| ------------- | -------- |
| Bug fix?      | yes      |
| New feature?  | yes      |
| Docs?         | yes/no   |
| Issues        | Fix #338 |
| License       | MIT      |

Both a bug fix and a feature: supports streaming output from Ollama, and prevents an error from being thrown if streaming output is requested with the chat request.

Commits
-------

09335ce [Platform][Ollama] Support streaming output
2 parents ff296bf + 09335ce commit fc810a4
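
As a quick illustration of what the change enables (not part of the diff): a minimal sketch at the Platform level, mirroring the call shape used in the new test below. The host URL, model name, and prompt are placeholders; with `'stream' => true` the Ollama result converter now returns a `StreamResult` whose `getContent()` yields `OllamaMessageChunk` objects instead of a single text result.

```php
<?php

use Symfony\AI\Platform\Bridge\Ollama\Ollama;
use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory;
use Symfony\Component\HttpClient\HttpClient;

// Placeholder host URL; point this at a running Ollama instance.
$platform = PlatformFactory::create('http://127.0.0.1:11434', HttpClient::create());

$response = $platform->invoke(new Ollama(), [
    'messages' => [
        ['role' => 'user', 'content' => 'Say hello world'],
    ],
    'model' => 'llama3.2',
], [
    'stream' => true, // without this option a plain, non-streamed result is returned
]);

// StreamResult::getContent() returns a Generator of OllamaMessageChunk objects.
foreach ($response->getResult()->getContent() as $chunk) {
    echo $chunk->getContent();
}
echo \PHP_EOL;
```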

File tree: 5 files changed, +201 / -0 lines

examples/ollama/stream.php

Lines changed: 36 additions & 0 deletions
@@ -0,0 +1,36 @@
<?php

/*
 * This file is part of the Symfony package.
 *
 * (c) Fabien Potencier <[email protected]>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

use Symfony\AI\Agent\Agent;
use Symfony\AI\Platform\Bridge\Ollama\Ollama;
use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory;
use Symfony\AI\Platform\Message\Message;
use Symfony\AI\Platform\Message\MessageBag;

require_once dirname(__DIR__).'/bootstrap.php';

$platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client());
$model = new Ollama();

$agent = new Agent($platform, $model, logger: logger());
$messages = new MessageBag(
    Message::forSystem('You are a helpful assistant.'),
    Message::ofUser('Tina has one brother and one sister. How many sisters do Tina\'s siblings have?'),
);

// Stream the response
$result = $agent->call($messages, ['stream' => true]);

// Emit each chunk as it is received
foreach ($result->getContent() as $chunk) {
    echo $chunk->getContent();
}
echo \PHP_EOL;

src/platform/CHANGELOG.md

Lines changed: 1 addition & 0 deletions
@@ -60,5 +60,6 @@ CHANGELOG
 * Add InMemoryPlatform and InMemoryRawResult for testing Platform without external Providers calls
 * Add tool calling support for Ollama platform
 * Allow beta feature flags to be passed into Anthropic model options
+* Add Ollama streaming output support

src/platform/src/Bridge/Ollama/OllamaMessageChunk.php

Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
<?php

/*
 * This file is part of the Symfony package.
 *
 * (c) Fabien Potencier <[email protected]>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

namespace Symfony\AI\Platform\Bridge\Ollama;

/**
 * @author Shaun Johnston <[email protected]>
 */
final readonly class OllamaMessageChunk
{
    /**
     * @param array<string, mixed> $message
     */
    public function __construct(
        public readonly string $model,
        public readonly \DateTimeImmutable $created_at,
        public readonly array $message,
        public readonly bool $done,
    ) {
    }

    public function __toString(): string
    {
        // Return the assistant's message content if available
        return $this->message['content'] ?? '';
    }

    public function getContent(): ?string
    {
        return $this->message['content'] ?? null;
    }

    public function getRole(): ?string
    {
        return $this->message['role'] ?? null;
    }
}
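
To make the chunk shape concrete, a small usage sketch (the field values are illustrative, not taken from a real Ollama response):

```php
<?php

use Symfony\AI\Platform\Bridge\Ollama\OllamaMessageChunk;

// Illustrative chunk, shaped like one decoded Ollama streaming event.
$chunk = new OllamaMessageChunk(
    model: 'llama3.2',
    created_at: new \DateTimeImmutable('2025-08-23T10:00:00Z'),
    message: ['role' => 'assistant', 'content' => 'Hello world'],
    done: true,
);

echo $chunk->getRole();    // "assistant"
echo $chunk->getContent(); // "Hello world"
echo $chunk;               // __toString() also returns the content, or '' when none is present

if ($chunk->done) {
    // the provider marked this as the final chunk of the stream
}
```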

src/platform/src/Bridge/Ollama/OllamaResultConverter.php

Lines changed: 31 additions & 0 deletions
@@ -15,12 +15,17 @@
 use Symfony\AI\Platform\Model;
 use Symfony\AI\Platform\Result\RawResultInterface;
 use Symfony\AI\Platform\Result\ResultInterface;
+use Symfony\AI\Platform\Result\StreamResult;
 use Symfony\AI\Platform\Result\TextResult;
 use Symfony\AI\Platform\Result\ToolCall;
 use Symfony\AI\Platform\Result\ToolCallResult;
 use Symfony\AI\Platform\Result\VectorResult;
 use Symfony\AI\Platform\ResultConverterInterface;
 use Symfony\AI\Platform\Vector\Vector;
+use Symfony\Component\HttpClient\Chunk\FirstChunk;
+use Symfony\Component\HttpClient\Chunk\LastChunk;
+use Symfony\Component\HttpClient\EventSourceHttpClient;
+use Symfony\Contracts\HttpClient\ResponseInterface;
 
 /**
  * @author Christopher Hertel <[email protected]>
@@ -34,6 +39,10 @@ public function supports(Model $model): bool
 
     public function convert(RawResultInterface $result, array $options = []): ResultInterface
     {
+        if ($options['stream'] ?? false) {
+            return new StreamResult($this->convertStream($result->getObject()));
+        }
+
         $data = $result->getData();
 
         return \array_key_exists('embeddings', $data)
@@ -83,4 +92,26 @@ public function doConvertEmbeddings(array $data): ResultInterface
             ),
         );
     }
+
+    private function convertStream(ResponseInterface $result): \Generator
+    {
+        foreach ((new EventSourceHttpClient())->stream($result) as $chunk) {
+            if ($chunk instanceof FirstChunk || $chunk instanceof LastChunk) {
+                continue;
+            }
+
+            try {
+                $data = json_decode($chunk->getContent(), true, 512, \JSON_THROW_ON_ERROR);
+            } catch (\JsonException $e) {
+                throw new RuntimeException('Failed to decode JSON: '.$e->getMessage());
+            }
+
+            yield new OllamaMessageChunk(
+                $data['model'],
+                new \DateTimeImmutable($data['created_at']),
+                $data['message'],
+                $data['done'],
+            );
+        }
+    }
 }

src/platform/tests/Bridge/Ollama/OllamaClientTest.php

Lines changed: 88 additions & 0 deletions
@@ -16,9 +16,12 @@
 use PHPUnit\Framework\TestCase;
 use Symfony\AI\Platform\Bridge\Ollama\Ollama;
 use Symfony\AI\Platform\Bridge\Ollama\OllamaClient;
+use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory;
 use Symfony\AI\Platform\Model;
+use Symfony\AI\Platform\Result\StreamResult;
 use Symfony\Component\HttpClient\MockHttpClient;
 use Symfony\Component\HttpClient\Response\JsonMockResponse;
+use Symfony\Component\HttpClient\Response\MockResponse;
 
 #[CoversClass(OllamaClient::class)]
 #[UsesClass(Ollama::class)]
@@ -87,4 +90,89 @@ public function testOutputStructureIsSupported()
             'done' => true,
         ], $response->getData());
     }
+
+    public function testStreamingIsSupported()
+    {
+        $httpClient = new MockHttpClient([
+            new JsonMockResponse([
+                'capabilities' => ['completion'],
+            ]),
+            new MockResponse('data: '.json_encode([
+                'model' => 'llama3.2',
+                'created_at' => '2025-08-23T10:00:00Z',
+                'message' => ['role' => 'assistant', 'content' => 'Hello world'],
+                'done' => true,
+            ])."\n\n", [
+                'response_headers' => [
+                    'content-type' => 'text/event-stream',
+                ],
+            ]),
+        ], 'http://127.0.0.1:1234');
+
+        $platform = PlatformFactory::create('http://127.0.0.1:1234', $httpClient);
+        $response = $platform->invoke(new Ollama(), [
+            'messages' => [
+                [
+                    'role' => 'user',
+                    'content' => 'Say hello world',
+                ],
+            ],
+            'model' => 'llama3.2',
+        ], [
+            'stream' => true,
+        ]);
+
+        $result = $response->getResult();
+
+        $this->assertInstanceOf(StreamResult::class, $result);
+        $this->assertInstanceOf(\Generator::class, $result->getContent());
+        $this->assertSame(2, $httpClient->getRequestsCount());
+    }
+
+    public function testStreamingConverterWithDirectResponse()
+    {
+        $streamingData = 'data: '.json_encode([
+            'model' => 'llama3.2',
+            'created_at' => '2025-08-23T10:00:00Z',
+            'message' => ['role' => 'assistant', 'content' => 'Hello'],
+            'done' => false,
+        ])."\n\n".
+        'data: '.json_encode([
+            'model' => 'llama3.2',
+            'created_at' => '2025-08-23T10:00:01Z',
+            'message' => ['role' => 'assistant', 'content' => ' world'],
+            'done' => true,
+        ])."\n\n";
+
+        $mockHttpClient = new MockHttpClient([
+            new MockResponse($streamingData, [
+                'response_headers' => [
+                    'content-type' => 'text/event-stream',
+                ],
+            ]),
+        ]);
+
+        $mockResponse = $mockHttpClient->request('GET', 'http://test.example');
+        $rawResult = new \Symfony\AI\Platform\Result\RawHttpResult($mockResponse);
+        $converter = new \Symfony\AI\Platform\Bridge\Ollama\OllamaResultConverter();
+
+        $result = $converter->convert($rawResult, ['stream' => true]);
+
+        $this->assertInstanceOf(StreamResult::class, $result);
+        $this->assertInstanceOf(\Generator::class, $result->getContent());
+
+        $regularMockHttpClient = new MockHttpClient([
+            new JsonMockResponse([
+                'model' => 'llama3.2',
+                'message' => ['role' => 'assistant', 'content' => 'Hello world'],
+                'done' => true,
+            ]),
+        ]);
+
+        $regularMockResponse = $regularMockHttpClient->request('GET', 'http://test.example');
+        $regularRawResult = new \Symfony\AI\Platform\Result\RawHttpResult($regularMockResponse);
+        $regularResult = $converter->convert($regularRawResult, ['stream' => false]);
+
+        $this->assertNotInstanceOf(StreamResult::class, $regularResult);
+    }
 }
