From 7ccdfc8052813ff5b598152ae7ba20fd2da53865 Mon Sep 17 00:00:00 2001
From: Guillaume Loulier
Date: Thu, 9 Oct 2025 17:52:57 +0200
Subject: [PATCH] [Platform][Ollama] Introduce `OllamaApiCatalog`

---
 docs/components/platform.rst | 28 +++++++
 src/ai-bundle/config/options.php | 3 +
 src/ai-bundle/src/AiBundle.php | 17 +++-
 .../DependencyInjection/AiBundleTest.php | 40 ++++++++++
 .../src/Bridge/Ollama/ModelCatalog.php | 5 +-
 .../src/Bridge/Ollama/OllamaApiCatalog.php | 78 +++++++++++++++++++
 .../src/Bridge/Ollama/OllamaClient.php | 17 +---
 src/platform/src/Capability.php | 6 ++
 .../Bridge/Ollama/OllamaApiCatalogTest.php | 40 ++++++++++
 .../tests/Bridge/Ollama/OllamaClientTest.php | 16 ++--
 10 files changed, 223 insertions(+), 27 deletions(-)
 create mode 100644 src/platform/src/Bridge/Ollama/OllamaApiCatalog.php
 create mode 100644 src/platform/tests/Bridge/Ollama/OllamaApiCatalogTest.php

diff --git a/docs/components/platform.rst b/docs/components/platform.rst
index fba725232..f3937530d 100644
--- a/docs/components/platform.rst
+++ b/docs/components/platform.rst
@@ -77,6 +77,34 @@ You can also combine size variants with query parameters::
     // Get model with size variant and query parameters
     $model = $catalog->getModel('qwen3:32b?temperature=0.5&top_p=0.9');

+Custom models
+~~~~~~~~~~~~~
+
+Providers like Ollama also let you use custom models (built on top of a ``Modelfile``). As those models are not listed
+in the default catalog, you can use the built-in ``OllamaApiCatalog`` to query the model information from the Ollama
+API instead of relying on the default catalog::
+
+    use Symfony\AI\Platform\Bridge\Ollama\OllamaApiCatalog;
+    use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory;
+    use Symfony\AI\Platform\Message\Message;
+    use Symfony\AI\Platform\Message\MessageBag;
+
+    $platform = PlatformFactory::create('http://127.0.0.1:11434', HttpClient::create(), new OllamaApiCatalog(
+        'http://127.0.0.1:11434',
+        HttpClient::create(),
+    ));
+
+    $platform->invoke('your_custom_model_name', new MessageBag(
+        Message::ofUser(...)
+    ));
+
+When using the bundle, ``OllamaApiCatalog`` can be enabled via the ``api_catalog`` option::
+
+    ai:
+        platform:
+            ollama:
+                api_catalog: true
+
 Supported Models & Platforms
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~

diff --git a/src/ai-bundle/config/options.php b/src/ai-bundle/config/options.php
index f973a97ee..77787af9d 100644
--- a/src/ai-bundle/config/options.php
+++ b/src/ai-bundle/config/options.php
@@ -160,6 +160,9 @@
                         ->defaultValue('http_client')
                         ->info('Service ID of the HTTP client to use')
                     ->end()
+                    ->booleanNode('api_catalog')
+                        ->info('If set, the Ollama API will be used to build the catalog and retrieve model information; note that this option triggers additional HTTP calls')
+                    ->end()
                 ->end()
             ->end()
             ->arrayNode('cerebras')
diff --git a/src/ai-bundle/src/AiBundle.php b/src/ai-bundle/src/AiBundle.php
index 2df08057b..35f1bd3c2 100644
--- a/src/ai-bundle/src/AiBundle.php
+++ b/src/ai-bundle/src/AiBundle.php
@@ -57,6 +57,7 @@
 use Symfony\AI\Platform\Bridge\HuggingFace\PlatformFactory as HuggingFacePlatformFactory;
 use Symfony\AI\Platform\Bridge\LmStudio\PlatformFactory as LmStudioPlatformFactory;
 use Symfony\AI\Platform\Bridge\Mistral\PlatformFactory as MistralPlatformFactory;
+use Symfony\AI\Platform\Bridge\Ollama\OllamaApiCatalog;
 use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory as OllamaPlatformFactory;
 use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory as OpenAiPlatformFactory;
 use Symfony\AI\Platform\Bridge\OpenRouter\PlatformFactory as OpenRouterPlatformFactory;
@@ -568,11 +569,20 @@ private function processPlatformConfig(string $type, array $platform, ContainerB
         }

         if ('ollama' === $type) {
-            $platformId = 'ai.platform.ollama';
+            if (\array_key_exists('api_catalog', $platform)) {
+                $catalogDefinition = (new Definition(OllamaApiCatalog::class))
+                    ->setLazy(true)
+                    ->setArguments([
+                        $platform['host_url'],
+                        new Reference('http_client'),
+                    ]);
+
+                $container->setDefinition('ai.platform.model_catalog.ollama', $catalogDefinition);
+            }
+
             $definition = (new Definition(Platform::class))
                 ->setFactory(OllamaPlatformFactory::class.'::create')
                 ->setLazy(true)
-                ->addTag('proxy', ['interface' => PlatformInterface::class])
                 ->setArguments([
                     $platform['host_url'],
                     new Reference($platform['http_client'], ContainerInterface::NULL_ON_INVALID_REFERENCE),
@@ -580,9 +590,10 @@ private function processPlatformConfig(string $type, array $platform, ContainerB
                     new Reference('ai.platform.contract.ollama'),
                     new Reference('event_dispatcher'),
                 ])
+                ->addTag('proxy', ['interface' => PlatformInterface::class])
                 ->addTag('ai.platform', ['name' => 'ollama']);

-            $container->setDefinition($platformId, $definition);
+            $container->setDefinition('ai.platform.ollama', $definition);

             return;
         }
diff --git a/src/ai-bundle/tests/DependencyInjection/AiBundleTest.php b/src/ai-bundle/tests/DependencyInjection/AiBundleTest.php
index 358c0236e..0d7bed4e2 100644
--- a/src/ai-bundle/tests/DependencyInjection/AiBundleTest.php
+++ b/src/ai-bundle/tests/DependencyInjection/AiBundleTest.php
@@ -23,6 +23,7 @@
 use Symfony\AI\AiBundle\AiBundle;
 use Symfony\AI\Chat\ChatInterface;
 use Symfony\AI\Chat\MessageStoreInterface;
+use Symfony\AI\Platform\Bridge\Ollama\OllamaApiCatalog;
 use Symfony\AI\Store\Document\Filter\TextContainsFilter;
 use Symfony\AI\Store\Document\Loader\InMemoryLoader;
 use Symfony\AI\Store\Document\Transformer\TextTrimTransformer;
@@ -583,6 +584,45 @@ public function testConfigurationWithUseAttributeAsKeyWorksWithoutNormalizeKeys(
         $this->assertTrue($container->hasDefinition('ai.store.mongodb.Production_DB-v3'));
     }

+    public function testOllamaCanBeCreatedWithCatalogFromApi()
+    {
+        $container = $this->buildContainer([
+            'ai' => [
+                'platform' => [
+                    'ollama' => [
+                        'api_catalog' => true,
+                    ],
+                ],
+            ],
+        ]);
+
+        $this->assertTrue($container->hasDefinition('ai.platform.ollama'));
+        $this->assertTrue($container->hasDefinition('ai.platform.model_catalog.ollama'));
+
+        $ollamaDefinition = $container->getDefinition('ai.platform.ollama');
+
+        $this->assertTrue($ollamaDefinition->isLazy());
+        $this->assertCount(5, $ollamaDefinition->getArguments());
+        $this->assertSame('http://127.0.0.1:11434', $ollamaDefinition->getArgument(0));
+        $this->assertInstanceOf(Reference::class, $ollamaDefinition->getArgument(1));
+        $this->assertSame('http_client', (string) $ollamaDefinition->getArgument(1));
+        $this->assertInstanceOf(Reference::class, $ollamaDefinition->getArgument(2));
+        $this->assertSame('ai.platform.model_catalog.ollama', (string) $ollamaDefinition->getArgument(2));
+        $this->assertInstanceOf(Reference::class, $ollamaDefinition->getArgument(3));
+        $this->assertSame('ai.platform.contract.ollama', (string) $ollamaDefinition->getArgument(3));
+        $this->assertInstanceOf(Reference::class, $ollamaDefinition->getArgument(4));
+        $this->assertSame('event_dispatcher', (string) $ollamaDefinition->getArgument(4));
+
+        $ollamaCatalogDefinition = $container->getDefinition('ai.platform.model_catalog.ollama');
+
+        $this->assertTrue($ollamaCatalogDefinition->isLazy());
+        $this->assertSame(OllamaApiCatalog::class, $ollamaCatalogDefinition->getClass());
+        $this->assertCount(2, $ollamaCatalogDefinition->getArguments());
+        $this->assertSame('http://127.0.0.1:11434', $ollamaCatalogDefinition->getArgument(0));
+        $this->assertInstanceOf(Reference::class, $ollamaCatalogDefinition->getArgument(1));
+        $this->assertSame('http_client', (string) $ollamaCatalogDefinition->getArgument(1));
+    }
+
     /**
      * Tests that processor tags use the full agent ID (ai.agent.my_agent) instead of just the agent name (my_agent).
      * This regression test prevents issues where processors would not be correctly associated with their agents.
diff --git a/src/platform/src/Bridge/Ollama/ModelCatalog.php b/src/platform/src/Bridge/Ollama/ModelCatalog.php
index 68f487e97..55bb7bd8c 100644
--- a/src/platform/src/Bridge/Ollama/ModelCatalog.php
+++ b/src/platform/src/Bridge/Ollama/ModelCatalog.php
@@ -218,6 +218,9 @@ public function __construct(array $additionalModels = [])
             ],
         ];

-        $this->models = array_merge($defaultModels, $additionalModels);
+        $this->models = [
+            ...$defaultModels,
+            ...$additionalModels,
+        ];
     }
 }
diff --git a/src/platform/src/Bridge/Ollama/OllamaApiCatalog.php b/src/platform/src/Bridge/Ollama/OllamaApiCatalog.php
new file mode 100644
index 000000000..9f5c0d709
--- /dev/null
+++ b/src/platform/src/Bridge/Ollama/OllamaApiCatalog.php
@@ -0,0 +1,78 @@
+<?php
+
+/*
+ * This file is part of the Symfony package.
+ *
+ * (c) Fabien Potencier <fabien@symfony.com>
+ *
+ * For the full copyright and license information, please view the LICENSE
+ * file that was distributed with this source code.
+ */ + +namespace Symfony\AI\Platform\Bridge\Ollama; + +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\Exception\InvalidArgumentException; +use Symfony\AI\Platform\ModelCatalog\FallbackModelCatalog; +use Symfony\Contracts\HttpClient\HttpClientInterface; + +/** + * @author Guillaume Loulier + */ +final class OllamaApiCatalog extends FallbackModelCatalog +{ + public function __construct( + private readonly string $host, + private readonly HttpClientInterface $httpClient, + ) { + parent::__construct(); + } + + public function getModel(string $modelName): Ollama + { + $model = parent::getModel($modelName); + + if (\array_key_exists($model->getName(), $this->models)) { + $finalModel = $this->models[$model->getName()]; + + return new $finalModel['class']( + $model->getName(), + $finalModel['capabilities'], + $model->getOptions(), + ); + } + + $response = $this->httpClient->request('POST', \sprintf('%s/api/show', $this->host), [ + 'json' => [ + 'model' => $model->getName(), + ], + ]); + + $payload = $response->toArray(); + + if ([] === $payload['capabilities']) { + throw new InvalidArgumentException('The model information could not be retrieved from the Ollama API. Your Ollama server might be too old. Try upgrade it.'); + } + + $capabilities = array_map( + static fn (string $capability): Capability => match ($capability) { + 'embedding' => Capability::EMBEDDINGS, + 'completion' => Capability::INPUT_TEXT, + 'tools' => Capability::TOOL_CALLING, + 'thinking' => Capability::THINKING, + 'vision' => Capability::INPUT_IMAGE, + default => throw new InvalidArgumentException(\sprintf('The "%s" capability is not supported', $capability)), + }, + $payload['capabilities'], + ); + + $finalModel = new Ollama($model->getName(), $capabilities, $model->getOptions()); + + $this->models[$finalModel->getName()] = [ + 'class' => Ollama::class, + 'capabilities' => $finalModel->getCapabilities(), + ]; + + return $finalModel; + } +} diff --git a/src/platform/src/Bridge/Ollama/OllamaClient.php b/src/platform/src/Bridge/Ollama/OllamaClient.php index 4ff627e65..8cc893bcb 100644 --- a/src/platform/src/Bridge/Ollama/OllamaClient.php +++ b/src/platform/src/Bridge/Ollama/OllamaClient.php @@ -11,6 +11,7 @@ namespace Symfony\AI\Platform\Bridge\Ollama; +use Symfony\AI\Platform\Capability; use Symfony\AI\Platform\Exception\InvalidArgumentException; use Symfony\AI\Platform\Model; use Symfony\AI\Platform\ModelClientInterface; @@ -36,21 +37,9 @@ public function supports(Model $model): bool public function request(Model $model, array|string $payload, array $options = []): RawHttpResult { - $response = $this->httpClient->request('POST', \sprintf('%s/api/show', $this->hostUrl), [ - 'json' => [ - 'model' => $model->getName(), - ], - ]); - - $capabilities = $response->toArray()['capabilities'] ?? null; - - if (null === $capabilities) { - throw new InvalidArgumentException('The model information could not be retrieved from the Ollama API. Your Ollama server might be too old. 
-        }
-
         return match (true) {
-            \in_array('completion', $capabilities, true) => $this->doCompletionRequest($payload, $options),
-            \in_array('embedding', $capabilities, true) => $this->doEmbeddingsRequest($model, $payload, $options),
+            \in_array(Capability::INPUT_MESSAGES, $model->getCapabilities(), true) => $this->doCompletionRequest($payload, $options),
+            \in_array(Capability::EMBEDDINGS, $model->getCapabilities(), true) => $this->doEmbeddingsRequest($model, $payload, $options),
             default => throw new InvalidArgumentException(\sprintf('Unsupported model "%s": "%s".', $model::class, $model->getName())),
         };
     }
diff --git a/src/platform/src/Capability.php b/src/platform/src/Capability.php
index 459c5d0ad..47e6afc59 100644
--- a/src/platform/src/Capability.php
+++ b/src/platform/src/Capability.php
@@ -42,4 +42,10 @@ enum Capability: string
     // VOICE
     case TEXT_TO_SPEECH = 'text-to-speech';
     case SPEECH_TO_TEXT = 'speech-to-text';
+
+    // EMBEDDINGS
+    case EMBEDDINGS = 'embeddings';
+
+    // THINKING
+    case THINKING = 'thinking';
 }
diff --git a/src/platform/tests/Bridge/Ollama/OllamaApiCatalogTest.php b/src/platform/tests/Bridge/Ollama/OllamaApiCatalogTest.php
new file mode 100644
index 000000000..54c16f30b
--- /dev/null
+++ b/src/platform/tests/Bridge/Ollama/OllamaApiCatalogTest.php
@@ -0,0 +1,40 @@
+<?php
+
+/*
+ * This file is part of the Symfony package.
+ *
+ * (c) Fabien Potencier <fabien@symfony.com>
+ *
+ * For the full copyright and license information, please view the LICENSE
+ * file that was distributed with this source code.
+ */
+
+namespace Symfony\AI\Platform\Tests\Bridge\Ollama;
+
+use PHPUnit\Framework\TestCase;
+use Symfony\AI\Platform\Bridge\Ollama\OllamaApiCatalog;
+use Symfony\AI\Platform\Capability;
+use Symfony\Component\HttpClient\MockHttpClient;
+use Symfony\Component\HttpClient\Response\JsonMockResponse;
+
+final class OllamaApiCatalogTest extends TestCase
+{
+    public function testModelCatalogCanReturnModelFromApi()
+    {
+        $httpClient = new MockHttpClient([
+            new JsonMockResponse([
+                'capabilities' => ['completion'],
+            ]),
+        ]);
+
+        $modelCatalog = new OllamaApiCatalog('http://127.0.0.1:11434', $httpClient);
+
+        $model = $modelCatalog->getModel('foo');
+
+        $this->assertSame('foo', $model->getName());
+        $this->assertSame([
+            Capability::INPUT_TEXT,
+        ], $model->getCapabilities());
+        $this->assertSame(1, $httpClient->getRequestsCount());
+    }
+}
diff --git a/src/platform/tests/Bridge/Ollama/OllamaClientTest.php b/src/platform/tests/Bridge/Ollama/OllamaClientTest.php
index 83e252259..f23b7a143 100644
--- a/src/platform/tests/Bridge/Ollama/OllamaClientTest.php
+++ b/src/platform/tests/Bridge/Ollama/OllamaClientTest.php
@@ -16,6 +16,7 @@
 use Symfony\AI\Platform\Bridge\Ollama\OllamaClient;
 use Symfony\AI\Platform\Bridge\Ollama\OllamaResultConverter;
 use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory;
+use Symfony\AI\Platform\Capability;
 use Symfony\AI\Platform\Model;
 use Symfony\AI\Platform\Result\RawHttpResult;
 use Symfony\AI\Platform\Result\StreamResult;
@@ -36,9 +37,6 @@ public function testSupportsModel()
     public function testOutputStructureIsSupported()
     {
         $httpClient = new MockHttpClient([
-            new JsonMockResponse([
-                'capabilities' => ['completion', 'tools'],
-            ]),
             new JsonMockResponse([
                 'model' => 'foo',
                 'response' => [
@@ -50,7 +48,10 @@ public function testOutputStructureIsSupported()
             ],
         ], 'http://127.0.0.1:1234');
         $client = new OllamaClient($httpClient, 'http://127.0.0.1:1234');
-        $response = $client->request(new Ollama('llama3.2'), [
+        $response = $client->request(new Ollama('llama3.2', [
+            Capability::INPUT_MESSAGES,
+            Capability::TOOL_CALLING,
+        ]), [
'messages' => [ [ 'role' => 'user', @@ -77,7 +78,7 @@ public function testOutputStructureIsSupported() ], ]); - $this->assertSame(2, $httpClient->getRequestsCount()); + $this->assertSame(1, $httpClient->getRequestsCount()); $this->assertSame([ 'model' => 'foo', 'response' => [ @@ -91,9 +92,6 @@ public function testOutputStructureIsSupported() public function testStreamingIsSupported() { $httpClient = new MockHttpClient([ - new JsonMockResponse([ - 'capabilities' => ['completion'], - ]), new MockResponse('data: '.json_encode([ 'model' => 'llama3.2', 'created_at' => '2025-08-23T10:00:00Z', @@ -123,7 +121,7 @@ public function testStreamingIsSupported() $this->assertInstanceOf(StreamResult::class, $result); $this->assertInstanceOf(\Generator::class, $result->getContent()); - $this->assertSame(2, $httpClient->getRequestsCount()); + $this->assertSame(1, $httpClient->getRequestsCount()); } public function testStreamingConverterWithDirectResponse()
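
For reference, below is a minimal end-to-end usage sketch of the API-backed catalog introduced by this patch. It assumes a local Ollama server on the default port and a hypothetical ``Modelfile``-based model named ``my-notes-assistant``; apart from ``HttpClient``, only classes added or documented in the patch are used::

    use Symfony\AI\Platform\Bridge\Ollama\OllamaApiCatalog;
    use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory;
    use Symfony\AI\Platform\Message\Message;
    use Symfony\AI\Platform\Message\MessageBag;
    use Symfony\Component\HttpClient\HttpClient;

    $httpClient = HttpClient::create();

    // The catalog asks the Ollama API (POST /api/show) for the capabilities of
    // models that are not shipped in the default catalog and caches the result.
    $catalog = new OllamaApiCatalog('http://127.0.0.1:11434', $httpClient);

    $platform = PlatformFactory::create('http://127.0.0.1:11434', $httpClient, $catalog);

    // "my-notes-assistant" is a hypothetical custom model created from a Modelfile.
    $result = $platform->invoke('my-notes-assistant', new MessageBag(
        Message::ofUser('Summarize the last note I saved.'),
    ));

This mirrors the wiring the bundle produces when ``api_catalog: true`` is set for the ``ollama`` platform.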