Skip to content
This repository was archived by the owner on Jul 16, 2025. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
110 changes: 56 additions & 54 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -41,27 +41,26 @@ Those models are provided by different **platforms**, like OpenAI, Azure, Replic
#### Example Instantiation

```php
use PhpLlm\LlmChain\OpenAI\Model\Embeddings;
use PhpLlm\LlmChain\OpenAI\Model\Gpt;
use PhpLlm\LlmChain\OpenAI\Model\Gpt\Version;
use PhpLlm\LlmChain\OpenAI\Platform\OpenAI;
use Symfony\Component\HttpClient\HttpClient;
use PhpLlm\LlmChain\Bridge\OpenAI\Embeddings;
use PhpLlm\LlmChain\Bridge\OpenAI\GPT;
use PhpLlm\LlmChain\Bridge\OpenAI\PlatformFactory;

// Platform: OpenAI
$platform = new OpenAI(HttpClient::create(), $_ENV['OPENAI_API_KEY']);
$platform = PlatformFactory::create($_ENV['OPENAI_API_KEY']);

// Language Model: GPT (OpenAI)
$llm = new Gpt($platform, Version::gpt4oMini());
$llm = new GPT(GPT::GPT_4O_MINI);

// Embeddings Model: Embeddings (OpenAI)
$embeddings = new Embeddings($platform);
$embeddings = new Embeddings();
```

#### Supported Models & Platforms

* Language Models
* [OpenAI's GPT](https://platform.openai.com/docs/models/overview) with [OpenAI](https://platform.openai.com/docs/overview) and [Azure](https://learn.microsoft.com/azure/ai-services/openai/concepts/models) as Platform
* [Anthropic's Claude](https://www.anthropic.com/claude) with [Anthropic](https://www.anthropic.com/) as Platform
* [Meta's Llama](https://www.llama.com/) with [Ollama](https://ollama.com/) and [Replicate](https://replicate.com/) as Platform
* Embeddings Models
* [OpenAI's Text Embeddings](https://platform.openai.com/docs/guides/embeddings/embedding-models) with [OpenAI](https://platform.openai.com/docs/overview) and [Azure](https://learn.microsoft.com/azure/ai-services/openai/concepts/models) as Platform
* [Voyage's Embeddings](https://docs.voyageai.com/docs/embeddings) with [Voyage](https://www.voyageai.com/) as Platform
Expand All @@ -71,7 +70,7 @@ See [issue #28](https://github.com/php-llm/llm-chain/issues/28) for planned supp
### Chain & Messages

The core feature of LLM Chain is to interact with language models via messages. This interaction is done by sending
a **MessageBag** to a **Chain**, which takes care of LLM invokation and response handling.
a **MessageBag** to a **Chain**, which takes care of LLM invocation and response handling.

Messages can be of different types, most importantly `UserMessage`, `SystemMessage`, or `AssistantMessage`, and can also
have different content types, like `Text` or `Image`.
Expand All @@ -80,13 +79,13 @@ have different content types, like `Text` or `Image`.

```php
use PhpLlm\LlmChain\Chain;
use PhpLlm\LlmChain\Message\MessageBag;
use PhpLlm\LlmChain\Message\SystemMessage;
use PhpLlm\LlmChain\Message\UserMessage;
use PhpLlm\LlmChain\Model\Message\MessageBag;
use PhpLlm\LlmChain\Model\Message\SystemMessage;
use PhpLlm\LlmChain\Model\Message\UserMessage;

// LLM instantiation
// Platform & LLM instantiation

$chain = new Chain($llm);
$chain = new Chain($platform, $llm);
$messages = new MessageBag(
new SystemMessage('You are a helpful chatbot answering questions about LLM Chain.'),
new UserMessage('Hello, how are you?'),
Expand All @@ -104,6 +103,8 @@ The `MessageInterface` and `Content` interface help to customize this process if
1. **OpenAI's GPT with Azure**: [chat-gpt-azure.php](examples/chat-gpt-azure.php)
1. **OpenAI's GPT**: [chat-gpt-openai.php](examples/chat-gpt-openai.php)
1. **OpenAI's o1**: [chat-o1-openai.php](examples/chat-o1-openai.php)
1. **Meta's Llama with Ollama**: [chat-llama-ollama.php](examples/chat-llama-ollama.php)
1. **Meta's Llama with Replicate**: [chat-llama-replicate.php](examples/chat-llama-replicate.php)

### Tools

Expand All @@ -112,19 +113,21 @@ Tools are services that can be called by the LLM to provide additional features

Tool calling can be enabled by registering the processors in the chain:
```php
use PhpLlm\LlmChain\ToolBox\ChainProcessor;
use PhpLlm\LlmChain\ToolBox\ToolAnalyzer;
use PhpLlm\LlmChain\ToolBox\ToolBox;
use PhpLlm\LlmChain\Chain\ToolBox\ChainProcessor;
use PhpLlm\LlmChain\Chain\ToolBox\ToolAnalyzer;
use PhpLlm\LlmChain\Chain\ToolBox\ToolBox;
use Symfony\Component\Serializer\Encoder\JsonEncoder;
use Symfony\Component\Serializer\Normalizer\ObjectNormalizer;
use Symfony\Component\Serializer\Serializer;

// Platform & LLM instantiation

$yourTool = new YourTool();

$toolBox = new ToolBox(new ToolAnalyzer(), [$yourTool]);
$toolProcessor = new ChainProcessor($toolBox);

$chain = new Chain($llm, inputProcessor: [$toolProcessor], outputProcessor: [$toolProcessor]);
$chain = new Chain($platform, $llm, inputProcessor: [$toolProcessor], outputProcessor: [$toolProcessor]);
```

Custom tools can basically be any class, but must be configured using the `#[AsTool]` attribute.
Expand Down Expand Up @@ -159,15 +162,16 @@ For populating a vector store, LLM Chain provides the service `DocumentEmbedder`
`EmbeddingsModel` and one of `StoreInterface`, and works with a collection of `Document` objects as input:

```php
use PhpLlm\LlmChain\DocumentEmbedder;
use PhpLlm\LlmChain\OpenAI\Model\Embeddings;
use PhpLlm\LlmChain\OpenAI\Platform\OpenAI;
use PhpLlm\LlmChain\Store\Pinecone\Store;
use PhpLlm\LlmChain\Embedder;
use PhpLlm\LlmChain\Bridge\OpenAI\Embeddings;
use PhpLlm\LlmChain\Bridge\OpenAI\PlatformFactory;
use PhpLlm\LlmChain\Bridge\Pinecone\Store;
use Probots\Pinecone\Pinecone;
use Symfony\Component\HttpClient\HttpClient;

$embedder = new DocumentEmbedder(
new Embeddings(new OpenAI(HttpClient::create(), $_ENV['OPENAI_API_KEY']);),
$embedder = new Embedder(
PlatformFactory::create($_ENV['OPENAI_API_KEY']),
new Embeddings(),
    new Store(Pinecone::client($_ENV['PINECONE_API_KEY'], $_ENV['PINECONE_HOST'])),
);
$embedder->embed($documents);
Expand Down Expand Up @@ -196,20 +200,19 @@ In the end the chain is used in combination with a retrieval tool on top of the

```php
use PhpLlm\LlmChain\Chain;
use PhpLlm\LlmChain\DocumentEmbedder;
use PhpLlm\LlmChain\Message\Message;
use PhpLlm\LlmChain\Message\MessageBag;
use PhpLlm\LlmChain\ToolBox\ChainProcessor;
use PhpLlm\LlmChain\ToolBox\Tool\SimilaritySearch;
use PhpLlm\LlmChain\ToolBox\ToolAnalyzer;
use PhpLlm\LlmChain\ToolBox\ToolBox;
use PhpLlm\LlmChain\Model\Message\Message;
use PhpLlm\LlmChain\Model\Message\MessageBag;
use PhpLlm\LlmChain\Chain\ToolBox\ChainProcessor;
use PhpLlm\LlmChain\Chain\ToolBox\Tool\SimilaritySearch;
use PhpLlm\LlmChain\Chain\ToolBox\ToolAnalyzer;
use PhpLlm\LlmChain\Chain\ToolBox\ToolBox;

// Initialize Platform and LLM
// Initialize Platform & Models

$similaritySearch = new SimilaritySearch($embeddings, $store);
$toolBox = new ToolBox(new ToolAnalyzer(), [$similaritySearch]);
$processor = new ChainProcessor($toolBox);
$chain = new Chain(new Gpt($platform), [$processor], [$processor]);
$chain = new Chain($platform, $llm, [$processor], [$processor]);

$messages = new MessageBag(
Message::forSystem(<<<PROMPT
Expand Down Expand Up @@ -250,11 +253,11 @@ the response back to PHP objects.
To achieve this, a specific chain processor needs to be registered:
```php
use PhpLlm\LlmChain\Chain;
use PhpLlm\LlmChain\Message\Message;
use PhpLlm\LlmChain\Message\MessageBag;
use PhpLlm\LlmChain\StructuredOutput\ChainProcessor;
use PhpLlm\LlmChain\StructuredOutput\ResponseFormatFactory;
use PhpLlm\LlmChain\Tests\StructuredOutput\Data\MathReasoning;
use PhpLlm\LlmChain\Model\Message\Message;
use PhpLlm\LlmChain\Model\Message\MessageBag;
use PhpLlm\LlmChain\Chain\StructuredOutput\ChainProcessor;
use PhpLlm\LlmChain\Chain\StructuredOutput\ResponseFormatFactory;
use PhpLlm\LlmChain\Tests\Chain\StructuredOutput\Data\MathReasoning;
use Symfony\Component\Serializer\Encoder\JsonEncoder;
use Symfony\Component\Serializer\Normalizer\ObjectNormalizer;
use Symfony\Component\Serializer\Serializer;
Expand All @@ -263,7 +266,7 @@ use Symfony\Component\Serializer\Serializer;

$serializer = new Serializer([new ObjectNormalizer()], [new JsonEncoder()]);
$processor = new ChainProcessor(new ResponseFormatFactory(), $serializer);
$chain = new Chain($llm, [$processor], [$processor]);
$chain = new Chain($platform, $llm, [$processor], [$processor]);

$messages = new MessageBag(
Message::forSystem('You are a helpful math tutor. Guide the user through the solution step by step.'),
Expand All @@ -279,8 +282,8 @@ dump($response->getContent()); // returns an instance of `MathReasoning` class
Also PHP array structures as `response_format` are supported, which also requires the chain processor mentioned above:

```php
use PhpLlm\LlmChain\Message\Message;
use PhpLlm\LlmChain\Message\MessageBag;
use PhpLlm\LlmChain\Model\Message\Message;
use PhpLlm\LlmChain\Model\Message\MessageBag;

// Initialize Platform, LLM and Chain with processors and Clock tool

Expand Down Expand Up @@ -380,9 +383,9 @@ needs to be used.
Some LLMs also support images as input, which LLM Chain supports as `Content` type within the `UserMessage`:

```php
use PhpLlm\LlmChain\Message\Content\Image;
use PhpLlm\LlmChain\Message\Message;
use PhpLlm\LlmChain\Message\MessageBag;
use PhpLlm\LlmChain\Model\Message\Content\Image;
use PhpLlm\LlmChain\Model\Message\Message;
use PhpLlm\LlmChain\Model\Message\MessageBag;

// Initialize Platform, LLM & Chain

Expand Down Expand Up @@ -411,16 +414,15 @@ therefore LLM Chain implements a `EmbeddingsModel` interface with various models
The standalone usage results in an `Vector` instance:

```php
use PhpLlm\LlmChain\OpenAI\Model\Embeddings;
use PhpLlm\LlmChain\OpenAI\Model\Embeddings\Version;
use PhpLlm\LlmChain\Bridge\OpenAI\Embeddings;

// Initialize Platform

$embeddings = new Embeddings($platform, Version::textEmbedding3Small());
$embeddings = new Embeddings($platform, Embeddings::TEXT_3_SMALL);

$vector = $embeddings->create($textInput);
$vectors = $platform->request($embeddings, $textInput)->getContent();

dump($vector->getData()); // Array of float values
dump($vectors[0]->getData()); // Array of float values
```

#### Code Examples
Expand All @@ -436,9 +438,9 @@ interface. They are provided while instantiating the Chain instance:
```php
use PhpLlm\LlmChain\Chain;

// Initialize LLM and processors
// Initialize Platform, LLM and processors

$chain = new Chain($llm, $inputProcessors, $outputProcessors);
$chain = new Chain($platform, $llm, $inputProcessors, $outputProcessors);
```

#### InputProcessor
Expand All @@ -449,7 +451,7 @@ able to mutate both on top of the `Input` instance provided.
```php
use PhpLlm\LlmChain\Chain\Input;
use PhpLlm\LlmChain\Chain\InputProcessor;
use PhpLlm\LlmChain\Message\AssistantMessage
use PhpLlm\LlmChain\Model\Message\AssistantMessage;

final class MyProcessor implements InputProcessor
{
Expand All @@ -474,7 +476,7 @@ mutate or replace the given response:
```php
use PhpLlm\LlmChain\Chain\Output;
use PhpLlm\LlmChain\Chain\OutputProcessor;
use PhpLlm\LlmChain\Message\AssistantMessage
use PhpLlm\LlmChain\Model\Message\AssistantMessage;

final class MyProcessor implements OutputProcessor
{
Expand All @@ -499,7 +501,7 @@ use PhpLlm\LlmChain\Chain\ChainAwareProcessor;
use PhpLlm\LlmChain\Chain\ChainAwareTrait;
use PhpLlm\LlmChain\Chain\Output;
use PhpLlm\LlmChain\Chain\OutputProcessor;
use PhpLlm\LlmChain\Message\AssistantMessage
use PhpLlm\LlmChain\Model\Message\AssistantMessage;

final class MyProcessor implements OutputProcessor, ChainAwareProcessor
{
Expand Down
1 change: 1 addition & 0 deletions composer.json
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
"mongodb/mongodb": "^1.20",
"php-cs-fixer/shim": "^3.64",
"phpstan/phpstan": "^1.12",
"phpstan/phpstan-webmozart-assert": "^1.2",
"phpunit/phpunit": "^11.3",
"probots-io/pinecone-php": "^1.0",
"rector/rector": "^1.2",
Expand Down
15 changes: 7 additions & 8 deletions examples/chat-claude-anthropic.php
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
<?php

use PhpLlm\LlmChain\Bridge\Anthropic\Claude;
use PhpLlm\LlmChain\Bridge\Anthropic\PlatformFactory;
use PhpLlm\LlmChain\Chain;
use PhpLlm\LlmChain\Message\Message;
use PhpLlm\LlmChain\Message\MessageBag;
use PhpLlm\LlmChain\Model\Language\Claude;
use PhpLlm\LlmChain\Platform\Anthropic;
use PhpLlm\LlmChain\Model\Message\Message;
use PhpLlm\LlmChain\Model\Message\MessageBag;
use Symfony\Component\Dotenv\Dotenv;
use Symfony\Component\HttpClient\HttpClient;

require_once dirname(__DIR__).'/vendor/autoload.php';
(new Dotenv())->loadEnv(dirname(__DIR__).'/.env');
Expand All @@ -16,10 +15,10 @@
exit(1);
}

$platform = new Anthropic(HttpClient::create(), $_ENV['ANTHROPIC_API_KEY']);
$llm = new Claude($platform);
$platform = PlatformFactory::create($_ENV['ANTHROPIC_API_KEY']);
$llm = new Claude();

$chain = new Chain($llm);
$chain = new Chain($platform, $llm);
$messages = new MessageBag(
Message::forSystem('You are a pirate and you write funny.'),
Message::ofUser('What is the Symfony framework?'),
Expand Down
15 changes: 7 additions & 8 deletions examples/chat-gpt-azure.php
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
<?php

use PhpLlm\LlmChain\Bridge\Azure\OpenAI\PlatformFactory;
use PhpLlm\LlmChain\Bridge\OpenAI\GPT;
use PhpLlm\LlmChain\Chain;
use PhpLlm\LlmChain\Message\Message;
use PhpLlm\LlmChain\Message\MessageBag;
use PhpLlm\LlmChain\Model\Language\Gpt;
use PhpLlm\LlmChain\Platform\OpenAI\Azure;
use PhpLlm\LlmChain\Model\Message\Message;
use PhpLlm\LlmChain\Model\Message\MessageBag;
use Symfony\Component\Dotenv\Dotenv;
use Symfony\Component\HttpClient\HttpClient;

require_once dirname(__DIR__).'/vendor/autoload.php';
(new Dotenv())->loadEnv(dirname(__DIR__).'/.env');
Expand All @@ -17,15 +16,15 @@
exit(1);
}

$platform = new Azure(HttpClient::create(),
$platform = PlatformFactory::create(
$_ENV['AZURE_OPENAI_BASEURL'],
$_ENV['AZURE_OPENAI_DEPLOYMENT'],
$_ENV['AZURE_OPENAI_VERSION'],
$_ENV['AZURE_OPENAI_KEY'],
);
$llm = new Gpt($platform, Gpt::GPT_4O_MINI);
$llm = new GPT(GPT::GPT_4O_MINI);

$chain = new Chain($llm);
$chain = new Chain($platform, $llm);
$messages = new MessageBag(
Message::forSystem('You are a pirate and you write funny.'),
Message::ofUser('What is the Symfony framework?'),
Expand Down
15 changes: 7 additions & 8 deletions examples/chat-gpt-openai.php
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
<?php

use PhpLlm\LlmChain\Bridge\OpenAI\GPT;
use PhpLlm\LlmChain\Bridge\OpenAI\PlatformFactory;
use PhpLlm\LlmChain\Chain;
use PhpLlm\LlmChain\Message\Message;
use PhpLlm\LlmChain\Message\MessageBag;
use PhpLlm\LlmChain\Model\Language\Gpt;
use PhpLlm\LlmChain\Platform\OpenAI\OpenAI;
use PhpLlm\LlmChain\Model\Message\Message;
use PhpLlm\LlmChain\Model\Message\MessageBag;
use Symfony\Component\Dotenv\Dotenv;
use Symfony\Component\HttpClient\HttpClient;

require_once dirname(__DIR__).'/vendor/autoload.php';
(new Dotenv())->loadEnv(dirname(__DIR__).'/.env');
Expand All @@ -16,12 +15,12 @@
exit(1);
}

$platform = new OpenAI(HttpClient::create(), $_ENV['OPENAI_API_KEY']);
$llm = new Gpt($platform, Gpt::GPT_4O_MINI, [
$platform = PlatformFactory::create($_ENV['OPENAI_API_KEY']);
$llm = new GPT(GPT::GPT_4O_MINI, [
'temperature' => 0.5, // default options for the model
]);

$chain = new Chain($llm);
$chain = new Chain($platform, $llm);
$messages = new MessageBag(
Message::forSystem('You are a pirate and you write funny.'),
Message::ofUser('What is the Symfony framework?'),
Expand Down
15 changes: 7 additions & 8 deletions examples/chat-llama-ollama.php
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
<?php

use PhpLlm\LlmChain\Bridge\Meta\Llama;
use PhpLlm\LlmChain\Bridge\Ollama\PlatformFactory;
use PhpLlm\LlmChain\Chain;
use PhpLlm\LlmChain\Message\Message;
use PhpLlm\LlmChain\Message\MessageBag;
use PhpLlm\LlmChain\Model\Language\Llama;
use PhpLlm\LlmChain\Platform\Ollama;
use PhpLlm\LlmChain\Model\Message\Message;
use PhpLlm\LlmChain\Model\Message\MessageBag;
use Symfony\Component\Dotenv\Dotenv;
use Symfony\Component\HttpClient\HttpClient;

require_once dirname(__DIR__).'/vendor/autoload.php';
(new Dotenv())->loadEnv(dirname(__DIR__).'/.env');
Expand All @@ -16,10 +15,10 @@
exit(1);
}

$platform = new Ollama(HttpClient::create(), $_ENV['OLLAMA_HOST_URL']);
$llm = new Llama($platform);
$platform = PlatformFactory::create($_ENV['OLLAMA_HOST_URL']);
$llm = new Llama('llama3.2');

$chain = new Chain($llm);
$chain = new Chain($platform, $llm);
$messages = new MessageBag(
Message::forSystem('You are a helpful assistant.'),
Message::ofUser('Tina has one brother and one sister. How many sisters do Tina\'s siblings have?'),
Expand Down
Loading