1. Go to this page and download the php-graph/inference library, choosing the download type "require".
2. Extract the ZIP file and open index.php.
3. Add the following code to index.php:
<?php
require_once('vendor/autoload.php');
/* Start developing here. */
php-graph/inference example snippets

// Ollama: multi-turn chat (non-streaming)
use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Chat\Request\ChatRequest;

$resource = ProviderFactory::createOllamaResource('http://localhost:11434/');

$request = new ChatRequest([
    'model' => 'devstral',
    'messages' => [
        [
            'role' => 'user',
            'content' => 'why is the sky blue?'
        ],
        [
            'role' => 'assistant',
            'content' => 'due to rayleigh scattering.'
        ],
        [
            'role' => 'user',
            'content' => 'how is that different than mie scattering?'
        ],
    ],
    'stream' => false,
]);

$response = $resource->chat()->execute($request);
echo $response->toArray()['message']['content'];

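The snippet above assumes the Ollama server at http://localhost:11434/ is reachable and that the requested model has been pulled. A defensive variation is sketched below; it catches the generic \Throwable because the library's specific exception classes are not shown in these snippets, so treat the catch clause as an assumption rather than the library's documented error contract.

use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Chat\Request\ChatRequest;

$resource = ProviderFactory::createOllamaResource('http://localhost:11434/');

$request = new ChatRequest([
    'model' => 'devstral',
    'messages' => [
        ['role' => 'user', 'content' => 'why is the sky blue?'],
    ],
    'stream' => false,
]);

try {
    $response = $resource->chat()->execute($request);
    echo $response->toArray()['message']['content'];
} catch (\Throwable $e) {
    // Network failures, unknown models, malformed responses, etc. end up here.
    echo 'Inference request failed: ' . $e->getMessage();
}
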
// Ollama: multi-turn chat (streaming)
use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Chat\Request\ChatRequest;
use phpGraph\Inference\Chat\Response\ChatResponse;
use phpGraph\Inference\Chat\Response\Stream\ChatStreamFunctionHandler;

$resource = ProviderFactory::createOllamaResource('http://localhost:11434/');

$request = new ChatRequest([
    'model' => 'devstral',
    'messages' => [
        [
            'role' => 'user',
            'content' => 'why is the sky blue?'
        ],
        [
            'role' => 'assistant',
            'content' => 'due to rayleigh scattering.'
        ],
        [
            'role' => 'user',
            'content' => 'how is that different than mie scattering?'
        ],
    ],
    'stream' => true,
]);

$streamHandler = new ChatStreamFunctionHandler(
    function (ChatResponse $response) {
        // Called once per streamed chunk.
        echo $response->toArray()['message']['content'];
    }
);

$resource->chatStreamed()->execute($request, $streamHandler);

// Ollama: embeddings
use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Embed\Request\EmbedRequest;

$resource = ProviderFactory::createOllamaResource('http://localhost:11434/');

$request = new EmbedRequest([
    'model' => 'nomic-embed-text',
    'prompt' => 'This is a test sentence for embedding.',
]);

// The embedding is an array of floats, so print it instead of echoing it.
print_r($resource->embed()->execute($request)->toArray()['embedding']);

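An embedding is just an array of floats, which makes texts easy to compare numerically. The following is a minimal sketch of cosine similarity between two embeddings; the embedText() helper is made up for illustration, and it assumes, as in the snippet above, that toArray() exposes the raw Ollama response under the 'embedding' key.

use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Embed\Request\EmbedRequest;

// Hypothetical helper: fetch the embedding vector for a single prompt.
function embedText($resource, string $model, string $text): array
{
    $request = new EmbedRequest([
        'model' => $model,
        'prompt' => $text,
    ]);

    return $resource->embed()->execute($request)->toArray()['embedding'];
}

// Cosine similarity between two equal-length vectors.
function cosineSimilarity(array $a, array $b): float
{
    $dot = 0.0;
    $normA = 0.0;
    $normB = 0.0;

    foreach ($a as $i => $value) {
        $dot += $value * $b[$i];
        $normA += $value ** 2;
        $normB += $b[$i] ** 2;
    }

    return $dot / (sqrt($normA) * sqrt($normB));
}

$resource = ProviderFactory::createOllamaResource('http://localhost:11434/');

$first = embedText($resource, 'nomic-embed-text', 'This is a test sentence for embedding.');
$second = embedText($resource, 'nomic-embed-text', 'Another sentence to embed.');

echo cosineSimilarity($first, $second);
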
// OpenAI: multi-turn chat (non-streaming)
use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Chat\Request\ChatRequest;

$apiKey = getenv('OPENAI_API_KEY');
$resource = ProviderFactory::createOpenAiResource($apiKey);

$request = new ChatRequest([
    'model' => 'gpt-4o',
    'messages' => [
        [
            'role' => 'user',
            'content' => 'why is the sky blue?'
        ],
        [
            'role' => 'assistant',
            'content' => 'due to rayleigh scattering.'
        ],
        [
            'role' => 'user',
            'content' => 'how is that different than mie scattering?'
        ],
    ],
    'stream' => false,
]);

$response = $resource->chat()->execute($request);
echo $response->toArray()['choices'][0]['message']['content'];

// OpenAI: multi-turn chat (streaming)
use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Chat\Request\ChatRequest;
use phpGraph\Inference\Chat\Response\ChatResponse;
use phpGraph\Inference\Chat\Response\Stream\ChatStreamFunctionHandler;

$apiKey = getenv('OPENAI_API_KEY');
$resource = ProviderFactory::createOpenAiResource($apiKey);

$request = new ChatRequest([
    'model' => 'gpt-4o',
    'messages' => [
        [
            'role' => 'user',
            'content' => 'why is the sky blue?'
        ],
        [
            'role' => 'assistant',
            'content' => 'due to rayleigh scattering.'
        ],
        [
            'role' => 'user',
            'content' => 'how is that different than mie scattering?'
        ],
    ],
    'stream' => true,
]);

$streamHandler = new ChatStreamFunctionHandler(
    function (ChatResponse $response) {
        // The final chunk carries no delta content, hence the null coalescing.
        echo $response->toArray()['choices'][0]['delta']['content'] ?? '';
    }
);

$resource->chatStreamed()->execute($request, $streamHandler);

// OpenAI: embeddings
use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Embed\Request\EmbedRequest;

$apiKey = getenv('OPENAI_API_KEY');
$resource = ProviderFactory::createOpenAiResource($apiKey);

$request = new EmbedRequest([
    'model' => 'text-embedding-3-small',
    'input' => [
        'This is a test sentence for embedding.',
        'Another sentence to embed.'
    ],
]);

// toArray() returns a nested array, so print it instead of echoing it.
print_r($resource->embed()->execute($request)->toArray());

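To work with the individual vectors, the response has to be unpacked. The sketch below assumes that toArray() mirrors the raw OpenAI embeddings response, with one vector per input string under data[i]['embedding']; that assumption is consistent with the chat snippets above reading choices[0], but verify it against the actual response of your installed version.

use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Embed\Request\EmbedRequest;

$apiKey = getenv('OPENAI_API_KEY');
$resource = ProviderFactory::createOpenAiResource($apiKey);

$request = new EmbedRequest([
    'model' => 'text-embedding-3-small',
    'input' => [
        'This is a test sentence for embedding.',
        'Another sentence to embed.'
    ],
]);

$result = $resource->embed()->execute($request)->toArray();

// One vector per input string, in the same order as the 'input' array.
foreach ($result['data'] as $i => $item) {
    echo sprintf("input #%d -> %d dimensions\n", $i, count($item['embedding']));
}
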
// Mistral: multi-turn chat (non-streaming)
use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Chat\Request\ChatRequest;

$apiKey = getenv('MISTRAL_API_KEY');
$resource = ProviderFactory::createMistralResource($apiKey);

$request = new ChatRequest([
    'model' => 'mistral-large-latest',
    'messages' => [
        [
            'role' => 'user',
            'content' => 'why is the sky blue?'
        ],
        [
            'role' => 'assistant',
            'content' => 'due to rayleigh scattering.'
        ],
        [
            'role' => 'user',
            'content' => 'how is that different than mie scattering?'
        ],
    ],
    'stream' => false,
]);

$response = $resource->chat()->execute($request);
echo $response->toArray()['choices'][0]['message']['content'];

// Mistral: multi-turn chat (streaming)
use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Chat\Request\ChatRequest;
use phpGraph\Inference\Chat\Response\ChatResponse;
use phpGraph\Inference\Chat\Response\Stream\ChatStreamFunctionHandler;

$apiKey = getenv('MISTRAL_API_KEY');
$resource = ProviderFactory::createMistralResource($apiKey);

$request = new ChatRequest([
    'model' => 'mistral-large-latest',
    'messages' => [
        [
            'role' => 'user',
            'content' => 'why is the sky blue?'
        ],
        [
            'role' => 'assistant',
            'content' => 'due to rayleigh scattering.'
        ],
        [
            'role' => 'user',
            'content' => 'how is that different than mie scattering?'
        ],
    ],
    'stream' => true,
]);

$streamHandler = new ChatStreamFunctionHandler(
    function (ChatResponse $response) {
        // The final chunk carries no delta content, hence the null coalescing.
        echo $response->toArray()['choices'][0]['delta']['content'] ?? '';
    }
);

$resource->chatStreamed()->execute($request, $streamHandler);

// Mistral: embeddings
use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Embed\Request\EmbedRequest;

$apiKey = getenv('MISTRAL_API_KEY');
$resource = ProviderFactory::createMistralResource($apiKey);

$request = new EmbedRequest([
    'model' => 'mistral-embed',
    'input' => [
        'This is a test sentence for embedding.',
        'Another sentence to embed.'
    ],
]);

// toArray() returns a nested array, so print it instead of echoing it.
print_r($resource->embed()->execute($request)->toArray());

// DeepSeek: multi-turn chat (non-streaming)
use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Chat\Request\ChatRequest;

$apiKey = getenv('DEEPSEEK_API_KEY');
$resource = ProviderFactory::createDeepSeekResource($apiKey);

$request = new ChatRequest([
    'model' => 'deepseek-chat',
    'messages' => [
        [
            'role' => 'user',
            'content' => 'why is the sky blue?'
        ],
        [
            'role' => 'assistant',
            'content' => 'due to rayleigh scattering.'
        ],
        [
            'role' => 'user',
            'content' => 'how is that different than mie scattering?'
        ],
    ],
    'stream' => false,
]);

$response = $resource->chat()->execute($request);
echo $response->toArray()['choices'][0]['message']['content'];

// DeepSeek: multi-turn chat (streaming)
use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Chat\Request\ChatRequest;
use phpGraph\Inference\Chat\Response\ChatResponse;
use phpGraph\Inference\Chat\Response\Stream\ChatStreamFunctionHandler;

$apiKey = getenv('DEEPSEEK_API_KEY');
$resource = ProviderFactory::createDeepSeekResource($apiKey);

$request = new ChatRequest([
    'model' => 'deepseek-chat',
    'messages' => [
        [
            'role' => 'user',
            'content' => 'why is the sky blue?'
        ],
        [
            'role' => 'assistant',
            'content' => 'due to rayleigh scattering.'
        ],
        [
            'role' => 'user',
            'content' => 'how is that different than mie scattering?'
        ],
    ],
    'stream' => true,
]);

$streamHandler = new ChatStreamFunctionHandler(
    function (ChatResponse $response) {
        // The final chunk carries no delta content, hence the null coalescing.
        echo $response->toArray()['choices'][0]['delta']['content'] ?? '';
    }
);

$resource->chatStreamed()->execute($request, $streamHandler);

// DeepSeek: embeddings
use phpGraph\Inference\ProviderFactory;
use phpGraph\Inference\Embed\Request\EmbedRequest;

$apiKey = getenv('DEEPSEEK_API_KEY');
$resource = ProviderFactory::createDeepSeekResource($apiKey);

$request = new EmbedRequest([
    'model' => 'deepseek-embedding',
    'input' => [
        'This is a test sentence for embedding.',
        'Another sentence to embed.'
    ],
]);

// toArray() returns a nested array, so print it instead of echoing it.
print_r($resource->embed()->execute($request)->toArray());

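Because every provider is driven through the same ChatRequest and resource API, the provider can be swapped without touching the calling code. Below is a minimal sketch of such a helper; askOnce() is a made-up name, the $resource parameter is left untyped because the resource class name is not shown in these snippets, and the null coalescing bridges the fact that Ollama returns the answer under message while the OpenAI-compatible providers return it under choices[0]['message'].

use phpGraph\Inference\Chat\Request\ChatRequest;
use phpGraph\Inference\ProviderFactory;

// Hypothetical helper: ask a single question against any provider resource.
function askOnce($resource, string $model, string $question): string
{
    $request = new ChatRequest([
        'model' => $model,
        'messages' => [
            ['role' => 'user', 'content' => $question],
        ],
        'stream' => false,
    ]);

    $data = $resource->chat()->execute($request)->toArray();

    // Ollama-style response first, then the OpenAI/Mistral/DeepSeek shape.
    return $data['message']['content']
        ?? $data['choices'][0]['message']['content']
        ?? '';
}

echo askOnce(
    ProviderFactory::createOllamaResource('http://localhost:11434/'),
    'devstral',
    'why is the sky blue?'
);

echo askOnce(
    ProviderFactory::createOpenAiResource(getenv('OPENAI_API_KEY')),
    'gpt-4o',
    'why is the sky blue?'
);
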
// Symfony console example: an interactive chat command with tool calling via Ollama.
<?php

declare(strict_types=1);

namespace App\Command;

use phpGraph\Inference\Chat\Request\ChatRequest;
use phpGraph\Inference\Chat\Response\ChatResponse;
use phpGraph\Inference\Chat\Response\Stream\ChatStreamFunctionHandler;
use phpGraph\Inference\ProviderFactory;
use Symfony\Component\Console\Attribute\AsCommand;
use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
use Symfony\Component\Console\Style\SymfonyStyle;

#[AsCommand(name: 'inference:chat')]
class InferenceChatCommand extends Command
{
    /**
     * @param InputInterface $input
     * @param OutputInterface $output
     *
     * @return int
     */
    protected function execute(InputInterface $input, OutputInterface $output): int
    {
        $io = new SymfonyStyle($input, $output);

        $ollamaResource = ProviderFactory::createOllamaResource('http://localhost:11434');
        $ollamaModel = 'mistral-small3.1';

        $messages = $baseMessage = [[
            'role' => 'system',
            'content' => 'You are an intelligent AI agent named Symfony.'
        ]];

        $tools = [
            [
                'type' => 'function',
                'function' => [
                    'name' => 'currentDateTime',
                    'description' => 'Provides the UTC date and time in Y-m-d H:i:s format',
                ],
            ],
        ];

        $options = [
            'temperature' => 0,
        ];

        while (true) {
            $message = $io->ask('message');

            if ($message === '/bye') {
                break;
            }

            if ($message === '/reset') {
                $messages = $baseMessage;
                continue;
            }

            $messages[] = [
                'role' => 'user',
                'content' => $message,
            ];

            reload:

            $ollamaRequest = new ChatRequest([
                'model' => $ollamaModel,
                'messages' => $messages,
                'tools' => $tools,
                'options' => $options,
                'stream' => true,
            ]);

            $hasTools = false;

            $ollamaStreamHandler = new ChatStreamFunctionHandler(
                function (ChatResponse $chatResponse) use ($output, &$messages, &$hasTools) {
                    $message = $chatResponse->get('message');

                    // When the model requests a tool, call the matching method
                    // and feed the result back into the conversation as a 'tool' message.
                    if (isset($message['tool_calls'])) {
                        foreach ($message['tool_calls'] as $tool) {
                            $name = $tool['function']['name'];
                            $arguments = $tool['function']['arguments'];
                            $content = call_user_func_array([$this, $name], $arguments);
                            $messages[] = [
                                'role' => 'tool',
                                'content' => $content,
                            ];
                        }
                        $hasTools = true;
                    }

                    $content = $message['content'] ?? '';
                    $output->write($content);

                    return $content;
                }
            );

            $ollamaResource->chatStreamed()->execute($ollamaRequest, $ollamaStreamHandler);

            // A tool was executed: re-run the request so the model can use its result.
            if ($hasTools) {
                goto reload;
            }

            $messages[] = [
                'role' => 'assistant',
                'content' => $ollamaStreamHandler->getContent(),
            ];

            // Keep the conversation history bounded.
            $messages = array_slice($messages, -50, 50);

            $output->writeln('');
            $output->writeln('');
        }

        return Command::SUCCESS;
    }

    /**
     * @return string
     */
    public function currentDateTime(): string
    {
        return (new \DateTime('now', new \DateTimeZone('UTC')))->format('Y-m-d H:i:s');
    }
}

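The command above registers a single tool that takes no arguments. A tool with arguments is declared the same way, with an added JSON-schema 'parameters' block, and is dispatched by the same tool_calls loop because call_user_func_array() maps the decoded argument array onto named parameters. The addNumbers tool below is made up for illustration; the declaration format follows the function-calling schema used by Ollama and the OpenAI-compatible APIs, so check which fields your model actually honors.

// Added to the $tools array inside execute():
$tools[] = [
    'type' => 'function',
    'function' => [
        'name' => 'addNumbers',
        'description' => 'Adds two numbers and returns the sum.',
        'parameters' => [
            'type' => 'object',
            'properties' => [
                'a' => ['type' => 'number', 'description' => 'First operand'],
                'b' => ['type' => 'number', 'description' => 'Second operand'],
            ],
            'required' => ['a', 'b'],
        ],
    ],
];

// Matching method on InferenceChatCommand; the parameter names must match
// the property names declared above so the decoded arguments map onto them.
public function addNumbers(float $a, float $b): string
{
    return (string) ($a + $b);
}
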
Instead of downloading the ZIP in step 1, the package can also be installed directly with Composer:

composer require php-graph/inference