1. Download the llm-agents/agents library from this page (choose the require download type), or install it with Composer: composer require llm-agents/agents.
2. Extract the ZIP file and open index.php.
3. Add the following code to index.php:
<?php
require_once('vendor/autoload.php');
/* Start to develop here. */
llm-agents/agents example snippets
use LLM\Agents\Agent\AgentAggregate;
use LLM\Agents\Agent\Agent;
use LLM\Agents\Solution\Model;
use LLM\Agents\Solution\ToolLink;
use LLM\Agents\Solution\MetadataType;
use LLM\Agents\Solution\SolutionMetadata;
class SiteStatusCheckerAgent extends AgentAggregate
{
public const NAME = 'site_status_checker';
public static function create(): self
{
$agent = new Agent(
key: self::NAME,
name: 'Site Status Checker',
description: 'This agent checks the online status of websites.',
instruction: 'You are a website status checking assistant. Your goal is to help users determine if a website is online. Use the provided tool to check site availability. Give clear, concise responses about a site\'s status.',
);
$aggregate = new self($agent);
$aggregate->addMetadata(
new SolutionMetadata(
type: MetadataType::Memory,
key: 'check_availability',
content: 'Always check the site\'s availability using the provided tool.',
),
new SolutionMetadata(
type: MetadataType::Configuration,
key: 'max_tokens',
content: 500,
)
);
$model = new Model(model: 'gpt-4o-mini');
$aggregate->addAssociation($model);
$aggregate->addAssociation(new ToolLink(name: CheckSiteAvailabilityTool::NAME));
return $aggregate;
}
}
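For orientation, a minimal sketch of using the agent defined above. Only the create() factory and the NAME constant shown in the snippet are assumed; in a real application the aggregate would typically be registered with the framework and resolved by the executor.

// Build the aggregate via the factory shown above.
$agent = SiteStatusCheckerAgent::create();

// The agent key declared on the class constant.
echo SiteStatusCheckerAgent::NAME; // "site_status_checker"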
use LLM\Agents\Tool\PhpTool;
use LLM\Agents\Tool\ToolLanguage;
/**
 * @extends PhpTool<CheckSiteAvailabilityInput>
 */
class CheckSiteAvailabilityTool extends PhpTool
{
public const NAME = 'check_site_availability';
public function __construct()
{
parent::__construct(
name: self::NAME,
inputSchema: CheckSiteAvailabilityInput::class,
description: 'This tool checks if a given URL is accessible and returns its HTTP status code and response time.',
);
}
public function getLanguage(): ToolLanguage
{
return ToolLanguage::PHP;
}
public function execute(object $input): string
{
$ch = curl_init($input->url);
curl_setopt_array($ch, [
CURLOPT_RETURNTRANSFER => true,
CURLOPT_HEADER => true,
CURLOPT_NOBODY => true,
CURLOPT_FOLLOWLOCATION => true,
CURLOPT_MAXREDIRS => 10,
CURLOPT_TIMEOUT => 30,
]);
$startTime = microtime(true);
$response = curl_exec($ch);
$endTime = microtime(true);
$statusCode = curl_getinfo($ch, CURLINFO_HTTP_CODE);
$responseTime = round(($endTime - $startTime) * 1000, 2);
curl_close($ch);
$isOnline = $statusCode >= 200 && $statusCode < 400;
return json_encode([
'status_code' => $statusCode,
'response_time_ms' => $responseTime,
'is_online' => $isOnline,
]);
}
}
use Spiral\JsonSchemaGenerator\Attribute\Field;
class CheckSiteAvailabilityInput
{
public function __construct(
#[Field(title: 'URL', description: 'The full URL of the website to check')]
public readonly string $url,
) {}
}
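As a rough illustration of the tool's input/output contract, the execute() method can be exercised by hand with the input class defined above. Within the framework the tool is normally invoked through a tool executor rather than called directly; this sketch only shows what goes in and what comes out.

// Illustration only: call the tool directly with the input DTO defined above.
$tool = new CheckSiteAvailabilityTool();
$result = $tool->execute(new CheckSiteAvailabilityInput(url: 'https://example.com'));

echo $result; // e.g. {"status_code":200,"response_time_ms":123.45,"is_online":true}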
use LLM\Agents\Solution\AgentLink;
class SiteStatusCheckerAgent extends AgentAggregate
{
public const NAME = 'site_status_checker';
public static function create(): self
{
// ... [previous agent setup code] ...
// Link to another agent
$aggregate->addAssociation(
new AgentLink(
name: 'network_diagnostics_agent',
outputSchema: NetworkDiagnosticsOutput::class,
),
);
return $aggregate;
}
}
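NetworkDiagnosticsOutput is not defined in these snippets. A hypothetical sketch of such an output schema class, following the same #[Field] attribute pattern used for the input classes above, could look like this; the property names are illustrative assumptions, not part of the library.

use Spiral\JsonSchemaGenerator\Attribute\Field;

// Hypothetical output schema for the linked agent; the fields below are
// illustrative assumptions only.
final class NetworkDiagnosticsOutput
{
    public function __construct(
        #[Field(title: 'Reachable', description: 'Whether the host is reachable.')]
        public readonly bool $reachable,
        #[Field(title: 'Details', description: 'Human-readable diagnostic details.')]
        public readonly string $details,
    ) {}
}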
use LLM\Agents\Tool\PhpTool;
use LLM\Agents\Agent\AgentExecutor;
use LLM\Agents\LLM\Prompt\Chat\ToolCallResultMessage;
use LLM\Agents\LLM\Response\ToolCalledResponse;
use LLM\Agents\Tool\ToolExecutor;
use LLM\Agents\Tool\ToolLanguage;
/**
* @extends PhpTool<AskAgentInput>
*/
final class AskAgentTool extends PhpTool
{
public const NAME = 'ask_agent';
public function __construct(
private readonly AgentExecutor $executor,
private readonly ToolExecutor $toolExecutor,
) {
parent::__construct(
name: self::NAME,
inputSchema: AskAgentInput::class,
description: 'Ask an agent with given name to execute a task.',
);
}
public function getLanguage(): ToolLanguage
{
return ToolLanguage::PHP;
}
public function execute(object $input): string|\Stringable
{
$prompt = \sprintf(
<<<'PROMPT'
%s
Important rules:
- Think before responding to the user.
- Do not mark up the content. Only JSON is allowed.
- Don't write anything except the answer using JSON schema.
- Answer in JSON using this schema:
%s
PROMPT
,
$input->question,
$input->outputSchema,
);
while (true) {
$execution = $this->executor->execute($input->name, $prompt);
$result = $execution->result;
$prompt = $execution->prompt;
if ($result instanceof ToolCalledResponse) {
foreach ($result->tools as $tool) {
$functionResult = $this->toolExecutor->execute($tool->name, $tool->arguments);
$prompt = $prompt->withAddedMessage(
new ToolCallResultMessage(
id: $tool->id,
content: [$functionResult],
),
);
}
continue;
}
break;
}
return \json_encode($result->content);
}
}
use Spiral\JsonSchemaGenerator\Attribute\Field;
final class AskAgentInput
{
public function __construct(
#[Field(title: 'Agent Name', description: 'The name of the agent to ask.')]
public string $name,
#[Field(title: 'Question', description: 'The question to ask the agent.')]
public string $question,
#[Field(title: 'Output Schema', description: 'The schema of the output.')]
public string $outputSchema,
) {}
}
use LLM\Agents\AgentExecutor\ExecutorInterface;
use LLM\Agents\LLM\Prompt\Chat\Prompt;
use LLM\Agents\LLM\Prompt\Chat\MessagePrompt;
class AgentRunner
{
public function __construct(
private ExecutorInterface $executor,
) {}
public function run(string $input): string
{
$prompt = new Prompt([
MessagePrompt::user($input),
]);
$execution = $this->executor->execute(
agent: MyAgent::NAME,
prompt: $prompt,
);
return (string)$execution->result->content;
}
}
// Usage
$agentRunner = new AgentRunner($executor);
$result = $agentRunner->run("Do something cool!");
echo $result;
use LLM\Agents\Solution\SolutionMetadata;
use LLM\Agents\Solution\MetadataType;
// In your agent creation method:
$aggregate->addMetadata(
new SolutionMetadata(
type: MetadataType::Memory,
key: 'user_preference',
content: 'The user prefers concise answers.',
),
new SolutionMetadata(
type: MetadataType::Prompt,
key: 'check_google',
content: 'Check the status of google.com.',
),
new SolutionMetadata(
type: MetadataType::Prompt,
key: 'check_yahoo',
content: 'Check the status of yahoo.com.',
),
//...
);
use LLM\Agents\AgentExecutor\ExecutorInterface;
use LLM\Agents\AgentExecutor\ExecutorPipeline;
use LLM\Agents\AgentExecutor\Interceptor\GeneratePromptInterceptor;
use LLM\Agents\AgentExecutor\Interceptor\InjectModelInterceptor;
use LLM\Agents\AgentExecutor\Interceptor\InjectOptionsInterceptor;
use LLM\Agents\AgentExecutor\Interceptor\InjectResponseIntoPromptInterceptor;
use LLM\Agents\AgentExecutor\Interceptor\InjectToolsInterceptor;
$executor = new ExecutorPipeline(...);
$executor = $executor->withInterceptor(
new GeneratePromptInterceptor(...),
new InjectModelInterceptor(...),
new InjectToolsInterceptor(...),
new InjectOptionsInterceptor(...),
new InjectResponseIntoPromptInterceptor(...),
);
$executor->execute(...);
use LLM\Agents\AgentExecutor\ExecutorInterceptorInterface;
use LLM\Agents\AgentExecutor\ExecutionInput;
use LLM\Agents\AgentExecutor\InterceptorHandler;
use LLM\Agents\Agent\Execution;
use LLM\Agents\LLM\Prompt\Chat\Prompt;
use LLM\Agents\LLM\Response\ChatResponse;
use Psr\Log\LoggerInterface;
class TokenCounterInterceptor implements ExecutorInterceptorInterface
{
public function __construct(
private TokenCounterInterface $tokenCounter,
private LoggerInterface $logger,
) {}
public function execute(ExecutionInput $input, InterceptorHandler $next): Execution
{
// Count tokens in the input prompt
$promptTokens = $this->tokenCounter->count((string) $input->prompt);
// Execute the next interceptor in the chain
$execution = $next($input);
// Count tokens in the response
$responseTokens = 0;
if ($execution->result instanceof ChatResponse) {
$responseTokens = $this->tokenCounter->count((string) $execution->result->content);
}
// Log the token counts
$this->logger->info('Token usage', [
'prompt_tokens' => $promptTokens,
'response_tokens' => $responseTokens,
'total_tokens' => $promptTokens + $responseTokens,
]);
return $execution;
}
}
use Psr\Log\LoggerInterface;
// Assume you have implementations of TokenCounterInterface and LoggerInterface
$tokenCounter = new MyTokenCounter();
$logger = new MyLogger();
$executor = $executor->withInterceptor(
new TokenCounterInterceptor($tokenCounter, $logger),
);
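TokenCounterInterface is left to your application. If your codebase does not already provide one, a very rough sketch that satisfies the count() call used in the interceptor above might look like this; a real implementation would delegate to a proper tokenizer, and everything beyond the count() signature is an assumption.

// Hypothetical interface and implementation; only the count() call made by the
// interceptor above is dictated by the example.
interface TokenCounterInterface
{
    public function count(string $text): int;
}

final class MyTokenCounter implements TokenCounterInterface
{
    public function count(string $text): int
    {
        // Crude approximation: roughly 4 characters per token.
        return (int) \ceil(\strlen($text) / 4);
    }
}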
use LLM\Agents\LLM\ContextInterface;
use LLM\Agents\LLM\LLMInterface;
use LLM\Agents\LLM\OptionsInterface;
use LLM\Agents\LLM\Prompt\Chat\MessagePrompt;
use LLM\Agents\LLM\Prompt\Chat\PromptInterface as ChatPromptInterface;
use LLM\Agents\LLM\Prompt\PromptInterface;
use LLM\Agents\LLM\Prompt\Tool;
use LLM\Agents\LLM\Response\Response;
use OpenAI\Client;
final readonly class OpenAILLM implements LLMInterface
{
public function __construct(
private Client $client,
private MessageMapper $messageMapper,
private StreamResponseParser $streamParser,
) {}
public function generate(
ContextInterface $context,
PromptInterface $prompt,
OptionsInterface $options,
): Response {
$request = $this->buildOptions($options);
$messages = $prompt instanceof ChatPromptInterface
? $prompt->format()
: [MessagePrompt::user($prompt)->toChatMessage()];
$request['messages'] = array_map(
fn($message) => $this->messageMapper->map($message),
$messages
);
if ($options->has('tools')) {
$request['tools'] = array_values(array_map(
fn(Tool $tool): array => $this->messageMapper->map($tool),
$options->get('tools')
));
}
$stream = $this->client->chat()->createStreamed($request);
return $this->streamParser->parse($stream);
}
private function buildOptions(OptionsInterface $options): array
{
$defaultOptions = [
'temperature' => 0.8,
'max_tokens' => 120,
'model' => null,
// Add other default options as needed
];
$result = array_intersect_key($options->getIterator()->getArrayCopy(), $defaultOptions);
$result += array_diff_key($defaultOptions, $result);
if (!isset($result['model'])) {
throw new \InvalidArgumentException('Model is required in options.');
}
return $result;
}
}
use LLM\Agents\LLM\Prompt\Chat\ChatMessage;
use LLM\Agents\LLM\Prompt\Chat\Role;
use LLM\Agents\LLM\Prompt\Chat\ToolCalledPrompt;
use LLM\Agents\LLM\Prompt\Chat\ToolCallResultMessage;
use LLM\Agents\LLM\Prompt\Tool;
use LLM\Agents\LLM\Response\ToolCall;
final readonly class MessageMapper
{
public function map(object $message): array
{
if ($message instanceof ChatMessage) {
return [
'content' => $message->content,
'role' => $message->role->value,
];
}
if ($message instanceof ToolCallResultMessage) {
return [
'content' => \is_array($message->content) ? \json_encode($message->content) : $message->content,
'tool_call_id' => $message->id,
'role' => $message->role->value,
];
}
if ($message instanceof ToolCalledPrompt) {
return [
'content' => null,
'role' => Role::Assistant->value,
'tool_calls' => \array_map(
static fn(ToolCall $tool): array => [
'id' => $tool->id,
'type' => 'function',
'function' => [
'name' => $tool->name,
'arguments' => $tool->arguments,
],
],
$message->tools,
),
];
}
if ($message instanceof Tool) {
return [
'type' => 'function',
'function' => [
'name' => $message->name,
'description' => $message->description,
'parameters' => [
'type' => 'object',
'additionalProperties' => $message->additionalProperties,
] + $message->parameters,
'strict' => $message->strict,
],
];
}
if ($message instanceof \JsonSerializable) {
return $message->jsonSerialize();
}
throw new \InvalidArgumentException('Invalid message type');
}
}
use LLM\Agents\PromptGenerator\Interceptors\AgentMemoryInjector;
use LLM\Agents\PromptGenerator\Interceptors\InstructionGenerator;
use LLM\Agents\PromptGenerator\Interceptors\LinkedAgentsInjector;
use LLM\Agents\PromptGenerator\Interceptors\UserPromptInjector;
use LLM\Agents\PromptGenerator\PromptGeneratorPipeline;
use Spiral\Boot\Bootloader\Bootloader; // assumed base class from the Spiral framework
class PromptGeneratorBootloader extends Bootloader
{
public function defineSingletons(): array
{
return [
PromptGeneratorPipeline::class => static function (
LinkedAgentsInjector $linkedAgentsInjector,
): PromptGeneratorPipeline {
$pipeline = new PromptGeneratorPipeline();
return $pipeline->withInterceptor(
new InstructionGenerator(),
new AgentMemoryInjector(),
$linkedAgentsInjector,
new UserPromptInjector(),
// Add more interceptors as needed
);
},
];
}
}
use LLM\Agents\LLM\ContextFactoryInterface;
use LLM\Agents\LLM\ContextInterface;
final class ContextFactory implements ContextFactoryInterface
{
public function create(): ContextInterface
{
return new class implements ContextInterface {
// Implement any necessary methods or properties for your context
};
}
}
use LLM\Agents\LLM\OptionsFactoryInterface;
use LLM\Agents\LLM\OptionsInterface;
final class OptionsFactory implements OptionsFactoryInterface
{
public function create(): OptionsInterface
{
return new class implements OptionsInterface {
private array $options = [];
public function has(string $option): bool
{
return isset($this->options[$option]);
}
public function get(string $option, mixed $default = null): mixed
{
return $this->options[$option] ?? $default;
}
public function with(string $option, mixed $value): static
{
$clone = clone $this;
$clone->options[$option] = $value;
return $clone;
}
public function getIterator(): \Traversable
{
return new \ArrayIterator($this->options);
}
};
}
}
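A short usage note on the options object created above: with() is immutable and returns a clone, so the result of each call must be captured.

$options = (new OptionsFactory())->create()
    ->with('model', 'gpt-4o-mini')
    ->with('temperature', 0.7);

var_dump($options->has('model'));       // bool(true)
var_dump($options->get('temperature')); // float(0.7)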