PHP code example of moe-mizrak / laravel-openrouter

1. Go to this page and download the moe-mizrak/laravel-openrouter library. Choose the download type `require` (Composer).

2. Extract the ZIP file and open index.php.

3. Add this code to the index.php.
    
        
<?php
require_once('vendor/autoload.php');

/* Start to develop here. Best regards https://php-download.com/ */

    

moe-mizrak / laravel-openrouter example snippets


// Published package config: each value is read from .env, with a fallback default where one makes sense.
return [
    'api_endpoint' => env('OPENROUTER_API_ENDPOINT', 'https://openrouter.ai/api/v1/'), // OpenRouter REST base URL
    'api_key'      => env('OPENROUTER_API_KEY'), // your OpenRouter API key (no default - must be set)
    'api_timeout'  => env('OPENROUTER_API_TIMEOUT', 20) // request timeout in seconds
];

// Exhaustive ChatData example showing the available request fields
// (see the OpenRouter chat-completion API reference for each parameter's semantics).
$chatData = new ChatData([
    'messages' => [
        new MessageData([
            'role'    => RoleType::USER,
            // Content may mix text and image parts for multimodal models.
            'content' => [
                new TextContentData([
                    'type' => TextContentData::ALLOWED_TYPE,
                    'text' => 'This is a sample text content.',
                ]),
                new ImageContentPartData([
                    'type'      => ImageContentPartData::ALLOWED_TYPE,
                    'image_url' => new ImageUrlData([
                        'url'    => 'https://example.com/image.jpg',
                        'detail' => 'Sample image',
                    ]),
                ]),
            ],
        ]),
    ],
    'response_format' => new ResponseFormatData([
        'type' => 'json_object', // ask the model to emit valid JSON
    ]),
    'stop' => ['stop_token'],
    'stream' => true,
    'max_tokens' => 1024,
    'temperature' => 0.7,
    'top_p' => 0.9,
    'top_k' => 50,
    'frequency_penalty' => 0.5,
    'presence_penalty' => 0.2,
    'repetition_penalty' => 1.2,
    'seed' => 42,
    'tool_choice' => 'auto',
    'tools' => [
        // ToolCallData instances
    ],
    'logit_bias' => [
        '50256' => -100,
    ],
    'transforms' => ['middle-out'],
    'models' => ['model1', 'model2'],
    'route' => RouteType::FALLBACK,
    // NOTE(review): the original snippet was truncated mid-array here; the fields
    // below are restored from the OpenRouter provider-preferences docs - verify
    // against the package's ProviderPreferencesData DTO.
    'provider' => new ProviderPreferencesData([
        'allow_fallbacks'    => true,
        'require_parameters' => true,
        'data_collection'    => DataCollectionType::ALLOW,
    ]),
]);

// Minimal non-streaming chat completion via the LaravelOpenRouter facade.
$prompt = 'Tell me a story about a rogue AI that falls in love with its creator.';
$model  = 'mistralai/mistral-7b-instruct:free'; // Any model from https://openrouter.ai/docs#models

$message = new MessageData([
    'content' => $prompt,
    'role'    => RoleType::USER,
]);

$chatData = new ChatData([
    'messages'   => [$message],
    'model'      => $model,
    'max_tokens' => 100, // Cap on generated tokens - tune as needed
]);

// Synchronous request; returns a response DTO (see filterStreamingResponse notes below for the shape).
$chatResponse = LaravelOpenRouter::chatRequest($chatData);

// Streaming variant: identical payload, but the call returns a promise that
// resolves to a PSR-7 stream of server-sent-event chunks.
$prompt = 'Tell me a story about a rogue AI that falls in love with its creator.';
$model  = 'mistralai/mistral-7b-instruct:free'; // Any model from https://openrouter.ai/docs#models

$chatData = new ChatData([
    'messages'   => [
        new MessageData([
            'content' => $prompt,
            'role'    => RoleType::USER,
        ]),
    ],
    'model'      => $model,
    'max_tokens' => 100, // Cap on generated tokens - tune as needed
]);

// chatStreamRequest returns a PromiseInterface rather than a resolved response.
$promise = LaravelOpenRouter::chatStreamRequest($chatData);

// Resolve the promise; $stream is a GuzzleHttp\Psr7\Stream.
$stream = $promise->wait();

/*
 * Option 1) Drain the whole stream at once instead of chunking it, then
 * optionally normalise the raw payload into an array of responseData DTOs
 * (same shape as a chatRequest response) with filterStreamingResponse.
 * Choose option 1) or 2) depending on your case.
 */
$rawResponseAll = $stream->getContents();
$response = LaravelOpenRouter::filterStreamingResponse($rawResponseAll);

/*
 * Option 2) Read chunks as they become available. A larger read size
 * (e.g. 4096 bytes / 4 kB) can give better throughput.
 */
while (! $stream->eof()) {
    $rawResponse = $stream->read(1024);
    // Optional: map each raw chunk into responseData DTOs.
    $response = LaravelOpenRouter::filterStreamingResponse($rawResponse);
}

"""
: OPENROUTER PROCESSING\n
\n
data: {"id":"gen-eWgGaEbIzFq4ziGGIsIjyRtLda54","model":"mistralai/mistral-7b-instruct:free","object":"chat.completion.chunk","created":1718885921,"choices":[{"index":0,"delta":{"role":"assistant","content":"Title"},"finish_reason":null}]}\n
\n
data: {"id":"gen-eWgGaEbIzFq4ziGGIsIjyRtLda54","model":"mistralai/mistral-7b-instruct:free","object":"chat.completion.chunk","created":1718885921,"choices":[{"index":0,"delta":{"role":"assistant","content":": Quant"},"finish_reason":null}]}\n
\n
data: {"id":"gen-eWgGaEbIzFq4ziGGIsIjyRtLda54","model":"mistralai/mistral-7b-instruct:free","object":"chat.completion.chunk","created":1718885921,"choices":[{"index":0,"delta":{"role":"assistant","content":"um Echo"},"finish_reason":null}]}\n
\n
data: {"id":"gen-eWgGaEbIzFq4ziGGIsIjyRtLda54","model":"mistralai/mistral-7b-instruct:free","object":"chat.completion.chunk","created":1718885921,"choices":[{"index":0,"delta":{"role":"assistant","content":": A Sym"},"finish_reason":null}]}\n
\n
data: {"id":"gen-eWgGaEbIzFq4ziGG
"""

"""
IsIjyRtLda54","model":"mistralai/mistral-7b-instruct:free","object":"chat.completion.chunk","created":1718885921,"choices":[{"index":0,"delta":{"role":"assistant","content":"phony of Code"},"finish_reason":null}]}\n
\n
data: {"id":"gen-eWgGaEbIzFq4ziGGIsIjyRtLda54","model":"mistralai/mistral-7b-instruct:free","object":"chat.completion.chunk","created":1718885921,"choices":[{"index":0,"delta":{"role":"assistant","content":"\n\nIn"},"finish_reason":null}]}\n
\n
data: {"id":"gen-eWgGaEbIzFq4ziGGIsIjyRtLda54","model":"mistralai/mistral-7b-instruct:free","object":"chat.completion.chunk","created":1718885921,"choices":[{"index":0,"delta":{"role":"assistant","content":" the heart of"},"finish_reason":null}]}\n
\n
data: {"id":"gen-eWgGaEbIzFq4ziGGIsIjyRtLda54","model":"mistralai/mistral-7b-instruct:free","object":"chat.completion.chunk","created":1718885921,"choices":[{"index":0,"delta":{"role":"assistant","content":" the bustling"},"finish_reason":null}]}\n
\n
data: {"id":"gen-eWgGaEbIzFq4ziGGIsIjyRtLda54","model":"mistralai/mistra
"""

"""
l-7b-instruct:free","object":"chat.completion.chunk","created":1718885921,"choices":[{"index":0,"delta":{"role":"assistant","content":" city of Ne"},"finish_reason":null}]}\n
\n
data: {"id":"gen-eWgGaEbIzFq4ziGGIsIjyRtLda54","model":"mistralai/mistral-7b-instruct:free","object":"chat.completion.chunk","created":1718885921,"choices":[{"index":0,"delta":{"role":"assistant","content":"o-Tok"},"finish_reason":null}]}\n
\n
data: {"id":"gen-eWgGaEbIzFq4ziGGIsIjyRtLda54","model":"mistralai/mistral-7b-instruct:free","object":"chat.completion.chunk","created":1718885921,"choices":[{"index":0,"delta":{"role":"assistant","content":"yo, a"},"finish_reason":null}]}\n
\n
data: {"id":"gen-eWgGaEbIzFq4ziGGIsIjyRtLda54","model":"mistralai/mistral-7b-instruct:free","object":"chat.completion.chunk","created":1718885921,"choices":[{"index":0,"delta":{"role":"assistant","content":" brilliant young research"},"finish_reason":null}]}\n
\n
data: {"id":"gen-eWgGaEbIzFq4ziGGIsIjyRtLda54","model":"mistralai/mistral-7b-instruct:free","object":"chat.com
"""
...

: OPENROUTER PROCESSING\n
\n
data: {"id":"gen-C6Xym94jZcvJv2vVpxYSyw2tV1fR","model":"mistralai/mistral-7b-instruct:free","object":"chat.completion.chunk","created":1718887189,"choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}],"usage":{"prompt_tokens":23,"completion_tokens":100,"total_tokens":123}}\n
\n
data: [DONE]\n 

// Maintaining conversation history: replay an earlier assistant reply so the
// model can use it as context for a follow-up question.
$model = 'mistralai/mistral-7b-instruct:free';

// Seed message - the fact we want the LLM to remember later.
$firstMessage = new MessageData([
    'content' => 'My name is Moe, the AI necromancer.',
    'role'    => RoleType::USER,
]);

$chatData = new ChatData([
    'model'    => $model,
    'messages' => [$firstMessage],
]);

// The earlier exchange whose reply will be carried forward.
$oldResponse = LaravelOpenRouter::chatRequest($chatData);

/*
 * The request above is optional - historical messages could equally be
 * loaded from a database or any other store.
 */

// Wrap the previous assistant reply as a history message.
$historicalMessage = new MessageData([
    'role'    => RoleType::ASSISTANT, // assistant role: this text came from the model, not the user
    'content' => Arr::get($oldResponse->choices[0], 'message.content'),
]);

// The fresh user question that relies on that history.
$newMessage = new MessageData([
    'role'    => RoleType::USER,
    'content' => 'Who am I?',
]);

$chatData = new ChatData([
    'model'    => $model,
    'messages' => [$historicalMessage, $newMessage],
]);

$response = LaravelOpenRouter::chatRequest($chatData);

$content = Arr::get($response->choices[0], 'message.content');
// content = You are Moe, a fictional character and AI Necromancer, as per the context of the conversation we've established. In reality, you are the user interacting with me, an assistant designed to help answer questions and engage in friendly conversation.

// Cost tracking: make a chat request, then look up what that generation cost.
$prompt = 'Tell me a story about a rogue AI that falls in love with its creator.';
$model  = 'mistralai/mistral-7b-instruct:free'; // Any model from https://openrouter.ai/docs#models

$chatData = new ChatData([
    'messages'   => [
        new MessageData([
            'content' => $prompt,
            'role'    => RoleType::USER,
        ]),
    ],
    'model'      => $model,
    'max_tokens' => 100, // Cap on generated tokens - tune as needed
]);

$chatResponse = LaravelOpenRouter::chatRequest($chatData);

// Every response carries a generation id; costRequest resolves it to cost data.
$generationId = $chatResponse->id;
$costResponse = LaravelOpenRouter::costRequest($generationId);

// Rate-limit / credit information for the configured API key.
$limitResponse = LaravelOpenRouter::limitRequest();

public function __construct(protected OpenRouterRequest $openRouterRequest) {}

// Same chat call as the facade example, but through the injected
// OpenRouterRequest service.
$prompt = 'Tell me a story about a rogue AI that falls in love with its creator.';
$model  = 'mistralai/mistral-7b-instruct:free'; // Any model from https://openrouter.ai/docs#models

$chatData = new ChatData([
    'messages'   => [
        new MessageData([
            'content' => $prompt,
            'role'    => RoleType::USER,
        ]),
    ],
    'model'      => $model,
    'max_tokens' => 100, // Cap on generated tokens - tune as needed
]);

$response = $this->openRouterRequest->chatRequest($chatData);

// Dependency-injection variant of the cost/limit flow.
$prompt = 'Tell me a story about a rogue AI that falls in love with its creator.';
$model  = 'mistralai/mistral-7b-instruct:free'; // Any model from https://openrouter.ai/docs#models

$chatData = new ChatData([
    'messages'   => [
        new MessageData([
            'content' => $prompt,
            'role'    => RoleType::USER,
        ]),
    ],
    'model'      => $model,
    'max_tokens' => 100, // Cap on generated tokens - tune as needed
]);

$chatResponse = $this->openRouterRequest->chatRequest($chatData);

// The generation id links this chat response to its cost record.
$generationId = $chatResponse->id;
$costResponse = $this->openRouterRequest->costRequest($generationId);

// Rate-limit / credit information for the configured API key.
$limitResponse = $this->openRouterRequest->limitRequest();
Publish the package configuration with the following artisan command:

```bash
php artisan vendor:publish --tag=laravel-openrouter
```