1. Go to this page and download the cloudstudio/ollama-laravel library. Choose the download type "require".
2. Extract the ZIP file and open index.php.
3. Add the following code to index.php.
<?php
// Load Composer's autoloader so the package classes resolve.
require_once('vendor/autoload.php');
/* Start to develop here. Best regards https://php-download.com/ */
cloudstudio/ollama-laravel — example snippets
// Package configuration array; every value can be overridden via .env.
return [
// Default model used when none is chosen on the request builder.
'model' => env('OLLAMA_MODEL', 'llama2'),
// Base URL of the Ollama server.
'url' => env('OLLAMA_URL', 'http://127.0.0.1:11434'),
// Prompt used when none is supplied.
'default_prompt' => env('OLLAMA_DEFAULT_PROMPT', 'Hello, how can I assist you today?'),
'connection' => [
// HTTP timeout in seconds for requests to the Ollama server.
'timeout' => env('OLLAMA_CONNECTION_TIMEOUT', 300),
],
];
use Cloudstudio\Ollama\Facades\Ollama;

// Basic completion: configure the request step by step, then ask.
$request = Ollama::agent('You are a weather expert...');
$request = $request->prompt('Why is the sky blue?');
$request = $request->model('llama2');
$request = $request->options(['temperature' => 0.8]);
$request = $request->stream(false);

/** @var array $response */
$response = $request->ask();
// Vision example: send a local image to a multimodal model and ask about it.
$imagePath = public_path('images/example.jpg');

/** @var array $response */
$response = Ollama::model('llava:13b')
    ->prompt('What is in this picture?')
    ->image($imagePath)
    ->ask();

// Sample answer:
// "The image features a close-up of a person's hand, wearing bright pink fingernail polish and blue nail polish. In addition to the colorful nails, the hand has two tattoos – one is a cross and the other is an eye."
// Multi-turn chat: earlier turns are passed back so the model has context.
$conversation = [
    ['role' => 'user', 'content' => 'My name is Toni Soriano and I live in Spain'],
    ['role' => 'assistant', 'content' => 'Nice to meet you , Toni Soriano'],
    ['role' => 'user', 'content' => 'where I live ?'],
];

$response = Ollama::agent('You know me really well!')
    ->model('llama2')
    ->chat($conversation);

// Sample answer:
// "You mentioned that you live in Spain."
### Chat Completion
// Tool calling: describe a function the model may invoke; the response then
// carries the tool call(s) with arguments instead of plain text.
$messages = [
    ['role' => 'user', 'content' => 'What is the weather in Toronto?'],
];

$response = Ollama::model('llama3.1')
    ->tools([
        [
            "type" => "function",
            "function" => [
                "name" => "get_current_weather",
                "description" => "Get the current weather for a location",
                "parameters" => [
                    "type" => "object",
                    "properties" => [
                        "location" => [
                            "type" => "string",
                            "description" => "The location to get the weather for, e.g. San Francisco, CA",
                        ],
                        "format" => [
                            "type" => "string",
                            "description" => "The format to return the weather in, e.g. 'celsius' or 'fahrenheit'",
                            "enum" => ["celsius", "fahrenheit"],
                        ],
                    ],
                    // NOTE(review): the original snippet was truncated here; the
                    // "required" list and closing call are reconstructed from the
                    // Ollama tool-calling schema — confirm against upstream docs.
                    "required" => ["location", "format"],
                ],
            ],
        ],
    ])
    ->chat($messages);
use Cloudstudio\Ollama\Facades\Ollama;
use Illuminate\Console\BufferedConsoleOutput;

// Streaming: with stream(true), ask() returns a PSR-7 response whose body is
// consumed chunk-by-chunk via processStream().
/** @var \GuzzleHttp\Psr7\Response $response */
$response = Ollama::agent('You are a snarky friend with one-line responses')
    ->prompt("I didn't sleep much last night")
    ->model('llama3')
    ->options(['temperature' => 0.1])
    ->stream(true)
    ->ask();

$console = new BufferedConsoleOutput();

// Echo each chunk's text as it arrives; processStream() also returns
// the collected chunks.
$chunks = Ollama::processStream(
    $response->getBody(),
    fn ($data) => $console->write($data['response'])
);
$console->write("\n");

// Reassemble the full reply from the streamed fragments.
$complete = implode('', array_column($chunks, 'response'));
$console->write("<info>$complete</info>");