1. Install the cloudstudio/ollama-laravel library — either run `composer require cloudstudio/ollama-laravel`, or download it from this page choosing the "require" download type.
2. Extract the ZIP file and open index.php.
3. Add the following code to index.php.
<?php
require_once('vendor/autoload.php');
/* Start to develop here. Best regards https://php-download.com/ */
// cloudstudio/ollama-laravel example snippets
// Published package config (config/ollama.php). Every value can be
// overridden through the corresponding .env variable.
return [
// Default model used when no ->model() call is made.
'model' => env('OLLAMA_MODEL', 'llama3.1'),
// Base URL of the local Ollama server (default port 11434).
'url' => env('OLLAMA_URL', 'http://127.0.0.1:11434'),
// Prompt used when none is supplied explicitly.
'default_prompt' => env('OLLAMA_DEFAULT_PROMPT', 'Hello, how can I assist you today?'),
'connection' => [
// Request timeout — presumably seconds; generation can be slow. TODO confirm unit.
'timeout' => env('OLLAMA_CONNECTION_TIMEOUT', 300),
],
];
use Cloudstudio\Ollama\Facades\Ollama;
// Basic usage: system-prompted agent, single completion via ask().
$response = Ollama::agent('You are a helpful assistant.')
->prompt('Explain quantum computing in simple terms')
->model('llama3.1')
->ask();
// ask() returns an array; the generated text lives under 'response'.
echo $response['response'];
// Creative writing: higher temperature widens token sampling.
$response = Ollama::agent('You are a creative writing assistant.')
->prompt('Write a short story about a robot learning to paint')
->model('llama3.1')
->options([
'temperature' => 0.8, // More creative
'top_p' => 0.9,
'max_tokens' => 500
])
->ask();
// Code generation: low temperature for deterministic, precise output.
$response = Ollama::agent('You are an expert PHP developer.')
->prompt('Create a Laravel middleware that logs API requests with rate limiting')
->model('codellama')
->options(['temperature' => 0.2]) // Less creative for code
->ask();
// Vision: llava models accept an image alongside the text prompt.
$response = Ollama::model('llava:13b')
->prompt('Describe what you see in this image in detail')
->image(public_path('images/product-photo.jpg'))
->ask();
echo $response['response'];
// "This image shows a modern smartphone with a sleek black design..."
// Structured information extraction from an uploaded image.
$response = Ollama::model('llava:13b')
->prompt('Extract product information from this image including brand, model, features, and estimated price range')
->image(storage_path('app/uploads/product.jpg'))
->ask();
// Multiple images in a single request via images([]).
$response = Ollama::model('llava:13b')
->prompt('Compare these images and identify the differences')
->images([
public_path('images/before.jpg'),
public_path('images/after.jpg')
])
->ask();
// NOTE(review): a PDF is passed to image() here — vision models expect
// image formats; confirm PDF support or convert pages to images first.
$response = Ollama::model('llava:13b')
->prompt('Extract all text from this document and summarize the key points')
->image(storage_path('app/documents/invoice.pdf'))
->ask();
// Multi-turn chat: the full message history is sent with each call;
// roles follow the usual system / user / assistant convention.
$messages = [
['role' => 'system', 'content' => 'You are a helpful customer support agent for an e-commerce website.'],
['role' => 'user', 'content' => 'I ordered a laptop 3 days ago but haven\'t received tracking information'],
['role' => 'assistant', 'content' => 'I understand your concern. Let me help you track your laptop order. Could you please provide your order number?'],
['role' => 'user', 'content' => 'My order number is ORD-12345']
];
$response = Ollama::model('llama3.1')
->chat($messages);
// Tutoring example: same pattern, with options applied to chat().
$messages = [
['role' => 'system', 'content' => 'You are a patient math tutor helping a student learn calculus.'],
['role' => 'user', 'content' => 'I don\'t understand how to find the derivative of x^2 + 3x + 2'],
['role' => 'assistant', 'content' => 'I\'d be happy to help! Let\'s break this down step by step...'],
['role' => 'user', 'content' => 'Can you show me the power rule?']
];
$response = Ollama::model('llama3.1')
->options(['temperature' => 0.3]) // Lower temperature for educational content
->chat($messages);
$messages = [
['role' => 'system', 'content' => 'You are a senior software engineer providing code reviews.'],
['role' => 'user', 'content' => 'Please review this PHP function for potential improvements:'],
['role' => 'user', 'content' => '
$messages = [
['role' => 'user', 'content' => 'What\'s the current weather in Tokyo and London?']
];
$response = Ollama::model('llama3.1')
->tools([
[
"type" => "function",
"function" => [
"name" => "get_current_weather",
"description" => "Get the current weather for a specific location",
"parameters" => [
"type" => "object",
"properties" => [
"location" => [
"type" => "string",
"description" => "The city and country, e.g. Tokyo, Japan",
],
"unit" => [
"type" => "string",
"description" => "Temperature unit",
"enum" => ["celsius", "fahrenheit"],
],
],
"
$tools = [
[
"type" => "function",
"function" => [
"name" => "execute_sql_query",
"description" => "Execute a read-only SQL query on the database",
"parameters" => [
"type" => "object",
"properties" => [
"query" => [
"type" => "string",
"description" => "The SQL SELECT query to execute",
],
"table" => [
"type" => "string",
"description" => "The primary table being queried",
]
],
"
use Illuminate\Console\BufferedConsoleOutput;
// Streaming: with stream(true), ask() returns an HTTP response whose
// body yields chunks instead of a finished array.
$response = Ollama::agent('You are a creative storyteller.')
->prompt('Write an engaging short story about time travel')
->model('llama3.1')
->options(['temperature' => 0.8])
->stream(true)
->ask();
$output = new BufferedConsoleOutput();
// processStream() invokes the callback once per decoded chunk and
// returns all chunks. NOTE(review): $output is captured but never used.
$responses = Ollama::processStream($response->getBody(), function($data) use ($output) {
echo $data['response']; // Output in real-time
flush();
});
// Reassemble the complete text from the streamed fragments.
$complete = implode('', array_column($responses, 'response'));
// In your controller
/**
 * Stream an LLM completion to the HTTP client as it is generated.
 *
 * Emits SSE-style "data: {...}\n\n" frames. The X-Accel-Buffering
 * header tells nginx not to buffer the proxied response so chunks
 * reach the client immediately.
 */
public function streamChat(Request $request)
{
// NOTE(review): $request->input('message') is sent unvalidated — consider validating.
$response = Ollama::agent('You are a helpful assistant.')
->prompt($request->input('message'))
->model('llama3.1')
->stream(true)
->ask();
return response()->stream(function() use ($response) {
Ollama::processStream($response->getBody(), function($data) {
echo "data: " . json_encode($data) . "\n\n";
flush();
});
}, 200, [
'Content-Type' => 'text/plain',
'Cache-Control' => 'no-cache',
'X-Accel-Buffering' => 'no'
]);
}
// Generate embeddings for documents
$documents = [
    'Laravel is a PHP web framework',
    'Python is a programming language',
    'React is a JavaScript library'
];

// One embedding result per document, in the same order as $documents.
$embeddings = array_map(
    static fn ($doc) => Ollama::model('nomic-embed-text')->embeddings($doc),
    $documents
);

// Search for similar content
$query = 'Web development framework';
$queryEmbedding = Ollama::model('nomic-embed-text')->embeddings($query);

// Calculate cosine similarity (implement your similarity function)
$similarities = calculateCosineSimilarity($queryEmbedding, $embeddings);
// Generate product embeddings
$productDescription = 'Wireless noise-canceling headphones with 30-hour battery life';
$productEmbedding = Ollama::model('nomic-embed-text')
->embeddings($productDescription);
// Store embedding in database for later similarity searches
// NOTE(review): $productId must be defined by the surrounding code.
DB::table('products')->where('id', $productId)->update([
'embedding' => json_encode($productEmbedding['embedding'])
]);
// Model management: these calls operate on the Ollama server's model store.
// Copy a model
Ollama::model('llama3.1')->copy('my-custom-llama');
// Pull a new model
Ollama::model('codellama:7b')->pull();
// Delete a model
Ollama::model('old-model')->delete();
class ContentModerationService
{
    /**
     * Analyze user-submitted content for policy violations via the LLM.
     *
     * @param string $content Raw content to moderate.
     * @return array Decoded moderation verdict: "safe" (bool),
     *               "categories" (array), "confidence" (0-1).
     * @throws \RuntimeException If the model returns malformed JSON.
     */
    public function moderateContent(string $content): array
    {
        $response = Ollama::agent(
            'You are a content moderator. Analyze content for inappropriate material, spam, or policy violations. Respond with JSON containing "safe" (boolean), "categories" (array), and "confidence" (0-1).'
        )
            ->prompt("Analyze this content: {$content}")
            ->model('llama3.1')
            ->format('json')
            // Near-zero temperature keeps the verdict deterministic.
            ->options(['temperature' => 0.1])
            ->ask();

        // BUG FIX: json_decode() returns null on malformed output, which
        // would violate the declared `array` return type with a TypeError.
        // Fail loudly with a diagnosable message instead.
        $verdict = json_decode($response['response'], true);
        if (!is_array($verdict)) {
            throw new \RuntimeException(
                'Moderation model returned invalid JSON: ' . json_last_error_msg()
            );
        }

        return $verdict;
    }
}
class CodeDocumentationService
{
    /**
     * Ask the LLM to produce PHPDoc comments for the supplied source code.
     *
     * @param string $code Raw PHP source to document.
     * @return string The model-generated documentation text.
     */
    public function generateDocumentation(string $code): string
    {
        $agent = Ollama::agent(
            'You are a technical writer. Generate comprehensive PHPDoc comments for the given code.'
        );

        // Low temperature keeps generated docs precise rather than creative.
        $result = $agent
            ->prompt("Generate documentation for this code:\n\n{$code}")
            ->model('codellama')
            ->options(['temperature' => 0.2])
            ->ask();

        return $result['response'];
    }
}
class TranslationService
{
    /**
     * Translate text between two languages using the LLM.
     *
     * @param string $text     Text to translate.
     * @param string $fromLang Source language name.
     * @param string $toLang   Target language name.
     * @return string The translated text.
     */
    public function translate(string $text, string $fromLang, string $toLang): string
    {
        $agent = Ollama::agent(
            "You are a professional translator. Translate the given text accurately while preserving tone and context."
        );

        // Moderate temperature: faithful translation with natural phrasing.
        $result = $agent
            ->prompt("Translate from {$fromLang} to {$toLang}: {$text}")
            ->model('llama3.1')
            ->options(['temperature' => 0.3])
            ->ask();

        return $result['response'];
    }
}
class DataAnalysisService
{
    /**
     * Analyze a CSV file with the LLM and return structured insights.
     *
     * @param string $csvPath Path to a readable CSV file.
     * @return array Decoded JSON analysis (insights, trends, recommendations).
     * @throws \RuntimeException If the file cannot be read or the model
     *                           returns malformed JSON.
     */
    public function analyzeCSV(string $csvPath): array
    {
        // BUG FIX: file_get_contents() returns false on failure; the
        // original would silently send an empty prompt to the model.
        $csvContent = file_get_contents($csvPath);
        if ($csvContent === false) {
            throw new \RuntimeException("Unable to read CSV file: {$csvPath}");
        }

        $response = Ollama::agent(
            'You are a data analyst. Analyze the CSV data and provide insights, trends, and recommendations in JSON format.'
        )
            ->prompt("Analyze this CSV data:\n\n{$csvContent}")
            ->model('llama3.1')
            ->format('json')
            ->ask();

        // BUG FIX: json_decode() yields null on invalid JSON, which would
        // violate the declared `array` return type with a TypeError.
        $analysis = json_decode($response['response'], true);
        if (!is_array($analysis)) {
            throw new \RuntimeException(
                'Analysis model returned invalid JSON: ' . json_last_error_msg()
            );
        }

        return $analysis;
    }
}
// Good: Specific and detailed
$response = Ollama::agent('You are a senior Laravel developer with 10 years of experience.')
->prompt('Create a secure user authentication system using Laravel Sanctum with rate limiting and email verification')
->ask();
// Better: Include context and constraints
$response = Ollama::agent('You are a senior Laravel developer. Follow PSR-12 coding standards and include comprehensive error handling.')
->prompt('Create a user authentication system with these
// Use keepAlive for multiple requests
// keepAlive('10m') maps to Ollama's keep_alive option — presumably keeps
// the model loaded in memory between calls; confirm against package docs.
$ollama = Ollama::model('llama3.1')->keepAlive('10m');
// Process multiple prompts
foreach ($prompts as $prompt) {
$response = $ollama->prompt($prompt)->ask();
// Process response
}
// Pest integration test: requires a running Ollama server with the
// llama3.1 model pulled — this is not a unit test.
it('can generate content with Ollama', function () {
$response = Ollama::agent('You are a test assistant.')
->prompt('Say hello')
->model('llama3.1')
->ask();
// The generate endpoint returns an array; 'response' holds the text.
expect($response)->toHaveKey('response');
expect($response['response'])->toBeString();
});