1. Go to this page and download the mguinea/laravel-robots library. Choose the download type "require".
2. Extract the ZIP file and open index.php.
3. Add this code to index.php:
<?php
require_once('vendor/autoload.php');
/* Start developing here. */
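Note: in a regular Laravel project you would normally pull the package in with Composer instead of a ZIP download, e.g. by running "composer require mguinea/laravel-robots" in the project root.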
mguinea/laravel-robots example snippets
Route::get('robots.txt', function () {
    $robots = new \Mguinea\Robots\Robots;

    // If on the live server
    if (App::environment() == 'production') {
        $robots->addUserAgent('*')->addSitemap('sitemap.xml');
    } else {
        // If you're on any other server, tell everyone to go away.
        $robots->addDisallow("/");
    }

    return response($robots->generate(), 200)->header('Content-Type', 'text/plain');
});
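In production, the response body generated by this route would look roughly like the lines below (exact ordering may depend on the library version); in any other environment it would contain only the blanket Disallow: / rule.

User-agent: *
Sitemap: sitemap.xml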
use Mguinea\Robots\Facades\Robots;

Route::get('robots.txt', function () {
    // If on the live server
    if (App::environment() == 'production') {
        Robots::addUserAgent('*');
        Robots::addSitemap('sitemap.xml');
    } else {
        // If you're on any other server, tell everyone to go away.
        Robots::addDisallow("/");
    }

    return response(Robots::generate(), 200)->header('Content-Type', 'text/plain');
});
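To sanity-check the route, you could add a feature test such as the following minimal sketch, which only uses Laravel's standard HTTP testing helpers (the test class name is an assumption; note that the test environment is not 'production', so the disallow-everything branch applies):

use Tests\TestCase;

class RobotsTxtTest extends TestCase
{
    public function test_robots_txt_is_served_as_plain_text()
    {
        // The 'testing' environment hits the else branch above,
        // so the body should contain the blanket disallow rule.
        $this->get('/robots.txt')
            ->assertOk()
            ->assertHeader('Content-Type')
            ->assertSee('Disallow: /');
    }
}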
use Illuminate\Support\Facades\File;
use Mguinea\Robots\Robots;

class Anywhere
{
    public function createFile()
    {
        $robots = new Robots;
        $robots->addUserAgent('*')->addSitemap('sitemap.xml');

        // Write the generated rules to public/robots.txt
        File::put(public_path('robots.txt'), $robots->generate());
    }
}
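If you want to regenerate the file on demand, one option is to expose the method above as a console command, for example in routes/console.php (a sketch; the robots:generate signature is an assumption):

use Illuminate\Support\Facades\Artisan;

Artisan::command('robots:generate', function () {
    // Anywhere is the example class defined above.
    (new Anywhere)->createFile();

    $this->info('public/robots.txt regenerated.');
});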
use Mguinea\Robots\Robots;

class Anywhere
{
    public function fromArray()
    {
        // The values here ('foo', 'bar') are placeholders.
        $robots = new Robots([
            'allows' => [
                'foo', 'bar'
            ],
            'disallows' => [
                'foo', 'bar'
            ],
            'hosts' => [
                'foo', 'bar'
            ],
            'sitemaps' => [
                'foo', 'bar'
            ],
            'userAgents' => [
                'foo', 'bar'
            ],
            'crawlDelay' => 10
        ]);

        return response($robots->generate(), 200)->header('Content-Type', 'text/plain');
    }
}
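For a clearer picture, the same constructor fed with more realistic values (the paths and URL below are purely illustrative) could look like this:

use Mguinea\Robots\Robots;

$robots = new Robots([
    'userAgents' => ['*'],
    'disallows' => ['/admin', '/cart'],
    'sitemaps' => ['https://example.com/sitemap.xml'],
    'crawlDelay' => 10
]);

echo $robots->generate();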
// Add an allow rule to the robots. Allow: foo
$robots->addAllow('foo');
// Add multiple allow rules to the robots. Allow: foo Allow: bar
$robots->addAllow(['foo', 'bar']);
// Add a comment to the robots. # foo
$robots->addComment('foo');
// Add a disallow rule to the robots. Disallow: foo
$robots->addDisallow('foo');
// Add multiple disallow rules to the robots. Disallow: foo Disallow: bar
$robots->addDisallow(['foo', 'bar']);
// Add a Host to the robots. Host: foo
$robots->addHost('foo');
// Add multiple hosts to the robots. Host: foo Host: bar
$robots->addHost(['foo', 'bar']);
// Add a Sitemap to the robots. Sitemap: foo
$robots->addSitemap('foo');
// Add multiple sitemaps to the robots. Sitemap: foo Sitemap: bar
$robots->addSitemap(['foo', 'bar']);
// Add a spacer to the robots.
$robots->addSpacer();
// Add a User-agent to the robots. User-agent: foo
$robots->addUserAgent('foo');
// Add multiple User-agents to the robots. User-agent: foo User-agent: bar
$robots->addUserAgent(['foo', 'bar']);
// Add a crawl-delay to the robots. crawl-delay: 10
$robots->addCrawlDelay(10);
// Generate the robots data.
$robots->generate();
// Reset the rows.
$robots->reset();
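Putting the builder methods above together, a full robots.txt could be assembled as in this minimal sketch (the comment text, paths, and sitemap URL are illustrative):

use Mguinea\Robots\Robots;

$robots = new Robots;

$robots->addComment('Generated by mguinea/laravel-robots');
$robots->addUserAgent('*');
$robots->addDisallow(['/admin', '/checkout']);
$robots->addAllow('/blog');
$robots->addSpacer();
$robots->addSitemap('https://example.com/sitemap.xml');
$robots->addCrawlDelay(10);

echo $robots->generate();

// Start over with an empty rule set.
$robots->reset();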