1. Go to this page and download the m6web/roboxt library using the "Download m6web/roboxt library" button, choosing the download type "require". (If you already use Composer, see the alternative after the code below.)
2. Extract the ZIP file and open index.php.
3. Add this code to index.php:
<?php
require_once('vendor/autoload.php');
/* Start developing here. */
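If you already manage dependencies with Composer yourself, you can usually skip the manual download and pull the package in directly; assuming it is published on Packagist under the same name, the equivalent would be:

composer require m6web/roboxt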
m6web/roboxt example snippets
# Create a Parser instance
$parser = new \Roboxt\Parser();
# Parse your robots.txt file
$file = $parser->parse("http://www.google.com/robots.txt");
# You can verify that a URL is allowed for a specific user agent
$tests = [
    ["/events", "*"],
    ["/search", "*"],
    ["/search", "badbot"],
];
foreach ($tests as $test) {
    list($url, $agent) = $test;
    if ($file->isUrlAllowedByUserAgent($url, $agent)) {
        echo "\n ✔ $url is allowed by $agent";
    } else {
        echo "\n ✘ $url is not allowed by $agent";
    }
}
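# A minimal usage sketch: guard a fetch with the same check. The agent name and
# target path below are assumptions for illustration, not part of the library.
$agent  = "mycrawler";
$target = "/events";
if ($file->isUrlAllowedByUserAgent($target, $agent)) {
    echo "\n → $target may be fetched as $agent";
} else {
    echo "\n → skipping $target for $agent";
}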
# You can also iterate over all user agents specified by the robots.txt file
# and check the type of each directive
foreach ($file->allUserAgents() as $userAgent) {
    echo "\n Agent {$userAgent->getName()}: \n";
    foreach ($userAgent->allDirectives() as $directive) {
        if ($directive->isDisallow()) {
            echo "   ✘ {$directive->getValue()} \n";
        } elseif ($directive->isAllow()) {
            echo "   ✔ {$directive->getValue()} \n";
        }
    }
}
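For reference, the directives these snippets inspect come from robots.txt groups such as the following (an illustrative file, not Google's actual robots.txt):

User-agent: *
Allow: /events
Disallow: /search

User-agent: badbot
Disallow: /

Each User-agent group is what allUserAgents() iterates over, and its Allow/Disallow lines are the directives whose type isAllow() and isDisallow() report.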