1. Go to the package page and download the spatie/laravel-link-checker library, choosing the `require` download type.
2. Extract the ZIP file and open index.php.
3. Add the following code to index.php:
<?php
require_once('vendor/autoload.php');

/*
 * Configuration for spatie/laravel-link-checker.
 * In a real Laravel application this array lives in
 * config/laravel-link-checker.php (published via vendor:publish).
 */
return [
    /*
     * The base url of your app. Leave this empty to use
     * the url configured in config/app.php
     */
    'url' => '',

    /*
     * The profile determines which links need to be checked.
     */
    'default_profile' => Spatie\LinkChecker\CheckAllLinks::class,

    /*
     * The reporter determines what needs to be done when the
     * crawler has visited a link.
     */
    'default_reporter' => Spatie\LinkChecker\Reporters\LogBrokenLinks::class,

    /*
     * To speed up the checking process we'll fire off requests concurrently.
     * Here you can change the amount of concurrent requests.
     */
    'concurrency' => 10, // FIX: the comma was missing here, which is a parse error

    /*
     * Here you can specify configuration regarding the used reporters.
     */
    'reporters' => [
        'mail' => [
            /*
             * The `from` address to be used by the mail reporter.
             */
            'from_address' => '',

            /*
             * The `to` address to be used by the mail reporter.
             */
            'to_address' => '',

            /*
             * The subject line to be used by the mail reporter.
             */
            'subject' => '',
        ],

        /*
         * If you wish to exclude status codes from the reporters,
         * you can select the status codes that you wish to
         * exclude in the array below like: [200, 302]
         */
        'exclude_status_codes' => [],
    ],
];
// app/Console/Kernel.php

/**
 * Define the application's command schedule.
 *
 * Runs the link checker once a week: sundays() pins the cron day-of-week
 * field to Sunday and daily() sets the time to midnight, yielding
 * "0 0 * * 0".
 *
 * @param \Illuminate\Console\Scheduling\Schedule $schedule
 * @return void
 */
protected function schedule(Schedule $schedule)
{
    // ... other scheduled commands ...
    // FIX: a bare `...` here was a PHP parse error; it was only a
    // tutorial placeholder and belongs in a comment.
    $schedule->command('link-checker:run')->sundays()->daily();
}
abstract class CrawlProfile
{
    /**
     * Decide whether the crawler should visit the given url.
     *
     * Concrete profiles (e.g. CheckAllLinks) implement the filtering
     * policy; returning false skips the url entirely.
     *
     * @param \Psr\Http\Message\UriInterface $url
     *
     * @return bool
     */
    abstract public function shouldCrawl(UriInterface $url): bool;
}
abstract class CrawlObserver
{
    /**
     * Hook invoked just before the crawler requests the url.
     * No-op by default; override to add logging, timing, etc.
     *
     * @param \Psr\Http\Message\UriInterface $url
     */
    public function willCrawl(UriInterface $url)
    {
    }

    /**
     * Hook invoked after the url was fetched successfully.
     *
     * @param \Psr\Http\Message\UriInterface $url
     * @param \Psr\Http\Message\ResponseInterface $response
     * @param \Psr\Http\Message\UriInterface|null $foundOnUrl page the link was discovered on, if known
     */
    abstract public function crawled(
        UriInterface $url,
        ResponseInterface $response,
        ?UriInterface $foundOnUrl = null
    );

    /**
     * Hook invoked when fetching the url failed.
     *
     * @param \Psr\Http\Message\UriInterface $url
     * @param \GuzzleHttp\Exception\RequestException $requestException
     * @param \Psr\Http\Message\UriInterface|null $foundOnUrl page the link was discovered on, if known
     */
    abstract public function crawlFailed(
        UriInterface $url,
        RequestException $requestException,
        ?UriInterface $foundOnUrl = null
    );

    /**
     * Hook invoked once the whole crawl has finished.
     * No-op by default; override to emit a summary report.
     */
    public function finishedCrawling()
    {
    }
}