Fix styling
freekmurze authored and github-actions[bot] committed Jul 31, 2024
1 parent 099ea77 commit 6230c45
Showing 6 changed files with 21 additions and 20 deletions.
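Every change in this commit applies the same cosmetic rule: when a class is instantiated with no constructor arguments, the parentheses after `new` are dropped. Given the github-actions[bot] committer, this was presumably produced by an automated style fixer such as Laravel Pint rather than by hand. The two spellings are interchangeable in PHP; a minimal sketch with a hypothetical class:

```php
<?php

class Widget
{
    public string $label = 'default';
}

// With no constructor arguments, PHP treats both spellings identically;
// this commit standardizes on omitting the parentheses.
$a = new Widget();
$b = new Widget;

var_dump($a->label === $b->label); // bool(true)
```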
8 changes: 4 additions & 4 deletions src/Crawler.php
```diff
@@ -96,11 +96,11 @@ public function __construct(
         protected Client $client,
         protected int $concurrency = 10,
     ) {
-        $this->crawlProfile = new CrawlAllUrls();
+        $this->crawlProfile = new CrawlAllUrls;
 
-        $this->crawlQueue = new ArrayCrawlQueue();
+        $this->crawlQueue = new ArrayCrawlQueue;
 
-        $this->crawlObservers = new CrawlObserverCollection();
+        $this->crawlObservers = new CrawlObserverCollection;
 
         $this->crawlRequestFulfilledClass = CrawlRequestFulfilled::class;
 
@@ -399,7 +399,7 @@ public function getUserAgent(): string
     public function getBrowsershot(): Browsershot
     {
         if (! $this->browsershot) {
-            $this->browsershot = new Browsershot();
+            $this->browsershot = new Browsershot;
         }
 
         return $this->browsershot;
```
7 changes: 4 additions & 3 deletions src/Handlers/CrawlRequestFulfilled.php
```diff
@@ -3,6 +3,8 @@
 namespace Spatie\Crawler\Handlers;
 
 use Exception;
+use GuzzleHttp\Exception\RequestException;
+use GuzzleHttp\Psr7\Request;
 use GuzzleHttp\Psr7\Uri;
 use GuzzleHttp\Psr7\Utils;
 use GuzzleHttp\RedirectMiddleware;
@@ -16,8 +18,6 @@
 use Spatie\Crawler\ResponseWithCachedBody;
 use Spatie\Crawler\UrlParsers\UrlParser;
 use Symfony\Component\Process\Exception\ProcessFailedException;
-use GuzzleHttp\Psr7\Request;
-use GuzzleHttp\Exception\RequestException;
 
 class CrawlRequestFulfilled
 {
@@ -45,13 +45,14 @@ public function __invoke(ResponseInterface $response, $index)
         try {
             $body = $this->getBodyAfterExecutingJavaScript($crawlUrl->url);
         } catch (ProcessFailedException $exception) {
-            $request = new Request("GET", $crawlUrl->url);
+            $request = new Request('GET', $crawlUrl->url);
             $exception = new RequestException($exception->getMessage(), $request);
             $crawlUrl = $this->crawler->getCrawlQueue()->getUrlById($index);
 
             $this->crawler->getCrawlObservers()->crawlFailed($crawlUrl, $exception);
 
             usleep($this->crawler->getDelayBetweenRequests());
+
             return;
         }
```
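The last hunk above touches the one behavioral corner of this commit: when Browsershot's headless browser fails, the handler synthesizes a PSR-7 request and rewraps the `ProcessFailedException` as a Guzzle `RequestException`, so observers' `crawlFailed` hooks always receive the same exception type whether the failure came from HTTP or from the browser process. A standalone sketch of that wrapping pattern (URL and message are placeholders):

```php
<?php

require 'vendor/autoload.php';

use GuzzleHttp\Exception\RequestException;
use GuzzleHttp\Psr7\Request;

// Stand-in for the ProcessFailedException that Browsershot throws
// when the headless Chromium process fails.
$processError = new RuntimeException('Chromium exited with a non-zero code.');

// Mirror the handler: synthesize a GET request for the URL being
// crawled, then rewrap the process failure as a RequestException.
$request = new Request('GET', 'http://localhost:8080/');
$exception = new RequestException($processError->getMessage(), $request);

echo $exception->getRequest()->getUri(); // http://localhost:8080/
```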
2 changes: 1 addition & 1 deletion tests/ArrayCrawlQueueTest.php
```diff
@@ -5,7 +5,7 @@
 use Spatie\Crawler\CrawlUrl;
 
 beforeEach(function () {
-    $this->crawlQueue = new ArrayCrawlQueue();
+    $this->crawlQueue = new ArrayCrawlQueue;
 });
 
 test('a url can be added to crawl queue', function () {
```
4 changes: 2 additions & 2 deletions tests/CrawlObserverCollectionTest.php
```diff
@@ -11,7 +11,7 @@
 use Spatie\Crawler\CrawlUrl;
 
 beforeEach(function () {
-    $this->crawlObserver = new class() extends CrawlObserver
+    $this->crawlObserver = new class extends CrawlObserver
     {
         public $crawled = false;
 
@@ -44,7 +44,7 @@ public function crawlFailed(
 
     $observers->crawled(
         CrawlUrl::create(new Uri('')),
-        new Response()
+        new Response
     );
 
     expect($this->crawlObserver)
```
18 changes: 9 additions & 9 deletions tests/CrawlerTest.php
```diff
@@ -71,7 +71,7 @@
     $crawler = Crawler::create();
 
     if (getenv('TRAVIS')) {
-        $browsershot = new Browsershot();
+        $browsershot = new Browsershot;
 
         $browsershot->noSandbox();
 
@@ -80,7 +80,7 @@
 
     $crawler
         ->executeJavaScript()
-        ->setCrawlObserver(new CrawlLogger())
+        ->setCrawlObserver(new CrawlLogger)
         ->startCrawling('http://localhost:8080');
 
     expect(regularUrls())->each->toBeCrawledOnce();
@@ -89,7 +89,7 @@
 });
 
 it('allows for a browsershot instance to be set', function () {
-    $browsershot = new Browsershot();
+    $browsershot = new Browsershot;
 
     if (getenv('TRAVIS')) {
         $browsershot->noSandbox();
@@ -98,7 +98,7 @@
     Crawler::create()
         ->setBrowsershot($browsershot)
         ->executeJavaScript()
-        ->setCrawlObserver(new CrawlLogger())
+        ->setCrawlObserver(new CrawlLogger)
         ->startCrawling('http://localhost:8080');
 
     expect(regularUrls())->each->toBeCrawledOnce();
@@ -110,7 +110,7 @@
     Crawler::create()
         ->executeJavaScript()
         ->doNotExecuteJavaScript()
-        ->setCrawlObserver(new CrawlLogger())
+        ->setCrawlObserver(new CrawlLogger)
         ->startCrawling('http://localhost:8080');
 
     expect(regularUrls())->each->toBeCrawledOnce();
@@ -129,15 +129,15 @@
         ])
             ->setBrowsershot($browsershot)
             ->executeJavaScript()
-            ->setCrawlObserver(new CrawlLogger())
+            ->setCrawlObserver(new CrawlLogger)
             ->startCrawling('http://localhost:8080/simulate-activity');
     })->not->toThrow(ProcessFailedException::class);
 
     expect(['url' => 'http://localhost:8080/simulate-activity'])->toBeCrawledOnce();
 });
 
 it('uses a crawl profile to determine what should be crawled', function () {
-    $crawlProfile = new class() extends CrawlProfile
+    $crawlProfile = new class extends CrawlProfile
     {
         public function shouldCrawl(UriInterface $url): bool
         {
@@ -146,7 +146,7 @@ public function shouldCrawl(UriInterface $url): bool
     };
 
     createCrawler()
-        ->setCrawlProfile(new $crawlProfile())
+        ->setCrawlProfile(new $crawlProfile)
         ->startCrawling('http://localhost:8080');
 
     expect([
@@ -200,7 +200,7 @@ function ($url) {
 });
 
 it('can handle pages with invalid urls', function () {
-    $crawlProfile = new class() extends CrawlProfile
+    $crawlProfile = new class extends CrawlProfile
     {
         public function shouldCrawl(UriInterface $url): bool
         {
```
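For context on the anonymous-class pattern these tests exercise: a crawl profile extends `CrawlProfile` and implements `shouldCrawl()` to decide, per URL, whether the crawler should visit it. A sketch under the assumption that the base class lives in the `Spatie\Crawler\CrawlProfiles` namespace (as in recent releases), with an illustrative same-host filter:

```php
<?php

require 'vendor/autoload.php';

use Psr\Http\Message\UriInterface;
use Spatie\Crawler\Crawler;
use Spatie\Crawler\CrawlProfiles\CrawlProfile;

// Only follow links that stay on the host the crawl started from.
$sameHostOnly = new class extends CrawlProfile
{
    public function shouldCrawl(UriInterface $url): bool
    {
        return $url->getHost() === 'localhost';
    }
};

Crawler::create()
    ->setCrawlProfile($sameHostOnly)
    ->startCrawling('http://localhost:8080');
```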
2 changes: 1 addition & 1 deletion tests/Pest.php
```diff
@@ -82,5 +82,5 @@ function createCrawler($options = []): Crawler
 {
     return Crawler::create($options)
         ->setMaximumDepth(3)
-        ->setCrawlObserver(new CrawlLogger());
+        ->setCrawlObserver(new CrawlLogger);
 }
```
