/**
 * Verifies UrlCheck::fixUrl() normalization rules:
 * spaces are percent-encoded, already-encoded sequences are left intact,
 * and trailing slashes are stripped (alone or combined with encoding).
 */
public function testFixUrl()
{
    // input => expected normalized URL
    $cases = [
        'http://example.com/Some Thing'    => 'http://example.com/Some%20Thing',
        'http://example.com/Some%20Thing'  => 'http://example.com/Some%20Thing',
        'http://example.com/SomeThing/'    => 'http://example.com/SomeThing',
        'http://example.com/SomeThing'     => 'http://example.com/SomeThing',
        'http://example.com/Some Thing/'   => 'http://example.com/Some%20Thing',
    ];

    foreach ($cases as $input => $expected) {
        // assertSame: string comparison must be type-strict, not loose-equal
        $this->assertSame($expected, UrlCheck::fixUrl($input), "fixUrl({$input})");
    }
}
/**
 * Writes each discovered URL as a crawl job on the queue.
 *
 * A URL is only persisted to the queue when it has not been indexed yet
 * (or its index entry has expired), it is not blacklisted for the current
 * crawl job, and the resulting job reports itself as allowed to crawl.
 *
 * @param \Symfony\Component\EventDispatcher\GenericEvent $event carries the
 *        discovered 'uris' and a subject exposing the current crawl job
 */
public function onDiscoverUrl(Event $event)
{
    $crawlJob = $event->getSubject()->getCurrentCrawlJob();

    foreach ($event['uris'] as $uri) {
        // Strip any fragment: the part after '#' never changes the resource.
        // Note the explicit !== false — strpos() returns 0 for a leading '#'
        // (fragment-only URI), which the old truthy check silently skipped.
        if (($position = strpos($uri, '#')) !== false) {
            $uri = new Uri(substr($uri, 0, $position));
        }

        $isBlacklisted = UrlCheck::isUrlBlacklisted(
            $uri->normalize()->toString(),
            $crawlJob->getBlacklist()
        );

        if ($isBlacklisted) {
            $this->eventDispatcher->dispatch(
                "spider.crawl.blacklisted",
                new Event($this, ['uri' => $uri])
            );

            continue; // url blacklisted, so go to next one
        }

        // Only enqueue URLs that are absent from the index or whose entry expired.
        if (!$this->indexer->isUrlIndexedandNotExpired(UrlCheck::fixUrl($uri->toString()), $crawlJob->getMetadata())) {
            $job = new CrawlJob(
                UrlCheck::fixUrl($uri->normalize()->toString()),
                // base URL of the current job, normalized for comparison
                (new Uri($crawlJob->getUrl()))->normalize()->toString(),
                $crawlJob->getBlacklist(),
                $crawlJob->getMetadata(),
                $crawlJob->getWhitelist()
            );

            if ($job->isAllowedToCrawl()) {
                $this->queue->publishJob($job);
            }
        }
    }
}