/**
 * Grabs the content from the crawled page and publishes a job on the queue.
 *
 * @param \VDB\Spider\Resource                      $resource the crawled page
 * @param \Simgroep\ConcurrentSpiderBundle\CrawlJob $crawlJob the job that produced the resource
 *
 * @throws \Simgroep\ConcurrentSpiderBundle\InvalidContentException when the response body
 *         is at least the configured maximum resource size (in bytes)
 */
public function persist(Resource $resource, CrawlJob $crawlJob)
{
    // strlen() counts bytes, which is the right unit for a resource-size limit.
    if (strlen($resource->getResponse()->getBody()) >= $this->maximumResourceSize) {
        throw new InvalidContentException(
            sprintf('Resource size exceeds limits (%s bytes)', $this->maximumResourceSize)
        );
    }

    $document = $this->documentResolver->getDocumentByResource($resource);

    // Give listeners a chance to act on (or mutate) the document before it is queued.
    $persistenceEvent = new PersistenceEvent($document, $resource, $crawlJob->getMetadata());
    $this->eventDispatcher->dispatch(PersistenceEvents::PRE_PERSIST, $persistenceEvent);

    // The original array_merge() of two single-key literals was redundant;
    // a single array literal produces the identical payload.
    $message = new AMQPMessage(
        json_encode([
            'document' => $document->toArray(),
            'metadata' => $crawlJob->getMetadata(),
        ]),
        ['delivery_mode' => 1] // non-persistent delivery, as before
    );

    $this->queue->publish($message);
}
/**
 * Logs an emergency-level message recording that the given crawl job failed.
 *
 * (The previous docblock documented a nonexistent $level parameter and
 * omitted $errorMessage; corrected here.)
 *
 * @param \Simgroep\ConcurrentSpiderBundle\CrawlJob $crawlJob     the job that failed
 * @param string                                    $errorMessage reason for the failure,
 *                                                                embedded in the log line
 */
public function markAsFailed(CrawlJob $crawlJob, $errorMessage)
{
    $meta = $crawlJob->getMetadata();

    // NOTE(review): assumes the job metadata always contains a 'core' key —
    // confirm with the producers of CrawlJob metadata.
    $this->logMessage(
        'emergency',
        sprintf("Failed (%s) %s", $errorMessage, $crawlJob->getUrl()),
        $crawlJob->getUrl(),
        $meta['core']
    );
}