/**
 * Scrape article body paragraphs from every stored news.liputan6.com URL.
 *
 * Reads crawl targets from the `crawling` Mongo collection (documents whose
 * `refurl` is news.liputan6.com), fetches each `url` with Goutte, and saves
 * every matched <p> node as its own Scraping record. Echoes an error line for
 * failed fetches and an <hr> separator after each URL.
 *
 * @return void
 */
public function get_scrap()
{
    $client = new Client();

    // BUG FIX: CURLOPT_TIMEOUT is expressed in *seconds*. The original value
    // of 60000 (~16.7 hours) was almost certainly a milliseconds figure, so a
    // request could hang effectively forever. Use a 60-second timeout instead.
    $client->getClient()->setDefaultOption('config/curl/' . CURLOPT_TIMEOUT, 60);

    $crawled = 'news.liputan6.com';

    // CSS selector for the article body paragraphs on a liputan6 article page.
    // (An unused $title selector was removed; nothing ever read it.)
    $article = 'article.hentry > div.entry-content > div.text-detail > p';

    // All crawl targets previously recorded for this site.
    $geturl = DB::collection('crawling')->where('refurl', $crawled)->get();

    foreach ($geturl as $key) {
        $url = $key['url'];
        $crawler = $client->request('GET', $url);
        $status_code = $client->getResponse()->getStatus();

        // Strict comparison: getStatus() returns an int, so === is safe and
        // avoids loose-typing surprises.
        if ($status_code === 200) {
            // Each matched <p> becomes its own Scraping record, mirroring the
            // original one-row-per-paragraph behavior.
            $crawler->filter($article)->each(function ($node) {
                $yew = new Scraping();
                $yew->article = $node->text();
                $yew->save();
            });
        } else {
            // Replaced the profane placeholder with an actionable message that
            // identifies the failing URL and the HTTP status received.
            echo "Failed to fetch {$url} (HTTP {$status_code})";
        }

        echo "<hr>";
    }
}