// Fetch a page and print the text of its first <h1> heading.
$url = 'https://www.example.com';
$html = file_get_contents($url);
if ($html === false) {
    die("Failed to fetch {$url}\n"); // network/HTTP failure — nothing to parse
}

// Real-world HTML is rarely well-formed; collect libxml parse errors
// internally instead of spraying PHP warnings.
libxml_use_internal_errors(true);
$dom = new DOMDocument();
$dom->loadHTML($html);
libxml_clear_errors();

$xpath = new DOMXPath($dom);
$results = $xpath->query('//h1');

// Guard against pages with no <h1> before dereferencing the node list
// (the original $results[0] fatals when the list is empty).
if ($results !== false && $results->length > 0) {
    echo $results->item(0)->nodeValue;
}
// Scrape pages 1-9 of the blog and save each post into SQLite via ScraperWiki.
for ($i = 1; $i < 10; $i++) {
    $url = "https://www.example.com/page/$i";
    $html = file_get_contents($url);
    if ($html === false) {
        continue; // skip pages that fail to download rather than parsing false
    }

    // Suppress libxml warnings from imperfect real-world markup.
    libxml_use_internal_errors(true);
    $dom = new DOMDocument();
    $dom->loadHTML($html);
    libxml_clear_errors();

    $xpath = new DOMXPath($dom);
    $results = $xpath->query('//div[@class="post"]');
    if ($results === false) {
        continue; // invalid XPath / no document — nothing to save
    }

    // BUG FIX: the original used 'id' => $i (the page number) as the unique
    // key, so every post on a page overwrote the previous one and only the
    // last post per page survived. Key each row by page AND post index.
    foreach ($results as $j => $result) {
        scraperwiki::save_sqlite(
            array('id'),
            array('id' => "$i-$j", 'content' => $result->nodeValue)
        );
    }
}
This code scrapes multiple pages of a blog, then saves the contents of each post into a SQLite database using ScraperWiki's `save_sqlite` function. Overall, ScraperWiki is an extremely useful library that makes web scraping simple and easy to do in PHP.