function populateDOM($htmlDOM, $src_link, $upd_flag = false)
{
    // Load $src_link into $htmlDOM, serving from the local 'sources' cache
    // when possible; pass $upd_flag = true to force a fresh scrape.
    // Cache table: one row per source URL with fetch time and raw HTML dump.
    scraperwiki::sqliteexecute("CREATE TABLE IF NOT EXISTS sources (src_link TEXT PRIMARY KEY, timestamp DATETIME, src_dump TEXT)");
    echo "Checking local cache...<br>\n";
    $cached = scraperwiki::sqliteexecute("SELECT src_link, timestamp, src_dump FROM sources WHERE src_link = :slnk", array("slnk" => $src_link));
    $have_cache = !empty($cached->data[0][2]);
    if ($have_cache && $upd_flag != true) {
        // Serve the stored dump instead of hitting the remote site again.
        echo "Using local cache, as cached data exists from '" . date(DATE_RFC822, $cached->data[0][1]) . ".'<br>\n";
        echo "Loading...<br>\n";
        $htmlDOM->load($cached->data[0][2]);
        echo "Populate DOM Complete.";
        return $htmlDOM;
    }
    echo "No Cache for this site (or force-update flag given), scraping live site for local cache...<br>\n";
    // Load the site and save it locally so that we dont end up crawling their site a million times during development
    $live_html = scraperWiki::scrape($src_link);
    $htmlDOM->load($live_html);
    $dump = $htmlDOM->save();
    echo "Scrape complete, storing into cache...<br>\n";
    scraperwiki::sqliteexecute("INSERT OR REPLACE INTO sources VALUES (:slnk, :stime, :sdmp)", array("slnk" => $src_link, "stime" => time(), "sdmp" => $dump));
    scraperwiki::sqlitecommit();
    echo "Cache saved.<br>\n";
    echo "Populate DOM Complete.";
    return $htmlDOM;
}
function grep_munich($url, $table_name)
{
    // Scrape the flight board at $url and rebuild $table_name from scratch,
    // one row per flight (keyed on date + row counter).
    $html = scraperWiki::scrape($url);
    $count = 0;
    # Use the PHP Simple HTML DOM Parser to extract <td> tags
    $dom = new simple_html_dom();
    $dom->load($html);
    // Drop all old information by dropping the table.
    // NOTE(review): $table_name is interpolated into SQL; callers must pass a
    // trusted identifier, never user input.
    scraperwiki::sqliteexecute("drop table if exists " . $table_name);
    scraperwiki::sqlitecommit();
    $table = $dom->getElementById('flight_info_area');
    if (!$table) {
        // BUG FIX: when the page layout changes (or the fetch fails) the
        // element is missing and calling ->find() on it was a fatal error.
        return;
    }
    foreach ($table->find('tr') as $data) {
        // Flight details. Read tds or ths
        $tds = $data->find("td");
        // Skip header/separator rows carrying fewer than 7 columns.
        if (sizeof($tds) < 7) {
            continue;
        }
        //print $data->plaintext . "\n";
        $flightnr = $tds[1]->plaintext;
        $from = $tds[2]->plaintext;
        $time = $tds[3]->plaintext;
        $expected_time = $tds[4]->plaintext;
        // The board only lists today's flights, so today's date is the key.
        $date = date("Y-m-d");
        // Build array of flight information.
        $flight_data = array("date" => $date, "count" => $count, "flightnr" => $flightnr, "from" => $from, "time" => $time, "expected_time" => $expected_time);
        // (date, count) acts as the unique key for the saved row.
        scraperwiki::save_sqlite(array("date", "count"), $flight_data, $table_name);
        $count = $count + 1;
    }
}
function alreadyKnown($cat, $url)
{
    // Returns true (and logs) when swdata already holds a row with this
    // category/url pair, false otherwise.
    // BUG FIX: the query was built by string concatenation, which was
    // injectable and broke on values containing single quotes; it now uses
    // bound parameters.
    $data = scraperwiki::sqliteexecute("select distinct id from swdata where cat = :cat and url = :url", array("cat" => $cat, "url" => $url));
    if (count($data->data) === 0) {
        return false;
    }
    echo "already known : " . $url . " in " . $cat . "\n";
    return true;
}
function insertar($nombreTabla, $idContrato, $fecha, $objeto, $importe, $adjudicatario, $NIF)
{
    // Insert one contract row into $nombreTabla and commit immediately.
    // The free-text fields go through utf8_decode to match the table's
    // expected (Latin-1) encoding.
    $fila = array($idContrato, $fecha, utf8_decode($objeto), $importe, utf8_decode($adjudicatario), $NIF);
    scraperwiki::sqliteexecute("insert into " . $nombreTabla . " values (?,?,?,?,?,?)", $fila);
    scraperwiki::sqlitecommit();
}
            $locationid = "";
            $locationname = "No data";
            $locationlatitude = "";
            $locationlongitude = "";
            $error = $data['error']['message'];
            $categorytitle = "";
            $categoryid = "";
            $mediaid = "";
            $mediatype = "";
            $medialink = "";
            $mediathumb = "";
            $reports["{$id}"] = array("ID" => $id, "Error" => $error, "Title" => $title, "Category" => $categorytitle, "CategoryID" => $categoryid, "Incidentdescription" => $incidentdescription, "Incidentdate" => $incidentdate, "Incidentmode" => $incidentmode, "Incidentactive" => $incidentactive, "Incidentverified" => $incidentverified, "Locationid" => $locationid, "Locationname" => $locationname, "Locationlatitude" => $locationlatitude, "Locationlongitude" => $locationlongitude, "MediaID" => $mediatype, "MediaType" => $mediatype, "MediaLink" => $medialink, "MediaThumb" => $mediathumb);
        }
    }
    print "\n" . "end" . "\n";
}
#print_r($reports);
#scraperwiki::sqliteexecute("drop table reports");
// Persist the scraped Ushahidi-style reports into the 'reports' table.
scraperwiki::sqliteexecute("create table if not exists reports ('id' string, 'title' string, 'incidentdescription' string, 'incidentdate' string, 'incidentmode' string, 'incidentactive' string, 'incidentverified' string, 'locationid' string, 'locationname' string, 'locationlatitude' string, 'locationlongitude' string, 'categorytitle' string, 'categoryid' string, 'error' string, 'mediaid' string, 'mediatype' string, 'medialink' string, 'mediathumb' string)");
foreach ($reports as $id => $values) {
    // Map the scraped report fields onto the named insert parameters.
    // BUG FIX: the original wrote "$reports[] = array(...)" as the second
    // argument, appending every parameter array back onto $reports while
    // iterating it — a pointless side effect that polluted $reports with
    // duplicate numeric-keyed rows. The array is now passed directly.
    $params = array("title" => $values["Title"], "id" => $id, "incidentdescription" => $values["Incidentdescription"], "incidentdate" => $values["Incidentdate"], "incidentmode" => $values["Incidentmode"], "incidentactive" => $values["Incidentactive"], "incidentverified" => $values["Incidentverified"], "locationid" => $values["Locationid"], "locationname" => $values["Locationname"], "locationlatitude" => $values["Locationlatitude"], "locationlongitude" => $values["Locationlongitude"], "categorytitle" => $values["Category"], "categoryid" => $values["CategoryID"], "error" => $values["Error"], "mediaid" => $values["MediaID"], "mediatype" => $values["MediaType"], "medialink" => $values["MediaLink"], "mediathumb" => $values["MediaThumb"]);
    scraperwiki::sqliteexecute("insert or replace into reports values (:id, :title, :incidentdescription, :incidentdate, :incidentmode, :incidentactive, :incidentverified, :locationid, :locationname, :locationlatitude, :locationlongitude, :categorytitle, :categoryid, :error, :mediaid, :mediatype, :medialink, :mediathumb)", $params);
}
scraperwiki::sqlitecommit();
#}
//Convert to array
// Read the requested page offset from the query string; anything missing,
// empty or non-numeric falls back to 0.
$start = 0;
if (!empty($_GET['start'])) {
    $start = $_GET['start'];
}
if (strlen($start) > 0 && !is_numeric($start)) {
    $start = 0;
}
// Attach the source scraper and count the rows available for paging.
$sourcescraper = 'california_craigslist_synth_collector';
scraperwiki::attach($sourcescraper);
$countResult = scraperwiki::sqliteexecute("SELECT count(*) FROM {$sourcescraper}.swdata");
$recordCount = $countResult->data[0][0];
$totalPages = ceil($recordCount / $limit);
// Clamp the offset so the user can't page past the end of the data.
if ($start > $recordCount) {
    $start = ($recordCount > $limit) ? $recordCount - $limit : $limit - $recordCount;
}
$eu = $start - 0;
$current = $eu;
$back = $current - $limit;
if ($back < 0) {
function cleaningCampusTable()
{
    // Drop and recreate the campus master table (name taken from the
    // DEF_TABLE_CAMPUS constant), keyed by campus_cd.
    $campusTable = DEF_TABLE_CAMPUS;
    scraperwiki::sqliteexecute("drop table if exists {$campusTable}");
    $ddl = <<<_END_SQL_
CREATE TABLE `{$campusTable}` (
    `campus_cd` string, 
    `campus_name` string,
    `school_cd` string, 
    `school_name` string, 
    `school_category_cd` string, 
    `school_category_name` string, 
    `zip` string,
    `address` string,
    `latitude` blob,
    `longitude` blob,
    PRIMARY KEY(
        `campus_cd`
    )
)
_END_SQL_;
    scraperwiki::sqliteexecute($ddl);
}
<?php

# Blank PHP
//scraperwiki::sqliteexecute("DROP TABLE last_update");
// Create the demo 'll' table once. FIX: the original executed this exact
// CREATE statement twice in a row; harmless under IF NOT EXISTS, but redundant.
scraperwiki::sqliteexecute("CREATE TABLE IF NOT EXISTS ll (\n        id INTEGER PRIMARY KEY ASC,\n        HITs_available int,\n        jobs int\n    )");
// Seed three rows and persist them.
scraperwiki::sqliteexecute('INSERT INTO ll (jobs) VALUES (100), (200), (300)');
scraperwiki::sqlitecommit();
// Read back and show the first row.
$res = scraperwiki::select('* FROM ll');
print_r($res[0]);
# Blank PHP
//scraperwiki::sqliteexecute("DROP TABLE last_update");
// Create the demo 'll' table once. FIX: the original executed this exact
// CREATE statement twice in a row; harmless under IF NOT EXISTS, but redundant.
scraperwiki::sqliteexecute("CREATE TABLE IF NOT EXISTS ll (\n        id INTEGER PRIMARY KEY ASC,\n        HITs_available int,\n        jobs int\n    )");
// Seed three rows and persist them.
scraperwiki::sqliteexecute('INSERT INTO ll (jobs) VALUES (100), (200), (300)');
scraperwiki::sqlitecommit();
// Read back and show the first row.
$res = scraperwiki::select('* FROM ll');
print_r($res[0]);
function saveTable($data, $pkey, $name, $keys = null)
{
    // Rebuild table $name from the rows in $data, using $pkey (string or
    // array of column names) as the primary key and $keys as the column
    // list (derived via getTableKeys() when not supplied).
    if (empty($name)) {
        $name = 'swdata';
    }
    if (is_string($pkey)) {
        $pkey = array($pkey);
    }
    if (empty($keys)) {
        $keys = getTableKeys($data);
    }
    if (!in_array($pkey[0], $keys)) {
        print "Table '{$name}' have no key '{$pkey['0']}', adding it";
        // Synthesise a 1-based sequential key value for every row.
        for ($i = 0; $i < count($data); $i++) {
            $data[$i][$pkey[0]] = $i + 1;
        }
        // BUG FIX: the synthesised key column must also join the column
        // list — previously the CREATE TABLE below omitted it, so the key
        // values had no column to land in.
        $keys[] = $pkey[0];
    }
    // NOTE(review): $name and $keys are interpolated into SQL; callers must
    // supply trusted identifiers, never user input.
    scraperwiki::sqliteexecute("drop table if exists {$name}");
    if ($keys) {
        scraperwiki::sqliteexecute("create table {$name} (" . join(', ', $keys) . ')');
    }
    scraperwiki::save_sqlite($pkey, $data, $name, $keys);
}
function init()
{
    // Reset the jsen_area aggregate table: (prefecture, larea) -> count.
    $ddl = "CREATE TABLE `jsen_area` (`prefecture` string, `larea` string, `count` number, PRIMARY KEY(`prefecture`, `larea`))";
    scraperwiki::sqliteexecute("drop table if exists jsen_area");
    scraperwiki::sqliteexecute($ddl);
}
// Walk the CAA data index, follow each category link, and for the first CSV
// found create a table whose columns come from the CSV header row.
$outerdom = new simple_html_dom();
$outerdom->load($outerhtml);
foreach ($outerdom->find('.datatable a') as $outerdata) {
    // Reduce the link to its bare 'fld' id, then rebuild the full URL.
    $outerdata->href = str_replace("default.aspx?catid=80&amp;pagetype=88&amp;sglid=3&amp;fld=", "", $outerdata->href);
    $html = scraperwiki::scrape("http://www.caa.co.uk/default.aspx?catid=80&pagetype=88&sglid=3&fld=" . $outerdata->href);
    $dom = new simple_html_dom();
    $dom->load($html);
    foreach ($dom->find('.datatable') as $datapage) {
        foreach ($datapage->find('a') as $page) {
            if (stripos($page, ".csv") !== false) {
                $data = scraperWiki::scrape("http://www.caa.co.uk/" . $page->href);
                $rows = explode("\n", $data);
                // Extract CSV header
                $headers = str_getcsv(array_shift($rows));
                // BUG FIX: $headers is an array; interpolating it directly
                // produced the literal SQL "create table ... (Array)".
                // Join the column names (backtick-quoted) instead.
                $columns = '`' . implode('`, `', $headers) . '`';
                $sql = "create table if not exists swdata ({$columns})";
                print $sql;
                scraperwiki::sqliteexecute($sql);
                print_r($headers);
                foreach ($rows as $row) {
                    print_r($rows);
                    $row = str_getcsv($row);
                    // NOTE(review): rows are parsed but never saved — the
                    // save calls below were left commented out upstream.
                    //scraperwiki::save(array($headers), $line);
                }
                exit;
            }
        }
    }
}
function getStopInfo($stop_id)
{
    // Scrape a Translink stop page and store name, description, zone and
    // lat/long in the 'stop' table. Does nothing when the stop is already
    // cached with a populated 4th column.
    // BUG FIX: parameterised lookup — the original concatenated $stop_id
    // straight into the SQL string (injectable, breaks on quotes).
    $result = scraperwiki::sqliteexecute("select * from stop where stop_id = :sid", array("sid" => $stop_id));
    if (isset($result->data[0][3]) && $result->data[0][3] != '') {
        // Stop already in database, no need to parse.
        return;
    }
    global $stop_url;
    $full_stop_url = $stop_url . $stop_id;
    $html = scraperWiki::scrape($full_stop_url);
    $dom = new simple_html_dom();
    $dom->load($html);
    $title = $dom->find('title');
    if (stripos($title[0]->plaintext, 'not found')) {
        // 404 — log and carry on.
        return;
    }
    $name_dom = $dom->find('h1');
    $name = $name_dom[0]->plaintext;
    $zone_dom = $dom->find('table.header-column td');
    $zone = $zone_dom[0]->plaintext;
    $desc = $zone_dom[1]->plaintext;
    $map_dom = $dom->find('a#map-link');
    $map_link = $map_dom[0]->href;
    // OLD FORMAT
    // http://maps.google.com/maps/api/staticmap?size=320x250&amp;sensor=false&amp;markers=color:red|label:A|-27.925015,153.338751&amp;
    // New form, seems someone at Translink buggered the encoding:
    // http://maps.google.com/maps/api/staticmap?size=320x250&sensor=false&markers=color:red%7Clabel:A%7C-27.464013,153.029148&&client=gme-translinktransit&signature=5_hQHrTJhHBlK8Aur1LLoNBNkIk=
    /* For old format, uncomment if encoding gets fixed  
      $latlon =  substr( $map_link, strrpos ($map_link , '|')+1, -5); 
     
     */
    // New format: "lat,long" sits between the last %7C and the trailing &amp;&amp;.
    $map_start = strrpos($map_link, '%7C') + 3;
    $map_end = strlen($map_link) - strrpos($map_link, '&amp;&amp;');
    $latlon = substr($map_link, $map_start, $map_end * -1);
    $latlon_ar = preg_split("/,/", $latlon);
    $lat = $latlon_ar[0];
    $long = $latlon_ar[1];
    $stop = array('stop_id' => $stop_id, 'name' => $name, 'desc' => $desc, 'zone' => $zone, 'lat' => $lat, 'long' => $long);
    scraperwiki::save_sqlite(array('stop_id'), $stop, "stop", 0);
}
                    $tmp = strtotime($mm[1]);
                    $expire_date = date('Y-m-d', $tmp);
                    $row['expire_date'] = $expire_date;
                    $row['_executed'] = date('Y-m-d H:i:s');
                }
            }
        }
        scraperwiki::save_sqlite(array('guid'), $row, "plum", $verbose);
    }
}
// --- Plum District RSS deal scraper ---------------------------------------
// NOTE(review): this section appears to be spliced together from two
// different scrapers: $delai, $q_num, $q_texte, $date_q and $date_a used
// below are never defined here — confirm against the original sources.
require 'scraperwiki/simple_html_dom.php';
$verbose = 0;
$url = "http://www.plumdistrict.com/deals/rss.xml?affiliate_url=http://gan.doubleclick.net/gan_click?lid=41000000032549767&pubid=21000000000320750";
$feed_html = scraperWiki::scrape($url);
if ($feed_html) {
    // Rebuild the 'plum' table from scratch on every run.
    scraperwiki::sqliteexecute("drop table if exists plum");
    $feed_dom = new simple_html_dom();
    $feed_dom->load($feed_html);
    $arr = array();
    foreach ($feed_dom->find('item') as $item) {
        $row = array();
        // The feed's <guid> doubles as the deal's deep link.
        $deep_link = $row['guid'] = $item->find('guid', 0)->plaintext;
        $row['deal_content_id'] = $item->find('deal_content_id', 0)->plaintext;
        if ($deep_link) {
            print $deep_link . "\n";
            $deep_html = scraperWiki::scrape($deep_link);
            if ($deep_html) {
                // Pull the "Expires MM/DD/YY" date off the deal page.
                if (preg_match('|Expires ([\\d]{2}/[\\d]{2}/[\\d]{2})|si', $deep_html, $mm)) {
                    $tmp = strtotime($mm[1]);
                    $expire_date = date('Y-m-d', $tmp);
                    $row['expire_date'] = $expire_date;
                        $delai = "sup_annee"; // NOTE(review): $delai belongs to the other spliced scraper
                }
            } else {
                $delai = "no_answer";
            }
        }
    }
    /*
     *  Stores results
     */
    // NOTE(review): these variables are undefined in this fragment.
    $result = array("id" => $q_num, "q_texte" => utf8_encode($q_texte), "q_date" => $date_q, "a_date" => $date_a, "delai" => utf8_encode($delai));
    scraperwiki::save_sqlite(array("id"), $result);
}
// --- Assemblée nationale written-questions scraper ------------------------
// NOTE(review): spliced fragment — the loop below jumps mid-way into a
// different (prayer-times) scraper: $daily, $tr, $day, $rows and
// $html->clear() do not belong here and the braces do not balance.
require_once 'scraperwiki/simple_html_dom.php';
$last_id = 0;
// Resume from the highest question id already stored.
$last_id_array = scraperwiki::sqliteexecute("select max(id) from swdata");
if ($last_id_array) {
    $last_id = $last_id_array->data[0][0];
}
$last_id++;
// Scrape the next 500 question pages.
for ($q_num = $last_id; $q_num <= $last_id + 500; $q_num++) {
    $html = scraperWiki::scrape("http://questions.assemblee-nationale.fr/q13/13-" . $q_num . "QE.htm");
    $dom = new simple_html_dom();
    $dom->load($html);
    foreach ($dom->find(".tdstyle") as $data) {
        /*
         *  Scrapes the content of the question
         */
        foreach ($data->find('h2') as $title) {
            if ($title->plaintext == " Texte de la question") {
                foreach ($data->find('.contenutexte') as $contenutexte) {
                // NOTE(review): everything from here down belongs to the
                // spliced prayer-times scraper, not this question scraper.
                $daily['Asar'] = $tr->find('td', 6)->plaintext;
                $daily['Maghrib'] = $tr->find('td', 7)->plaintext;
                $daily['Isyak'] = $tr->find('td', 8)->plaintext;
                $rows[] = $daily;
            }
            $day++;
        }
        $html->clear();
    }
    scraperwiki::save_sqlite(array('Zone', 'Tarikh'), $rows, $table_name = "solat");
}
require 'scraperwiki/simple_html_dom.php';
$zones = array(array('Zone' => 'JHR02', 'Negeri' => 'JOHOR', 'Lokasi' => 'Kota Tinggi, Mersing, Johor Bahru'), array('Zone' => 'JHR04', 'Negeri' => 'JOHOR', 'Lokasi' => 'Batu Pahat, Muar, Segamat, Gemas'), array('Zone' => 'JHR03', 'Negeri' => 'JOHOR', 'Lokasi' => 'Kluang dan Pontian'), array('Zone' => 'JHR01', 'Negeri' => 'JOHOR', 'Lokasi' => 'Pulau Aur dan Pemanggil'), array('Zone' => 'KDH06', 'Negeri' => 'KEDAH', 'Lokasi' => 'Puncak Gunung Jerai'), array('Zone' => 'KDH01', 'Negeri' => 'KEDAH', 'Lokasi' => 'Kota Setar, Kubang Pasu, Pokok Sena'), array('Zone' => 'KDH05', 'Negeri' => 'KEDAH', 'Lokasi' => 'Langkawi'), array('Zone' => 'KDH02', 'Negeri' => 'KEDAH', 'Lokasi' => 'Pendang, Kuala Muda, Yan'), array('Zone' => 'KDH03', 'Negeri' => 'KEDAH', 'Lokasi' => 'Padang Terap, Sik, Baling'), array('Zone' => 'KDH04', 'Negeri' => 'KEDAH', 'Lokasi' => 'Kulim, Bandar Baharu'), array('Zone' => 'KTN03', 'Negeri' => 'KELANTAN', 'Lokasi' => 'Jeli, Gua Musang (Mukim Galas, Bertam)'), array('Zone' => 'KTN01', 'Negeri' => 'KELANTAN', 'Lokasi' => 'K.Bharu,Bachok,Pasir Puteh,Tumpat,Pasir Mas,Tnh. 
Merah,Machang,Kuala Krai,Mukim Chiku'), array('Zone' => 'MLK01', 'Negeri' => 'MELAKA', 'Lokasi' => 'Bandar Melaka, Alor Gajah, Jasin, Masjid Tanah, Merlimau, Nyalas'), array('Zone' => 'NGS02', 'Negeri' => 'NEGERI SEMBILAN', 'Lokasi' => 'Port Dickson, Seremban, Kuala Pilah, Jelebu, Rembau'), array('Zone' => 'NGS01', 'Negeri' => 'NEGERI SEMBILAN', 'Lokasi' => 'Jempol, Tampin'), array('Zone' => 'PHG05', 'Negeri' => 'PAHANG', 'Lokasi' => 'Genting Sempah, Janda Baik, Bukit Tinggi'), array('Zone' => 'PHG04', 'Negeri' => 'PAHANG', 'Lokasi' => 'Bentong, Raub, Kuala Lipis'), array('Zone' => 'PHG03', 'Negeri' => 'PAHANG', 'Lokasi' => 'Maran, Chenor, Temerloh, Bera, Jerantut'), array('Zone' => 'PHG06', 'Negeri' => 'PAHANG', 'Lokasi' => 'Bukit Fraser, Genting Higlands, Cameron Higlands'), array('Zone' => 'PHG02', 'Negeri' => 'PAHANG', 'Lokasi' => 'Kuantan, Pekan, Rompin, Muadzam Shah'), array('Zone' => 'PHG01', 'Negeri' => 'PAHANG', 'Lokasi' => 'Pulau Tioman'), array('Zone' => 'PRK07', 'Negeri' => 'PERAK', 'Lokasi' => 'Bukit Larut'), array('Zone' => 'PRK02', 'Negeri' => 'PERAK', 'Lokasi' => 'Ipoh, Batu Gajah, Kampar, Sg. 
Siput dan Kuala Kangsar'), array('Zone' => 'PRK01', 'Negeri' => 'PERAK', 'Lokasi' => 'Tapah,Slim River dan Tanjung Malim'), array('Zone' => 'PRK03', 'Negeri' => 'PERAK', 'Lokasi' => 'Pengkalan Hulu, Grik dan Lenggong '), array('Zone' => 'PRK04', 'Negeri' => 'PERAK', 'Lokasi' => 'Temengor dan Belum'), array('Zone' => 'PRK05', 'Negeri' => 'PERAK', 'Lokasi' => 'Teluk Intan, Bagan Datoh, Kg.Gajah,Sri Iskandar, Beruas,Parit,Lumut,Setiawan dan Pulau Pangkor'), array('Zone' => 'PRK06', 'Negeri' => 'PERAK', 'Lokasi' => 'Selama, Taiping, Bagan Serai dan Parit Buntar'), array('Zone' => 'PLS01', 'Negeri' => 'PERLIS', 'Lokasi' => 'Kangar, Padang Besar, Arau'), array('Zone' => 'PNG01', 'Negeri' => 'PULAU PINANG', 'Lokasi' => 'Seluruh Negeri Pulau Pinang'), array('Zone' => 'SBH09', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 9 - Sipitang, Membakut, Beaufort, Kuala Penyu, Weston, Tenom, Long Pa Sia'), array('Zone' => 'SBH08', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 8 - Pensiangan, Keningau, Tambunan, Nabawan'), array('Zone' => 'SBH07', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 7 - Papar, Ranau, Kota Belud, Tuaran, Penampang, Kota Kinabalu'), array('Zone' => 'SBH06', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 6 - Gunung Kinabalu'), array('Zone' => 'SBH05', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 5 - Kudat, Kota Marudu, Pitas, Pulau Banggi'), array('Zone' => 'SBH03', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 3 - Lahad Datu, Kunak, Silabukan, Tungku, Sahabat, Semporna'), array('Zone' => 'SBH02', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 2 - Pinangah, Terusan, Beluran, Kuamut, Telupit'), array('Zone' => 'SBH01', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 1 - Sandakan, Bdr. Bkt. 
Garam, Semawang, Temanggong, Tambisan'), array('Zone' => 'SBH04', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 4 - Tawau, Balong, Merotai, Kalabakan'), array('Zone' => 'SWK01', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 1 - Limbang, Sundar, Terusan, Lawas'), array('Zone' => 'SWK08', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 8 - Kuching, Bau, Lundu,Sematan'), array('Zone' => 'SWK07', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 7 - Samarahan, Simunjan, Serian, Sebuyau, Meludam'), array('Zone' => 'SWK06', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 6 - Kabong, Lingga, Sri Aman, Engkelili, Betong, Spaoh, Pusa, Saratok, Roban, Debak'), array('Zone' => 'SWK05', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 5 - Belawai, Matu, Daro, Sarikei, Julau, Bitangor, Rajang'), array('Zone' => 'SWK04', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 4 - Igan, Kanowit, Sibu, Dalat, Oya'), array('Zone' => 'SWK03', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 3 - Song, Belingan, Sebauh, Bintulu, Tatau, Kapit'), array('Zone' => 'SWK02', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 2 - Niah, Belaga, Sibuti, Miri, Bekenu, Marudi'), array('Zone' => 'SGR01', 'Negeri' => 'SELANGOR DAN WILAYAH PERSEKUTUAN', 'Lokasi' => 'Gombak,H.Selangor,Rawang,H.Langat,Sepang,Petaling,S.Alam'), array('Zone' => 'SGR02', 'Negeri' => 'SELANGOR DAN WILAYAH PERSEKUTUAN', 'Lokasi' => 'Sabak Bernam, Kuala Selangor, Klang, Kuala Langat'), array('Zone' => 'SGR03', 'Negeri' => 'SELANGOR DAN WILAYAH PERSEKUTUAN', 'Lokasi' => 'Kuala Lumpur'), array('Zone' => 'SGR04', 'Negeri' => 'SELANGOR DAN WILAYAH PERSEKUTUAN', 'Lokasi' => 'Putrajaya'), array('Zone' => 'TRG01', 'Negeri' => 'TERENGGANU', 'Lokasi' => 'Kuala Terengganu, Marang'), array('Zone' => 'TRG04', 'Negeri' => 'TERENGGANU', 'Lokasi' => 'Kemaman Dungun'), array('Zone' => 'TRG03', 'Negeri' => 'TERENGGANU', 'Lokasi' => 'Hulu Terengganu'), array('Zone' => 'TRG02', 'Negeri' => 'TERENGGANU', 'Lokasi' => 'Besut, Setiu'), array('Zone' => 'WLY02', 'Negeri' => 'WILAYAH PERSEKUTUAN LABUAN', 'Lokasi' => 'Labuan'));
// Ensure the destination tables exist, then seed the zone lookup table
// (keyed on Zone + Negeri) from the $zones array.
scraperwiki::sqliteexecute('CREATE TABLE IF NOT EXISTS `zone` (`Zone` text,`Negeri` text,`Lokasi` text)');
scraperwiki::sqliteexecute('CREATE TABLE IF NOT EXISTS `solat` (`Zone` text, `Tarikh` text, `Hari` text,`Imsak` text,`Subuh` text,  `Syuruk` text, `Zohor` text, `Asar` text, `Maghrib` text, `Isyak` text )');
scraperwiki::save_sqlite(array('Zone', 'Negeri'), $zones, $table_name = "zone");
foreach ($zones as $zone) {
    $rows = array();
    for ($month = 1; $month <= 12; $month++) {
        $html = scraperWiki::scrape('http://www.e-solat.gov.my/web/waktusolat.php?zone=' . $zone['Zone'] . '&state=&year=' . date('Y') . '&jenis=year&bulan=' . $month . '&LG=BM');
        echo $html;
        $dom = new simple_html_dom();
        $dom->load($html);
        $trs = $dom->find('table', 9)->find('tr');
        echo $trs;
        $day = 0;
        foreach ($trs as $tr) {
            if ($day > 0 && $day < count($trs) - 2) {
                $daily = array();
                $daily['Zone'] = $zone['Zone'];
                $daily['Tarikh'] = date('Y') . '-' . str_pad($month, 2, '0', STR_PAD_LEFT) . '-' . str_pad($day, 2, '0', STR_PAD_LEFT);
        foreach ($nextPage->find('a') as $element) {
            $MyString = $element->href;
            $MyString = htmlspecialchars_decode($MyString);
            //         print "Próxima página: " . $MyString . "\n";
        }
    }
}
//************************ main program ************************//
require 'scraperwiki/simple_html_dom.php';
$dom = new simple_html_dom();
// Look up the reviewers' memberIds via SQL and hand them to the function.
avaliaVendedor("blowitoutahere");
print "MyString = " . $MyString . "\n";
// Limit the crawl to 3 pages.
for ($pageIndex = 0; $pageIndex < 3; $pageIndex++) {
    $html = scraperWiki::scrape($MyString);
    $dom->load($html);
    foreach ($dom->find("table.FbOuterYukon") as $feedbackTable) {
        $cells = $feedbackTable->find("td");
        // Feedback rows repeat every 8 cells starting at index 4.
        for ($idx = 4; $idx <= 197; $idx += 8) {
            $row = array('data_autocount' => $idx, 'data_Feedback' => $cells[$idx + 1]->plaintext, 'data_MemberID_AND_FeedbackScore' => $cells[$idx + 2]->plaintext, 'data_Date-Time' => $cells[$idx + 3]->plaintext, 'data_Item_Weight_Price_ItemNumber' => $cells[$idx + 5]->plaintext, 'data_Price' => $cells[$idx + 6]->plaintext);
            // Save the record (the seller's $MyString should be saved too).
            saveData(array("Data_autocount", "data_Feedback", "data_MemberID_AND_FeedbackScore"), $row);
        }
        getLinks($feedbackTable);
    }
    proxPaginaVendedor($dom);
}
print_r(scraperwiki::show_tables());
print_r(scraperwiki::sqliteexecute("select * from membersLinks"));
//print_r(scraperwiki::sqliteexecute("select * from allFeedBacks"));
function createHydroAuthorities($drop = FALSE)
{
    // Create the hydro_authorities bookkeeping table; pass $drop = true to
    // remove any existing table first so the schema is rebuilt from scratch.
    if ($drop) {
        // BUG FIX: 'if exists' makes the drop safe on a fresh database —
        // the bare "drop table" raised an SQL error when the table was absent.
        scraperwiki::sqliteexecute("drop table if exists hydro_authorities");
        scraperwiki::sqlitecommit();
    }
    // NOTE(review): deliberately no IF NOT EXISTS — creating over an existing
    // table (without $drop) errors, matching the original behaviour.
    scraperwiki::sqliteexecute("CREATE TABLE `hydro_authorities` (`name` text,\n                                                                  `long_name` text, \n                                                                  `region` text, \n                                                                  `last_match_count` integer, \n                                                                  `scrape_date_type` text, \n                                                                  `last_msg` text, \n                                                                  `last_count` integer, \n                                                                  `last_date` text, \n                                                                  `last_scrape` text, \n                                                                  `last_status` text, \n                                                                  `start_url` text, \n                                                                  `config` text, \n                                                                  `start_date` text, \n                                                                  `total` integer, \n                                                                  `info` text, \n                                                                  `status` text, \n                                                                  `base_url` text)");
    scraperwiki::sqlitecommit();
}
    scraperwiki::sqliteexecute("create unique index `{$table}name` on {$table} (name)");
    scraperwiki::sqliteexecute("create index {$table}cumulative_freq on {$table} (cumulative_freq)");
    $data = scraperWiki::scrape($url);
    foreach (explode("\n", $data) as $s) {
        // hack, should really use readf or scanf something like that
        list($name, $freq, $total_freq, $rank) = explode(' ', preg_replace('{ +}', ' ', $s));
        if (!empty($name)) {
            scraperwiki::sqliteexecute("insert into {$table} values (?,?,?,?)", array($rank, ucfirst(strtolower($name)), $freq * 1000, $total_freq * 1000));
        }
    }
    scraperwiki::sqlitecommit();
}
# First time through, create tables and indices, then load the US census
# name-frequency files (rank, name, freq, cumulative freq per line).
$urls = array('boy_names' => 'http://www.census.gov/genealogy/names/dist.male.first', 'girl_names' => 'http://www.census.gov/genealogy/names/dist.female.first', 'last_names' => 'http://www.census.gov/genealogy/names/dist.all.last');
foreach ($urls as $tableName => $sourceUrl) {
    # Rebuild each table and its indices from scratch.
    scraperwiki::sqliteexecute("drop table if exists {$tableName}");
    scraperwiki::sqliteexecute("create table {$tableName} (rank INTEGER PRIMARY KEY, `name` string, freq INTEGER, cumulative_freq INTEGER)");
    scraperwiki::sqliteexecute("create unique index `{$tableName}name` on {$tableName} (name)");
    scraperwiki::sqliteexecute("create index {$tableName}cumulative_freq on {$tableName} (cumulative_freq)");
    $data = scraperWiki::scrape($sourceUrl);
    foreach (explode("\n", $data) as $line) {
        // Hack: collapse runs of spaces, then split the fixed-width fields
        // (should really use a scanf-style parse).
        list($name, $freq, $total_freq, $rank) = explode(' ', preg_replace('{ +}', ' ', $line));
        if (empty($name)) {
            continue; // blank/trailing line
        }
        // Frequencies are per-mille in the source; scale by 1000.
        scraperwiki::sqliteexecute("insert into {$tableName} values (?,?,?,?)", array($rank, ucfirst(strtolower($name)), $freq * 1000, $total_freq * 1000));
    }
    scraperwiki::sqlitecommit();
}
<?php

require 'scraperwiki/simple_html_dom.php';
// Pull the cached hotel-list pages from the attached scraper and extract
// each hotel's name and link into the hotel_list table.
$dom = new simple_html_dom();
scraperwiki::attach("test_1_2");
$pages = scraperwiki::sqliteexecute("select html from hotel_list_pages");
foreach ($pages->data as $contents) {
    $dom->load($contents[0]);
    foreach ($dom->find("table.hotellist tr") as $data) {
        // Hotel name/link lives in the row's <td><h3><a>.
        $links = $data->find("td h3 a");
        $record = array('hotel' => $links[0]->plaintext, 'url' => $links[0]->href);
        scraperwiki::save_sqlite(array('hotel'), $record, $table_name = 'hotel_list');
    }
}
// NOTE(review): this section is a verbatim duplicate of the hotel-list
// extraction directly above; running it re-saves the same rows.
require 'scraperwiki/simple_html_dom.php';
$dom = new simple_html_dom();
scraperwiki::attach("test_1_2");
// Cached hotel-list pages previously stored by the attached scraper.
$result = scraperwiki::sqliteexecute("select html from hotel_list_pages");
$hotel_list_pages_contents = $result->data;
foreach ($hotel_list_pages_contents as $contents) {
    $html = $contents[0];
    $dom->load($html);
    foreach ($dom->find("table.hotellist tr") as $data) {
        // Hotel name/link lives in the row's <td><h3><a>.
        $tds = $data->find("td h3 a");
        $record = array('hotel' => $tds[0]->plaintext, 'url' => $tds[0]->href);
        scraperwiki::save_sqlite(array('hotel'), $record, $table_name = 'hotel_list');
    }
}
function alreadyKnown($cat, $name, $link)
{
    // Returns true (and logs) when swdata already holds a row with this
    // title/category pair. $link is accepted for interface compatibility
    // but never used.
    // BUG FIX: parameterised query replaces the injectable concatenated SQL.
    // Single quotes are still stripped from $name to mirror how titles were
    // stored (preserves the original matching behaviour).
    $data = scraperwiki::sqliteexecute("select distinct id from swdata where title = :title and category = :cat", array("title" => str_replace("'", "", $name), "cat" => $cat));
    if (count($data->data) === 0) {
        return false;
    }
    echo "already known : " . $name . " in " . $cat . "\n";
    return true;
}
        $data[] = array('col1' => $vals[0]->plaintext, 'col2' => $vals[1]->plaintext, 'col3' => $vals[2]->plaintext, 'col4' => $vals[3]->plaintext, 'col6' => $ing[0][0], 'col7' => str_replace('-', ' ', strstr($row->find('a', 3)->href, '-')), 'col8' => $ing[0][1], 'col9' => str_replace('-', ' ', strstr($row->find('a', 4)->href, '-')), 'col10' => $ing[0][2], 'col11' => str_replace('-', ' ', strstr($row->find('a', 5)->href, '-')), 'col12' => $ing[0][3], 'col13' => str_replace('-', ' ', strstr($row->find('a', 6)->href, '-')));
        //    };
        //    }
    }
    scraperwiki::save_sqlite(array('col1'), $data, "data");
    /* Extract block data */
    /* Extract transactions */
}
?>


<?php 
/* scraper by Tomasz Polonczyk http://www.tomot.eu */
require 'scraperwiki/simple_html_dom.php';
/* Extract BLOCKS */
// Create the destination table for the GW2DB recipe scrape.
scraperwiki::sqliteexecute("create table if not exists data (col1 char(24), col2 int(24), col3 char(24), col4 char(24), col5 char(24), col6 char(24), col7 char(24), col8 char(24), col9 char(24), col10 char(24), col11 char(24), col12 char(24), col13 char(24))");
// NOTE(review): the loop body below is spliced from two scrapers — from the
// $title line onward it references $title, $url, get_extras() and $refnumber,
// none of which are defined in this GW2DB context. Verify before running.
for ($i = 1; $i <= 2; $i++) {
    $url = "http://www.gw2db.com/recipes/cook?page={$i}";
    $html = scraperwiki::scrape($url);
    $dom = new simple_html_dom();
    $dom->load($html);
    foreach ($dom->find('table tr') as $row) {
        echo $row . "\n";
        // echo $row->find('a',3)->href . $row->find('a',4)->href . $row->find('a',5)->href . $row->find('a',6)->href;
        $vals = $row->find('td');
        //print_r($vals) ;
        //echo test.$vals[4].test;
        //    if (!empty($vals)) {
        //     if (preg_match('/[0-9a-fA-F]{64}/', $vals[1]->innertext, $matches )) {
        // Ingredient counts: every run of digits in the 5th cell.
        $string = $vals[4]->plaintext;
        preg_match_all('/\\d+/', $string, $ing);
        // NOTE(review): spliced petition-scraper code starts here.
        $title = str_replace("&amp;", "&", $title);
        $statuscell = $row->find("td", 3);
        $status = trim($statuscell->plaintext);
        //var_dump($thisrecord['description']);
        //print mb_detect_encoding($thisrecord['description'])."\n";
        $moredetails = get_extras($url);
    }
    $councillors["{$refnumber}"] = array("Url" => "{$url}", "Name" => "{$name}", "Title" => "{$title}", "Status" => "{$status}", "Submittedby" => $moredetails["submittedby"], "Petitiontext" => $moredetails["petitiontext"], "Status1" => $moredetails["status1"], "Status2" => $moredetails["status2"], "Status3" => $moredetails["status3"], "Date1" => $moredetails["date1"], "Date2" => $moredetails["date2"], "Date3" => $moredetails["date3"], "Status4" => $moredetails["status4"], "Status5" => $moredetails["status5"], "Status6" => $moredetails["status6"], "Date4" => $moredetails["date4"], "Date5" => $moredetails["date5"], "Date6" => $moredetails["date6"], "Corpname" => $moredetails["corpname"], "Published" => $moredetails["published"]);
}
#}
#scraperwiki::sqliteexecute("drop table councillors");
// Create the petitions/councillors table and persist every collected record.
scraperwiki::sqliteexecute("create table if not exists councillors (`url` string,`name` string, `refnumber` string, `title` string, `status` string, `submittedby` string, `petitiontext` string, `status1` string, `status2` string, `status3` string, `date1` string, `date2` string, `date3` string, `status4` string, `status5` string, `status6` string, `date4` string, `date5` string, `date6` string, `corpname` string, `published` string)");
#, `email` string, `phone` string, `mobile` string, `image` string,  `address` string)");
scraperwiki::sqlitecommit();
foreach ($councillors as $refnumber => $values) {
    // BUG FIX: the placeholder list previously read ":status4, :status6, :status6",
    // so the status5 column silently received the Status6 value and the bound
    // :status5 parameter was never used. Corrected to ":status4, :status5, :status6".
    scraperwiki::sqliteexecute("insert or replace into councillors values (:url, :name, :refnumber, :title, :status, :submittedby, :petitiontext, :status1, :status2, :status3, :date1, :date2, :date3, :status4, :status5, :status6, :date4, :date5, :date6,  :corpname, :published)", array("url" => $values["Url"], "name" => $values["Name"], "refnumber" => $refnumber, "title" => $values["Title"], "status" => $values["Status"], "submittedby" => $values["Submittedby"], "petitiontext" => $values["Petitiontext"], "status1" => $values["Status1"], "status2" => $values["Status2"], "status3" => $values["Status3"], "date1" => $values["Date1"], "date2" => $values["Date2"], "date3" => $values["Date3"], "status4" => $values["Status4"], "status5" => $values["Status5"], "status6" => $values["Status6"], "date4" => $values["Date4"], "date5" => $values["Date5"], "date6" => $values["Date6"], "corpname" => $values["Corpname"], "published" => $values["Published"]));
    #}
}
scraperwiki::sqlitecommit();
function get_extras($url)
{
    // Scrape one detail page and (apparently) enqueue Google Play ranking URLs.
    // NOTE(review): the loop below references $el, which is never defined in this
    // function's scope, and the function returns nothing even though the caller
    // assigns its result to $moredetails and indexes it as an array — this body
    // looks pasted from the Google Play scraper further down; verify the source.
    $localhtml = scraperwiki::scrape($url);
    $localdom = new simple_html_dom();
    $localdom->load($localhtml);
    $column = $localdom->find("div[class=column-center2]");
    print $url;
    #   $trows=$column->find("div[class=column-center-inner] table tr");
    #print_r($column[2]);
    #  $contents = explode("</tr>",$column[0]);
    // Split the first matching div's markup on </h2> boundaries.
    $contents = explode("</h2>", $column[0]);
    print "\n" . "contents";
    // Collection names keyed by paid flag: "0" => free, "1" => paid.
    $temp = array("0" => "topselling_free", "1" => "topselling_paid");
    foreach ($temp as $key => $str) {
        // Offsets 0, 24, 48, 72, 96 — five pages of 24 results each.
        for ($i = 0; $i < 5 * 24; $i = $i + 24) {
            $record = array('link' => "https://play.google.com" . str_replace("?feature=category-nav", "", $el->href) . "/collection/" . $str . "?start=" . $i . "&num=24", 'category' => $el->innertext, 'paid' => $key, 'startRank' => $i);
            //print json_encode($record) . "\n";
            // NOTE(review): the unique-key list passes 'link' twice — redundant.
            scraperwiki::save(array('category', 'link', 'paid', 'link', 'startRank'), $record);
        }
    }
}
require 'scraperwiki/simple_html_dom.php';
// Build Google Play category ranking URLs (free + paid collections, rank
// offsets 0-96) and save one record per (category, collection, offset).
$html_content = scraperwiki::scrape("https://play.google.com/store/apps?feature=corpus_selector");
$html = str_get_html($html_content);
//"collection/topselling_paid?start=24&num=24"
//topselling_free
// Delete existing data.
// NOTE(review): plain "drop table" raises an error when swdata does not exist
// yet — consider "drop table if exists swdata".
scraperwiki::sqliteexecute("drop table swdata");
scraperwiki::sqlitecommit();
foreach ($html->find("li.category-item a") as $el) {
    // Ignore junk entries (links that contain a "more" arrow).
    if ($el->find(".more-arrow")) {
        continue;
    }
    //print($el->innertext). "\n";
    // Collection names keyed by paid flag: "0" => free, "1" => paid.
    $temp = array("0" => "topselling_free", "1" => "topselling_paid");
    foreach ($temp as $key => $str) {
        for ($i = 0; $i < 5 * 24; $i = $i + 24) {
            $record = array('link' => "https://play.google.com" . str_replace("?feature=category-nav", "", $el->href) . "/collection/" . $str . "?start=" . $i . "&num=24", 'category' => $el->innertext, 'paid' => $key, 'startRank' => $i);
            //print json_encode($record) . "\n";
            // NOTE(review): the unique-key list passes 'link' twice — redundant.
            scraperwiki::save(array('category', 'link', 'paid', 'link', 'startRank'), $record);
        }
    }
    }
// NOTE(review): the closing brace below has no matching opening brace in this
// chunk — almost certainly an artifact of concatenating scrapers into one file.
}
//now lets geocode
// Geocode each distinct user location via the Google Maps API and write
// lat/lng back to users2. Failed lookups are marked 'XXX' so they are not
// retried by the NOT NULL / IS NULL filter on the next run.
$users = scraperwiki::sqliteexecute("SELECT * FROM `users2` WHERE `location` NOT NULL AND `lat` IS NULL GROUP BY `location` ORDER BY `location`");
// Map column name -> positional index so rows can be read by column name.
foreach ($users->keys as $key => $value) {
    $keys[$value] = $key;
}
$users = $users->data;
// NOTE(review): this `if (1 == 2)` guard deliberately disables the geocoding
// pass; left intact so behaviour is unchanged.
if (1 == 2) {
    foreach ($users as $user) {
        $location = $user[$keys['location']];
        $addr = urlencode($location);
        $url = 'http://maps.googleapis.com/maps/api/geocode/json?sensor=false&address=' . $addr;
        $get = file_get_contents($url);
        $records = json_decode($get, TRUE);
        echo $addr . ":";
        if ($records['status'] == 'OK') {
            $lat = $records['results'][0]['geometry']['location']['lat'];
            $lng = $records['results'][0]['geometry']['location']['lng'];
            echo $lat . "-" . $lng . "\n";
            // SECURITY FIX: bind values instead of concatenating them into the
            // SQL — location strings are scraped data and may contain quotes.
            // The two single-column updates are merged into one statement.
            scraperwiki::sqliteexecute("update `users2` set `lat` = :lat, `lng` = :lng where `location` = :loc", array("lat" => $lat, "lng" => $lng, "loc" => $location));
            scraperwiki::sqlitecommit();
        } else {
            echo "N/A\n";
            // Sentinel 'XXX' marks locations the geocoder could not resolve.
            scraperwiki::sqliteexecute("update `users2` set `lat` = 'XXX', `lng` = 'XXX' where `location` = :loc", array("loc" => $location));
            scraperwiki::sqlitecommit();
        }
    }
}
 static function get_var($name, $default = null)
 {
     // Fetch a persisted scraper variable by name, restoring its PHP type
     // from the stored type tag. Returns $default when the swvariables table
     // does not exist yet or contains no row for $name.
     $ds = SW_DataStoreClass::create();
     try {
         $result = scraperwiki::sqliteexecute("select value_blob, type from swvariables where name=?", array($name));
     } catch (Exception $e) {
         // A missing table simply means no variable has ever been saved;
         // both backends report it with a distinct message prefix.
         $message = $e->getMessage();
         $prefixes = array('sqlite3.Error: no such table:', 'DB Error: (OperationalError) no such table:');
         foreach ($prefixes as $prefix) {
             if (substr($message, 0, strlen($prefix)) == $prefix) {
                 return $default;
             }
         }
         throw $e;
     }
     $rows = $result->data;
     if (count($rows) == 0) {
         return $default;
     }
     list($svalue, $vtype) = $rows[0];
     // Convert the stored blob back to its original scalar type.
     switch ($vtype) {
         case "integer":
             return intval($svalue);
         case "double":
             return floatval($svalue);
         case "NULL":
             return null;
         default:
             return $svalue;
     }
 }
        // NOTE(review): fragment — the head of this function (and the loop over
        // urls that defines $url and $results) is outside the visible chunk.
        } else {
            // Anything that is not a twitter link is treated as the website.
            if (stristr($url, "twitter")) {
                $results["twitter_url"] = $url;
            } else {
                $results["website_url"] = $url;
            }
        }
        // There are max 3 urls we are interested in
        if (sizeof($results) === 4) {
            break;
        }
    }
    scraperwiki::save_sqlite(array("id"), $results, "eparlimen_social_links");
}
require 'scraperwiki/simple_html_dom.php';
// Rebuild eparlimen social links from the attached constituency URL list.
scraperwiki::sqliteexecute("DELETE FROM eparlimen_social_links");
scraperwiki::attach("eparlimen-constituencies", "urls");
$urls = scraperwiki::select("* FROM urls.eparlimen_constituencies_links");
// NOTE(review): this foreach is not closed within this chunk — its body runs
// past the visible fragment boundary.
foreach ($urls as $url) {
    $url = str_replace(",%20", "", $url["url"]);
    // A hack for a known bad link. This should be in the link scraper, but it's xmas and I have better things to do :)
    $html = scraperwiki::scrape($url);
    $dom = new simple_html_dom();
    $dom->load($html);
    // Third <li> under ul.wrap_senarai holds the constituency code.
    $node = $dom->find("ul.wrap_senarai li", 2);
    if (is_object($node)) {
        $code = $node->children(1)->plaintext;
    } else {
        echo "Unable to parse {$url}\n";
        continue;
    }
        // NOTE(review): fragment — the head of this function (presumably
        // get_nameparty) is outside the visible chunk; $nameparty and
        // $moredetails are defined there.
        $partycell = $nameparty[1];
        $party = trim(str_replace(")", "", $partycell));
        // NOTE(review): this $name assignment is immediately overwritten from
        // $namecell two lines below — the first assignment is dead code.
        $name = trim(str_replace("Cllr. ", "", strip_tags($party)));
        $namecell = $nameparty[0];
        $name = trim(str_replace("Cllr. ", "", strip_tags($namecell)));
        print $name;
        #$party = $row->find("p",0);
        //$party = $cell->find("p",0);
        print $party;
        $moredetails["name"] = $name;
        $moredetails["party"] = $party;
        return $moredetails;
    }
}
//$moredetails = array();
//$moredetails = get_details($content);
$moredetails = get_nameparty($content);
// NOTE(review): this loop keys $councillors by $name, which is never assigned
// in this scope, and re-reads $moredetails["party"] (not $moredetail) on every
// pass — looks like a copy-paste remnant; behaviour preserved, verify intent.
foreach ($moredetails as $moredetail) {
    $lea = "lea";
    print $lea;
    $councillors["{$name}"] = array("LEA" => $lea, "Party" => $moredetails["party"]);
}
unset($dom, $html, $uri);
// BUG FIX: plain "drop table" raises an error on the very first run, when the
// councillors table does not exist yet; "if exists" makes the rebuild idempotent.
scraperwiki::sqliteexecute("drop table if exists councillors");
scraperwiki::sqliteexecute("create table if not exists councillors (`auth` string, `lea` string, `name` string, `party` string)");
#, `email` string, `address` string, `phone` string, `mobile` string, `image` string)");
scraperwiki::sqlitecommit();
foreach ($councillors as $name => $values) {
    scraperwiki::sqliteexecute("insert or replace into councillors values (:auth, :lea, :name, :party)", array("auth" => "Carlow County Council", "lea" => $values["LEA"], "name" => $name, "party" => $values["Party"]));
}
scraperwiki::sqlitecommit();
$dom = new simple_html_dom();
$dom->load($html);
// Save the "independents" club for this term and its member MPs.
//delete current data
if (isset($info['club'])) {
    scraperwiki::sqliteexecute("delete from club where id = ''");
    scraperwiki::sqlitecommit();
}
$club = array('id' => $term . '/nezavisli');
// The page's first <h1> carries the club name.
$h1s = $dom->find('h1');
$club['name'] = trim($h1s[0]->plaintext);
scraperwiki::save_sqlite(array('id'), $club, 'club');
//memberships
if (isset($info['membership'])) {
    scraperwiki::sqliteexecute("delete from membership where src_group_id = ''");
    scraperwiki::sqlitecommit();
}
$divs = $dom->find('div[id=_sectionLayoutContainer__panelContent]');
$as = $divs[0]->find('a');
//echo $divs[0]->outertext;
if (count($as) > 0) {
    foreach ($as as $a) {
        $mp = array('src_group_id' => $term . '/nezavisli');
        // BUG FIX: the preg_match result was previously used unchecked, so an
        // anchor without a PoslanecID would reuse $matches from the previous
        // iteration (or trigger a notice on the first). Skip such links.
        if (!preg_match('/PoslanecID=([0-9]{1,})/', $a->href, $matches)) {
            continue;
        }
        $mp['mp_id'] = $matches[1];
        $mp['name'] = $a->plaintext;
        // Delete his other memberships. mp_id is regex-validated digits, but
        // bind it anyway instead of concatenating it into the SQL.
        scraperwiki::sqliteexecute("delete from membership where mp_id = :mpid", array("mpid" => $mp['mp_id']));
        scraperwiki::sqlitecommit();
        scraperwiki::save_sqlite(array('mp_id', 'src_group_id'), $mp, 'membership');
    }
}
            // NOTE(review): fragment — the enclosing loops/function head are
            // outside this chunk; $manufacturerQuery, $state, $ignoreWords and
            // the various depth settings are defined there.
            $synthManufacturer = $manufacturerQuery;
            //Search for a specific manufacturer
            $tManName = preg_replace("/\\s/", "_", $synthManufacturer);
            //Build a temp name for the array key
            $synthManufacturer = urlencode($synthManufacturer);
            echo "Searching manufacturer: " . $synthManufacturer . "\n";
            $foundSynths[$state][$tManName] = findSynthsOnCraigslist($state, $jsonManufacturerScraper, $synthManufacturer, $ignoreWords, $cityDepth, $synthDepth, $depthOverride);
            if (!empty($foundSynths[$state][$tManName])) {
                //scraperwiki::save_var($state.'-'.$synthManufacturer.'-'.'synths', json_encode($foundSynths[$state][$synthManufacturer]));
                $saveMessage = scraperWiki::save_sqlite(array('manufacturer', 'synth_name', 'post_item_date', 'post_item_name', 'post_item_price', 'post_item_link', 'post_item_state', 'query', 'link', 'post_item_description', 'post_item_images'), $foundSynths[$state][$tManName]);
                print strval($saveMessage);
            } else {
                //Delete existing data
                // NOTE(review): clearing swdata whenever one search returns no
                // results discards all previously saved rows — confirm intended.
                $info = scraperwiki::table_info($name = "swdata");
                if (!empty($info)) {
                    scraperwiki::sqliteexecute("DELETE FROM swdata");
                    //Truncate the table before adding new results
                }
            }
        }
    }
}
/**
* Parses craigslist and returns synths
* @param String state
* @param String synthManufacturer
* @param ignoreWords
* @param cityDepth
* @param synthDepth
* @param depthOverride
* @return Array foundSynths
            // NOTE(review): fragment — the loop head and the definitions of
            // $therest, $councillordom, $partystring, $mobile, $email, $img,
            // $leaname and $name are outside the visible chunk.
            $phone = $therest[1];
            $address = $councillordom->find("p", 0);
        }
        # cleanall
        $party = str_replace('The ', '', str_replace('Non Party/Neamh Pháirtí', 'Independent', strip_tags($partystring)));
        // Strip decoration from the phone number and normalise separators.
        $remove = array("tel:", "(", ")", "n/a", "home");
        $phone = trim(str_replace("-", " ", str_replace($remove, "", strtolower(strip_tags($phone)))));
        if (substr($phone, 0, 1) == "4") {
            $phone = "021 " . $phone;
        }
        # Add area code for Cork City numbers missing one; they all start with a 4
        # (This is in fact crap and doesn't apply to non-Eircom numbers,
        # but there are none of those in this set at present.)
        $mobile = trim(str_replace("Mobile: ", "", str_replace("-", " ", strip_tags($mobile))));
        $address = str_replace("<br />", " ", $address);
        #because some entries have no space betw , and</br>
        $address = trim(str_replace("&nbsp;", " ", htmlspecialchars_decode(strip_tags($address))));
        //$mobile = trim(str_replace("Mobile: ","",str_replace("-"," ",strip_tags($therest[2]))));
        unset($councillordom);
        $councillors["{$name}"] = array("LEA" => $leaname, "Party" => $party, "Email" => $email, "Phone" => $phone, "Mobile" => $mobile, "Address" => $address, "Image" => $img);
    }
}
//print_r($councillors);
//scraperwiki::attach("iecouncillorsall");
// Rebuild the Cork City councillors table from the freshly scraped data.
// BUG FIX: plain "drop table" raises an error on the very first run, when the
// table does not exist yet; "if exists" makes the rebuild idempotent.
scraperwiki::sqliteexecute("drop table if exists councillors");
scraperwiki::sqliteexecute("create table if not exists councillors (`authority` string, `lea` string, `name` string, `party` string, `email` string, `phone` string, `mobile` string, `address` string, `image` string)");
scraperwiki::sqlitecommit();
foreach ($councillors as $name => $values) {
    scraperwiki::sqliteexecute("insert or replace into councillors values (:auth, :lea, :name, :party, :email, :phone, :mobile, :address, :image)", array("auth" => "Cork City Council", "lea" => $values["LEA"], "name" => $name, "party" => $values["Party"], "email" => $values["Email"], "phone" => $values["Phone"], "mobile" => $values["Mobile"], "image" => $values["Image"], "address" => $values["Address"]));
}
scraperwiki::sqlitecommit();