// Recursively walk TuneIn "related" links, saving each newly-seen page URL
// into the `pages` table. Re-entrant: URLs already present are dumped and
// skipped, which also terminates cycles in the link graph.
function scrape_related($url) {
    // Escape single quotes for the SQLite string literal (the library call
    // offers no bound parameters here); previously the raw URL was
    // interpolated, which breaks the query on any URL containing a quote.
    $quoted = str_replace("'", "''", $url);
    $exists = scraperwiki::select("* from pages where url = '{$quoted}'");
    if (count($exists)) {
        // Already visited: show the stored row and stop this branch.
        print_r($exists);
        return;
    }
    global $counter; // running visit count shared with the top-level script
    $counter++;
    scraperwiki::save_sqlite(array('url'), array('count' => $counter, 'url' => $url), 'pages');
    $html_content = scraperwiki::scrape($url);
    $html = str_get_html($html_content);
    // Follow every anchor inside the "related" box; hrefs are site-relative.
    foreach ($html->find('div.related a') as $related) {
        scrape_related('http://tunein.com' . $related->href);
    }
}
// NOTE(review): the leading braces below close a helper (likely searchForId)
// whose start lies outside this chunk; left untouched.
} } return null; }
# Check for pages with no usable data
function fourOhFour($html) {
    // Search from byte offset 1200 onward — presumably to skip the shared
    // page header/boilerplate before looking for the marker; TODO confirm.
    if (strpos($html, 'Home not available', 1200) !== false) { return true; }
    return false;
}
$blacklist = array();
# get an array of the cottage data to scrape
scraperwiki::attach("hoseasons_summary");
$cottData = scraperwiki::select(" COTTAGE_ID, COTTAGE_URL, SLEEPS, BEDROOMS, PETS,COTTAGE_NAME, PRICE_LOW, PRICE_HIGH from 'hoseasons_summary'.SWDATA order by COTTAGE_URL");
// Resume support: cottURL stores the last URL processed on a previous run.
$placeholder = scraperwiki::get_var("cottURL");
if ($placeholder != "") {
    $index = searchForId($placeholder, $cottData);
    // Drop every row before the resume point so the loop restarts there.
    $cottData = array_splice($cottData, $index);
}
require 'scraperwiki/simple_html_dom.php';
$dom = new simple_html_dom();
foreach ($cottData as $value) {
    // Per-cottage working state, reset each iteration.
    $highPrice = "";
    $lowPrice = "";
    $found = 0;
    $count = 0;
    scraperwiki::save_var("cottURL", $value['COTTAGE_URL']);
    // check the cottage url against the blacklist
    foreach ($blacklist as $blItem) {
// Aggregate TfL bus-stop coordinates into one row per route, so consumers
// don't need to issue hundreds of per-route queries ("doing 600 queries in
// under 80 seconds isn't a smart idea", per the original author).
// The original pasted the aggregation twice: the first copy iterated $routes
// before it was defined (warnings hidden behind @) and saved an empty result
// set. Collapsed to a single correct pass with no error suppression.
scraperwiki::attach("tfl_bus_routes_scraper", "src");
$routes = scraperwiki::select("route, stop_name, latitude, longitude from src.tfl_buses where run = 1 order by sequence asc");

// Group stops by route; coords is a newline-separated "lat,lng,2357" list
// (the trailing 2357 is kept verbatim from the original output format).
$routemap = array();
foreach ($routes as $route) {
    $key = $route['route'];
    $routemap[$key]['route'] = $key;
    if (!isset($routemap[$key]['coords'])) {
        $routemap[$key]['coords'] = '';
    }
    $routemap[$key]['coords'] .= $route['latitude'] . ',' . $route['longitude'] . ',2357' . "\n";
}

// Flatten to rows with a sequential id (save_sqlite key).
$theroutes = array();
$count = 0;
foreach ($routemap as $a_route) {
    $count++;
    $theroutes[] = array('id' => $count, 'route' => $a_route['route'], 'coords' => $a_route['coords']);
}
scraperwiki::save_sqlite(array("id"), $theroutes);
<?php
// JSON/JSONP feed of the Irish President's engagements over the last 7 days.
// The original file had the output section pasted ABOVE the data section, so
// $alltrips and $callback were used before being defined; reordered so the
// data is built first and emitted last.
// Known row fields: {label} {id} {type} {day} {date} {year} {time} {startdate}
// {latlng} {arasnotaras} {details} {place} {act} {issue} {constitutional}
// {destf} {address} {days} {destination}
scraperwiki::attach('irish_president_engagementsjson');
$trips = scraperwiki::select("* from irish_president_engagementsjson.swdata where date > date('now','-7 day');");

$alltrips = array();
foreach ($trips as $trip) {
    // Strip parentheses the downstream consumer can't handle: "(" removed,
    // ")" replaced by a comma — transform preserved exactly from original.
    $tripinfo = str_replace(')', ',', str_replace('(', '', $trip["info"]));
    $triplabel = str_replace(')', ',', str_replace('(', '', $trip["label"]));
    $trip["info"] = $tripinfo;
    $trip["label"] = $triplabel;
    $alltrips[] = $trip;
}

scraperwiki::httpresponseheader('Content-Type', 'application/json', 'charset=utf8');

// JSONP when a callback is supplied. Only identifier-style names are allowed,
// preventing script injection through the callback query parameter (the
// original echoed $_GET['callback'] unvalidated).
$callback = isset($_GET['callback']) ? $_GET['callback'] : '';
if ($callback !== '' && preg_match('/^[A-Za-z_$][A-Za-z0-9_$.]*$/', $callback)) {
    header("Content-Type: text/javascript; charset=utf8");
    echo $callback . "(" . json_encode($alltrips) . ");";
} else {
    header("Content-type: application/json");
    echo json_encode($alltrips);
}
// Render the ofxAddons repo list as a jQuery Mobile listview: grouped by
// category (default) or flat alphabetical, selected by $sort.
// Fix: repo name/link/category from the database were interpolated into the
// HTML unescaped (stored-XSS / broken markup on special characters); all
// output is now passed through htmlspecialchars.
if ($sort == "" || $sort == "category") {
    $pCategory = "";
    $data = scraperwiki::select("* from repos order by category,name");
    foreach ($data as $d) {
        $name = htmlspecialchars($d["name"], ENT_QUOTES, 'UTF-8');
        $link = htmlspecialchars($d["link"], ENT_QUOTES, 'UTF-8');
        $category = $d["category"];
        // Emit a divider row whenever the category changes (rows pre-sorted).
        if ($category != $pCategory) {
            print "<li data-role=\"list-divider\">" . htmlspecialchars($category, ENT_QUOTES, 'UTF-8') . "</li>";
        }
        $pCategory = $category;
        print "<li><a href=\"?action=detail&link={$link}\" data-rel=\"dialog\">{$name}</a></li>";
    }
}
if ($sort == "alphabet") {
    $data = scraperwiki::select("* from repos order by name");
    foreach ($data as $d) {
        $name = htmlspecialchars($d["name"], ENT_QUOTES, 'UTF-8');
        $link = htmlspecialchars($d["link"], ENT_QUOTES, 'UTF-8');
        print "<li><a href=\"?action=detail&link={$link}\" data-rel=\"dialog\">{$name}</a></li>";
    }
}
?>
</ul>
</div><!-- /content -->
<div data-role="footer">
<h4>ofxAddons.com</h4>
</div><!-- /footer -->
</div><!-- /page -->
// NOTE(review): the first statement is the tail of a function whose start
// lies outside this chunk; left untouched.
return $txt; }

// Linear scan for the row whose COTTAGE_URL equals $id; returns its array
// key, or null when not found. Used below to resume a previous run mid-list.
function searchForId($id, $array) {
    foreach ($array as $key => $val) {
        if ($val['COTTAGE_URL'] === $id) { return $key; }
    }
    return null;
}

$blacklist = array();
$url = "http://www.coastandcountry.co.uk/cottage-details/";
scraperwiki::attach("coastandcountrycouk");
# get an array of the cottage data to scrape
$cottData = scraperwiki::select("COTTAGE_URL, PRICE_HIGH, PRICE_LOW from 'coastandcountrycouk'.SWDATA order by COTTAGE_URL");
// Resume support: cottID stores the last cottage processed on a previous run.
$placeholder = scraperwiki::get_var("cottID");
if ($placeholder != "") {
    $index = searchForId($placeholder, $cottData);
    // Drop every row before the resume point so the loop restarts there.
    $cottData = array_splice($cottData, $index);
}
require 'scraperwiki/simple_html_dom.php';
$dom = new simple_html_dom();
foreach ($cottData as $value) {
    scraperwiki::save_var("cottID", $value['COTTAGE_URL']);
    // check the cottage url against the blacklist
    foreach ($blacklist as $blItem) {
        if ($value['COTTAGE_URL'] == $blItem) { continue 2; }
    }
// --- Parse one development-application <p> node ($thispara) into a record ---
// Example markup (from the original):
// </a>
// <a href="webdata/resources/files/DAS-2015-42 12-09.pdf" onmouseover="self.status='';return true;" target="_blank">164 Roslyn Avenue, Blackmans Bay - Representation expiry date is 25 September 2015</a>
// <br>
// <span style="padding-left:25px;">Subdivision of one lot and balance</span>
// </p>
$record = array();
$addressDateAnchor = $thispara->find('a', 1);
$addressDateText = $addressDateAnchor->plaintext;
// Anchor text is "<address> - Representation expiry date is <date>".
$parts = explode(' - Representation expiry date is', $addressDateText);
$record['address'] = htmlspecialchars_decode($parts[0] . ', TAS');
$expiry = $parts[1];
$record['on_notice_to'] = date('Y-m-d', strtotime($expiry));
// Set more_info to the DA page because unfortunately the council takes the PDFs down
// $record['info_url'] = $kcbase . $addressDateAnchor->href;
$record['info_url'] = $dapage;
// Council reference is the first token of the PDF file name (last path segment).
$record['council_reference'] = explode(' ', trim(strrchr($kcbase . $addressDateAnchor->href, '/'), '/'))[0];
$descriptionspan = $thispara->find('span', 0);
$record['description'] = htmlspecialchars_decode($descriptionspan->plaintext);
$record['date_scraped'] = date('Y-m-d');
$record['comment_url'] = 'mailto:kc@kingborough.tas.gov.au';
// Skip rows already saved. Fix: quote-escape the reference before embedding it
// in the SQL literal (it was previously interpolated raw, which breaks the
// query if the scraped reference ever contains a quote).
$existingRecords = scraperwiki::select("* from data where `council_reference`='" . str_replace("'", "''", $record['council_reference']) . "'");
if (count($existingRecords) == 0) {
    print "Saving record " . $record['council_reference'] . "\n";
    scraperwiki::save(array('council_reference'), $record);
} else {
    print "Skipping already saved record " . $record['council_reference'] . "\n";
}
// NOTE(review): this closing brace ends a loop opened outside this chunk.
}
// NOTE(review): closes a construct opened outside this chunk; left untouched.
}
####################################################################################################################
####################################################################################################################
####################################################################################################################
####################################################################################################################
// Per-cottage working state for the discount calculations further below.
$originalPrice = "";
$discountedPrice = "";
$calcPercentage = "";
$discountAmount = "";
$percentage = "";
$i = 0;
$blacklist = array();
$url = "http://www.coastandcountry.co.uk/cottage-details/";
scraperwiki::attach("special_offers_coast_and_country_summary_delete");
# get an array of the cottage data to scrape
$cottData = scraperwiki::select("COTTAGE_URL from 'special_offers_coast_and_country_summary_delete'.SWDATA order by COTTAGE_URL");
// Resume support: cottID stores the last cottage processed on a previous run.
$placeholder = scraperwiki::get_var("cottID");
if ($placeholder != "") {
    // searchForId is defined elsewhere in this project; drops rows before
    // the resume point so the loop restarts there.
    $index = searchForId($placeholder, $cottData);
    $cottData = array_splice($cottData, $index);
}
require 'scraperwiki/simple_html_dom.php';
$dom = new simple_html_dom();
foreach ($cottData as $value) {
    scraperwiki::save_var("cottID", $value['COTTAGE_URL']);
    // check the cottage url against the blacklist
    foreach ($blacklist as $blItem) {
        if ($value['COTTAGE_URL'] == $blItem) { continue 2; }
    }
// Debug probe for the attached Lichfield Cathedral events data: dump the
// first record and stop. The original ran the identical attach+select+dump
// sequence twice and re-issued the same select inside the foreach header;
// collapsed to a single query whose result is reused.
scraperwiki::attach("lichfield_cathedral_events", "cathedral");
$cathedral = scraperwiki::select("* from cathedral.swdata");
foreach ($cathedral as $record) {
    var_dump($record);
    die;
}
/* sketch of the rows this scraper is eventually meant to emit:
$insert_ical = array();
$insert_ical['link'] = $insert['link'];
$insert_ical['DTSTART'] = $strt;
$insert_ical['DTEND'] = $strt+86399;
$insert_ical['FREQ'] = "DAILY";
$insert_ical['BYDAY'] = "";
$insert_ical['WKST'] = "MO";
$insert_ical['COUNT'] = round(($nd-$strt)/86400);
$insert_event = array();
$insert_event['name'] = "";
$insert_event['link'] = "";
$insert_venue = array();
$insert_venue['name'] = "";
$insert_venue['postcode'] = "";
$insert_venue['lat'] = "";
$insert_venue['lng'] = "";
$insert_venue['picture'] = "";
*/
<?php
// Debug snippet: report how many rows exist in tagtable.
// The original pasted the same query+print twice (an editor double-paste),
// querying and printing the count twice; collapsed to a single pass.
// Commented scaffolding for a create/insert test table is kept below.
//scraperwiki::sqliteexecute('create table create_test(a text,b text)');
//$val2="insert into create_test values('$name','$id')";
//scraperwiki::sqliteexecute($val2);
$result = scraperwiki::select('* from tagtable');
//$data=$result[0];
//$data['rank']='2';
print count($result);
<?php
// Emit an OPML directory of Sveriges Radio P3 podcast feeds: one <outline>
// per programme, each holding up to five recent episodes.
// Fixes against the original: the output tail was pasted before the setup
// (using $programs before it existed) — reordered; progId is now quoted and
// escaped in the SQL literal (raw interpolation breaks on non-numeric ids);
// XML attribute values are escaped to keep the document well-formed.
$sourcescraper = 'sr_p3_poddar';
scraperwiki::attach("sr_p3_poddar");
$programs = scraperwiki::select("distinct progId from sr_p3_poddar.swdata");
print '<?xml version="1.0" encoding="UTF-8"?>';
?>
<opml version="1.1">
<head>
<title>Podsändningar i P3</title>
</head>
<body>
<?php
foreach ($programs as $program) {
    $progId = $program['progId'];
    print '<outline text="' . htmlspecialchars($progId, ENT_QUOTES, 'UTF-8') . '">';
    $pods = scraperwiki::select("* from sr_p3_poddar.swdata where progId='" . str_replace("'", "''", $progId) . "' limit 5");
    foreach ($pods as $pod) {
        print '<outline text="' . htmlspecialchars($pod['description'], ENT_QUOTES, 'UTF-8') . '" URL="' . htmlspecialchars($pod['url'], ENT_QUOTES, 'UTF-8') . '" type="audio" />';
    }
    print '</outline>';
}
?>
</body>
</opml>
<?php
# Blank PHP
/*
// for every stock
// for every broker
// for every rec, start one month back
// compare the rec with the price when the next rec occurs (only when the rec changes), or the current price if there is only one rec
*/
require 'scraperwiki/simple_html_dom.php';
// Lookup table: "TICKER - Company name" => Stockholm exchange instrument id.
$stocks = array("AAK - AarhusKarlshamn" => "SSE36273", "ABB - ABB Ltd" => "SSE3966", "ABB U - ABB Ltd U" => "SSE81849", "ACAN B - Acando B" => "SSE981", "ACAP A - ACAP Invest A" => "SSE20016", "ACAP B - ACAP Invest B" => "SSE20017", "ACOM - A-Com" => "SSE4287", "ACTI - Active Biotech" => "SSE877", "ANOD B - Addnode B" => "SSE3887", "ADDT B - Addtech B" => "SSE14336", "AERO B - Aerocrine B" => "SSE41047", "ALFA - Alfa Laval" => "SSE18634", "ALNX - Allenex" => "SSE37656", "AOIL SDB - Alliance Oil Company SDB" => "SSE40583", "ATEL - AllTele" => "SSE66668", "ANOT - Anoto Group" => "SSE5110", "AWP - Arise Windpower" => "SSE74570", "ARTI B - Artimplant B" => "SSE953", "ASP - Aspiro" => "SSE13291", "ASSA B - ASSA ABLOY B" => "SSE402", "AZN - AstraZeneca" => "SSE3524", "ATCO A - Atlas Copco A" => "SSE45", "ATCO B - Atlas Copco B" => "SSE46", "LJGR B - Atrium Ljungberg B" => "SSE1011", "ALIV SDB - Autoliv SDB" => "SSE47", "AZA - Avanza Bank Holding" => "SSE988", "AVEG B - Avega Group B" => "SSE43396", "AXFO - Axfood" => "SSE4590", "AXIS - Axis" => "SSE5150", "BBTO B - B&B TOOLS B" => "SSE793", "BEGR - BE Group" => "SSE37309", "BEIA B - Beijer Alma B" => "SSE875", "BEIJ B - Beijer B" => "SSE792", "BELE - Beijer Electronics" => "SSE5081", "BRG B - Bergs Timber B" => "SSE891", "BETS B - Betsson B" => "SSE5121", "BILI A - Bilia A" => "SSE794", "BILL - Billerud" => "SSE14922", "BIOG B - BioGaia B" => "SSE959", "BINV - BioInvent International" => "SSE13348", "BIOT - Biotage" => "SSE5209", "BORG - Björn Borg" => "SSE40286", "BEF SDB - Black Earth Farming SDB" => "SSE66922", "BOL - Boliden" => "SSE15285", "BONG - Bong" => "SSE892", "BOUL - Boule Diagnostics" => "SSE82889", "BRIN B - Brinova Fastigheter B" => "SSE22922", "BTS B - BTS Group B" => "SSE13288", "BURE - Bure Equity" => "SSE800", "BMAX - Byggmax Group" => "SSE75712", "CAST - Castellum" => "SSE966", "CATE - Catena" => "SSE34362", "CCC - Cavotec" => "SSE84962", "CDON - CDON Group" => "SSE79303", "CEVI - CellaVision" => "SSE40679", "CSN - Cision" => "SSE1056", "CLAS B - Clas Ohlson B" => "SSE4145", "CLA B - Cloetta B" => "SSE63225", "COA - Coastal Contacts" => "SSE70690", "COIC - Concentric" => "SSE82656", "CCOR B - Concordia Maritime B" => "SSE971", "CNTA - Connecta" => "SSE29954", "CONS B - Consilium B" => "SSE803", "CORE - Corem Property Group" => "SSE66929", "CORE PREF - Corem Property Group Pref" => "SSE74282", "CTT - CTT Systems" => "SSE3081", "CYBE - Cybercom Group" => "SSE4345", "DAG - Dagon" => "SSE19210", "DEDI - Dedicare B" => "SSE81878", "DGC - DGC One" => "SSE56154", "DIAM B - Diamyd Medical B" => "SSE18765", "DIOS - Diös Fastigheter" => "SSE34271", "DORO - DORO" => "SSE896", "DUNI - Duni" => "SSE49775", "DURC B - Duroc B" => "SSE4005", "ECEX - East Capital Explorer" => "SSE49615", "ELAN B - Elanders B" => "SSE897", "ELEC - Electra Gruppen" => "SSE66436", "ELUX A - Electrolux A" => "SSE80", "ELUX B - Electrolux B" => "SSE81", "EKTA B - Elekta B" => "SSE806", "ELOS B - Elos B" => "SSE947", "ENEA - Enea" => "SSE1149", "ENRO - Eniro" => "SSE11046", "ENQ - EnQuest PLC" => "SSE75073", "EPCT - EpiCept" => "SSE32838", "ERIC A - Ericsson A" => "SSE100", "ERIC B - Ericsson B" => "SSE101", "ETX - Etrion" => "SSE78547", "EWRK - eWork Scandinavia" => "SSE72798", "FABG - Fabege" => "SSE861", "F*G - Fagerhult" => "SSE903", "FPAR - Fast Partner" => "SSE980", "BALD B - Fast. Balder B" => "SSE4212", "BALD PREF - Fast. Balder pref" => "SSE82823", "FEEL - Feelgood Svenska" => "SSE5053", "FIX B - Fenix Outdoor B" => "SSE905", "FING B - Fingerprint Cards B" => "SSE4870", "FBAB - FinnvedenBulten" => "SSE82239", "FPIP - FormPipe Software" => "SSE72565", "GETI B - Getinge B" => "SSE812", "GVKO B - Geveko B" => "SSE813", "GHP - Global Health Partner" => "SSE59064", "GUNN - Gunnebo" => "SSE816", "HAKN - Hakon Invest" => "SSE32443", "HLDX - Haldex" => "SSE817", "HAV B - Havsfrun Investment B" => "SSE990", "HEBA B - HEBA B" => "SSE991", "HEMX - Hemtex" => "SSE31293", "HM B - Hennes & Mauritz B" => "SSE992", "HEXA B - Hexagon B" => "SSE819", "HPOL B - HEXPOL B" => "SSE55907", "HIQ - HiQ International" => "SSE3540", "HMS - HMS Networks" => "SSE43302", "HOLM A - Holmen A" => "SSE221", "HOLM B - Holmen B" => "SSE222", "HUFV A - Hufvudstaden A" => "SSE820", "HUFV C - Hufvudstaden C" => "SSE821", "HUSQ A - Husqvarna A" => "SSE34913", "HUSQ B - Husqvarna B" => "SSE34915", "HOGA B - Höganäs B" => "SSE824", "IAR B - I.A.R Systems Group" => "SSE2346", "IS - Image Systems" => "SSE3571", "IFS A - Industrial & Financial Syst. A" => "SSE994", "IFS B - Industrial & Financial Syst. B" => "SSE995", "INDU A - Industrivärden A" => "SSE142", "INDU C - Industrivärden C" => "SSE143", "INDT - Indutrade" => "SSE31308", "ICTA B - Intellecta B" => "SSE941", "IJ - Intrum Justitia" => "SSE18962", "INVE A - Investor A" => "SSE160", "INVE B - Investor B" => "SSE161", "ITAB B - ITAB Shop Concept B" => "SSE56940", "JEEV - Jeeves Information Systems" => "SSE3555", "JM - JM" => "SSE13217", "KABE B - KABE B" => "SSE912", "KAHL - KappAhl" => "SSE33359", "KARO - Karo Bio" => "SSE3927", "KDEV - Karolinska Development B" => "SSE81547", "KINV A - Kinnevik A" => "SSE998", "KINV B - Kinnevik B" => "SSE999", "KLOV - Klövern" => "SSE19459", "KLOV PREF - Klövern pref" => "SSE86345", "KNOW - Know IT" => "SSE3219", "KLED - Kungsleden" => "SSE3546", "LAGR B - Lagercrantz Group B" => "SSE14335", "LAMM B - Lammhults Design Group B" => "SSE1049", "LATO B - Latour B" => "SSE914", "LIAB - Lindab International" => "SSE37400", "LOOM B - Loomis B" => "SSE61536", "LUND B - Lundbergföretagen B" => "SSE1012", "LUMI SDB - Lundin Mining Corporation SDB" => "SSE27709", "LUPE - Lundin Petroleum" => "SSE22335", "LUXO SDB - Luxonen SDB" => "SSE1014", "MEAB B - Malmbergs Elektriska B" => "SSE3223", "MEDA A - Meda A" => "SSE917", "MVIR B - Medivir B" => "SSE1020", "MEKO - Mekonomen" => "SSE4986", "MELK - Melker Schörling" => "SSE37472", "MTRO SDB A - Metro International SDB A" => "SSE12429", "MTRO SDB B - Metro International SDB B" => "SSE12430", "MSAB B - Micro Systemation B" => "SSE85846", "MICR - Micronic Mydata AB" => "SSE4714", "MSON A - Midsona A" => "SSE3921", "MSON B - Midsona B" => "SSE3922", "MIDW A - Midway A" => "SSE834", "MIDW B - Midway B" => "SSE835", "MIC SDB - Millicom Int. Cellular SDB" => "SSE24507", "MOB - Moberg Derma" => "SSE79252", "MTG A - Modern Times Group A" => "SSE3598", "MTG B - Modern Times Group B" => "SSE3599", "MORP B - Morphic Technologies B" => "SSE53228", "MQ - MQ Holding" => "SSE76085", "MSC B - MSC Konsult B" => "SSE1023", "MULQ - MultiQ International" => "SSE4359", "NAXS - NAXS Nordic Access Buyout Fund" => "SSE40342", "NCC A - NCC A" => "SSE837", "NCC B - NCC B" => "SSE838", "NMAN - Nederman Holding" => "SSE40347", "NET B - Net Entertainment NE B" => "SSE62494", "NETI B - Net Insight B" => "SSE3871", "NEWA B - New Wave B" => "SSE920", "NIBE B - NIBE Industrier B" => "SSE921", "NOBI - Nobia" => "SSE19095", "NOLA B - Nolato B" => "SSE923", "NDA SEK - Nordea Bank" => "SSE220", "NOMI - Nordic Mines" => "SSE57018", "NSP B - Nordic Service Partn. Holdings" => "SSE51621", "NN B - Nordnet B" => "SSE4872", "NOTE - NOTE" => "SSE25319", "NOVE - Novestra" => "SSE5116", "NTEK B - NOVOTEK B" => "SSE4000", "OASM - Oasmia Pharmaceutical" => "SSE76461", "ODD - Odd Molly International" => "SSE40936", "OEM B - OEM International B" => "SSE927", "OPCO - Opcon" => "SSE2282", "ORX - Orexo" => "SSE31885", "ORI SDB - Oriflame, SDB" => "SSE24227", "ORTI A - Ortivus A" => "SSE1031", "ORTI B - Ortivus B" => "SSE1032", "PAR - PA Resources" => "SSE34961", "PART - PartnerTech" => "SSE1036", "PEAB B - Peab B" => "SSE928", "PHON - Phonera" => "SSE5000", "POOL B - Poolia B" => "SSE3974", "PREC - Precise Biometrics" => "SSE10751", "PREV B - Prevas B" => "SSE1039", "PRIC B - Pricer B" => "SSE1040", "PACT - Proact IT Group" => "SSE4003", "PROB - Probi" => "SSE27701", "PROE B - Proffice B" => "SSE4208", "PROF B - Profilgruppen B" => "SSE929", "RATO A - Ratos A" => "SSE1044", "RATO B - Ratos B" => "SSE1045", "RAY B - RaySearch Laboratories B" => "SSE1063", "RSOF B - ReadSoft B" => "SSE3967", "RABT B - Rederi AB Transatlantic" => "SSE964", "REJL B - Rejlerkoncernen" => "SSE37758", "REZT - Rezidor Hotel Group" => "SSE37352", "RNBS - RNB RETAIL AND BRANDS" => "SSE13467", "RROS - Rottneros" => "SSE930", "RTIM B - Rörvik Timber B" => "SSE1050", "SAAB B - SAAB B" => "SSE1051", "SAGA - Sagax" => "SSE43045", "SAGA PREF - Sagax pref" => "SSE43046", "SAND - Sandvik" => "SSE4928", "SAS - SAS" => "SSE13557", "SCA A - SCA A" => "SSE322", "SCA B - SCA B" => "SSE323", "SCV A - SCANIA A" => "SSE260", "SCV B - SCANIA B" => "SSE261", "SEB A - SEB A" => "SSE281", "SEB C - SEB C" => "SSE282", "SECT B - SECTRA B" => "SSE3083", "SECU B - Securitas B" => "SSE401", "SMF - Semafo" => "SSE84981", "SEMC - Semcon" => "SSE1054", "SENS - Sensys Traffic" => "SSE12241", "SIGM B - Sigma B" => "SSE14531", "SINT - SinterCast" => "SSE1058", "SKA B - Skanska B" => "SSE283", "SKF A - SKF A" => "SSE284", "SKF B - SKF B" => "SSE285", "SKIS B - SkiStar B" => "SSE939", "SOF B - Softronic B" => "SSE1546", "SSAB A - SSAB A" => "SSE300", "SSAB B - SSAB B" => "SSE301", "STFY - StjärnaFyrkant AB" => "SSE1007", "STE A - Stora Enso A" => "SSE2169", "STE R - Stora Enso R" => "SSE2170", "SVIK - Studsvik" => "SSE13094", "SHB A - Sv. Handelsbanken A" => "SSE340", "SHB B - Sv. Handelsbanken B" => "SSE341", "SWEC A - SWECO A" => "SSE1061", "SWEC B - SWECO B" => "SSE1062", "SWED A - Swedbank A" => "SSE120", "SWED PREF - Swedbank pref" => "SSE61365", "SVED B - Svedbergs B" => "SSE935", "SWMA - Swedish Match" => "SSE361", "SOBI - Swedish Orphan Biovitrum" => "SSE36316", "SWOL B - Swedol B" => "SSE55913", "SVOL A - Svolder A" => "SSE936", "SVOL B - Svolder B" => "SSE937", "SYSR - Systemair" => "SSE43007", "TEL2 A - Tele2 A" => "SSE1026", "TEL2 B - Tele2 B" => "SSE1027", "TLSN - TeliaSonera" => "SSE5095", "TIEN - Tieto Oyj" => "SSE4025", "TRAC B - Traction B" => "SSE4963", "TRAD - TradeDoubler" => "SSE31884", "TWW SDB A - Transcom WorldWide SDB A" => "SSE14353", "TWW SDB B - Transcom WorldWide SDB B" => "SSE14354", "TRMO - Transmode Holding" => "SSE82457", "TREL B - Trelleborg B" => "SSE364", "TAGR - Trigon Agri" => "SSE40543", "UNIB SDB - Unibet Group" => "SSE36950", "UFLX B - Uniflex B" => "SSE36986", "WALL B - Wallenstam B" => "SSE945", "VBG B - VBG GROUP B" => "SSE942", "VRG B - Venue Retail Group B" => "SSE946", "WIHL - Wihlborgs Fastigheter" => "SSE29759", "VIT B - Vitec Software Group B" => "SSE5177", "VITR - Vitrolife" => "SSE13469", "VOLV A - Volvo A" => "SSE365", "VOLV B - Volvo B" => "SSE366", "VNIL SDB - Vostok Nafta Investment, SDB" => "SSE41044", "XANO B - XANO Industri B" => "SSE1074", "AF B - ÅF B" => "SSE862", "ORES - Öresund" => "SSE863");
//exit(0);
$sourcescraper = 'aktietips';
scraperwiki::attach($sourcescraper);
// Volvo recommendations in the sample window, oldest first.
$data = scraperwiki::select("stock, by, `to`, date, rec\nfrom `swdata` \nwhere date > '2011-01-01' and date < '2012-05-01' and stock = 'Volvo'\norder by date asc");
$search_maklare = array();
for ($i = 0; $i < count($data); $i++) {
    $maklare = $data[$i]["by"]; // broker name
    // Process each broker only once; skip repeats and blank names.
    if (array_key_exists($maklare, $search_maklare) || $maklare == null || $maklare == "") { continue; }
    $stock = $data[$i]["stock"];
    // NOTE(review): getReksFromMaklare() is defined outside this chunk —
    // presumably returns this broker's recommendation rows; confirm contract.
    $mrows = getReksFromMaklare($stock, $maklare, $data);
    print_r("<b>" . $stock . " av " . $maklare . "</b><br />");
    $search_maklare[$maklare] = true;
    $kurs1 = -1;  // previously seen price (-1 = none yet)
    $rek1 = null; // previously seen recommendation
    $points = 0;  // running score for this broker
    foreach ($mrows as $row) {
        print_r($row["by"] . " (" . $row["date"] . "), rek: " . $row["rec"] . ", riktkurs: " . $row["to"] . "<br />");
<ev:enddate><?php echo $enddate; ?>
</ev:enddate>
<ev:location>Lichfield Cathedral</ev:location>
<georss:point>52.685556 -1.830556</georss:point>
</item>
<?php } ?>
</rdf:RDF><?php
// NOTE(review): the markup above is the tail of a per-event loop whose start
// ($enddate and the opening foreach) lies outside this chunk; the feed header
// below precedes it in run order.
// Attach the data
scraperwiki::attach("hhhlich-lichfield-arts-events");
// Get the data
$data = scraperwiki::select("* from swdata limit 10");
//print_r($data);
echo '<?xml version="1.0" encoding="utf-8"?>';
?>
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:ev="http://purl.org/rss/1.0/modules/event/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:georss="http://www.georss.org/georss" xmlns:creativeCommons="http://backend.userland.com/creativeCommonsRssModule" xmlns="http://purl.org/rss/1.0/">
<rdf:Description rdf:about="http://lichfieldlive.co.uk/">
<dc:title>Lichfield What's On Importer</dc:title>
<dc:source rdf:resource="http://www.lichfield-cathedral.org/"/>
<dc:creator>Lichfield Community Media</dc:creator>
// NOTE(review): the leading braces below close a helper (likely searchForId)
// whose start lies outside this chunk; left untouched.
} } return null; }
# Check for pages with no usable data
function fourOhFour($html) {
    // Search from byte offset 1200 onward — presumably to skip the shared
    // page header/boilerplate before looking for the marker; TODO confirm.
    if (strpos($html, 'Home not available', 1200) !== false) { return true; }
    return false;
}
$blacklist = array();
# get an array of the cottage data to scrape
scraperwiki::attach("bluechip_summary");
$cottData = scraperwiki::select(" COTTAGE_ID, COTTAGE_URL, SLEEPS, BEDROOMS, FEATURES,COTTAGE_NAME, PRICE_LOW from 'bluechip_summary'.SWDATA order by COTTAGE_URL");
// Resume support: cottURL stores the last URL processed on a previous run.
$placeholder = scraperwiki::get_var("cottURL");
if ($placeholder != "") {
    $index = searchForId($placeholder, $cottData);
    // Drop every row before the resume point so the loop restarts there.
    $cottData = array_splice($cottData, $index);
}
require 'scraperwiki/simple_html_dom.php';
$dom = new simple_html_dom();
foreach ($cottData as $value) {
    // Per-cottage working state, reset each iteration.
    $highPrice = "";
    $lowPrice = "";
    $found = 0;
    $count = 0;
    scraperwiki::save_var("cottURL", $value['COTTAGE_URL']);
    // check the cottage url against the blacklist
    foreach ($blacklist as $blItem) {
)</li>
<?php
// NOTE(review): this fragment starts mid-markup inside a per-contact loop;
// $organization, $orgs and $w are defined outside this chunk.
$orgs[$organization] += 1;
}
print "</ul>";
print "<h2>Organizations contacted</h2><ul>";
// One bullet per organisation with its contact count.
foreach ($orgs as $o => $k) {
?>
<li><?php echo $o; ?> (<?php echo $k; ?> )</li><?php
}
print '</ul>';
// NOTE(review): $w is interpolated raw into the SQL literal — injection /
// breakage risk if it can contain quotes; confirm where $w is assigned.
$data = scraperwiki::select("count(*) AS c, subject AS s FROM contact INNER JOIN contact_subject ON contact_subject.contact_id=contact.contact_id WHERE behalf='{$w}' GROUP BY subject ORDER BY subject DESC");
print "<h2>Subjects covered</h2><ul>";
foreach ($data as $row) {
    // extract() creates $c (count) and $s (subject) from the row.
    extract($row);
?>
<li><?php echo $s; ?> (<?php echo $c; ?> )</li><?php
}
print "</ul>";
// Returns true when the row with the given id exists in `data` and has at
// least one of surname/firstname/fathername non-empty; false otherwise —
// including on query failure, preserving the original best-effort contract.
function validateEntry($id) {
    $result = false;
    try {
        // Fix: escape quotes before embedding $id in the SQL literal (it was
        // previously interpolated raw, which breaks on ids containing quotes).
        $quoted = str_replace("'", "''", $id);
        $recordSet = scraperwiki::select("* from data where id ='" . $quoted . "'");
        if (!empty($recordSet[0]['id'])) {
            // Any non-empty name field makes the entry usable.
            $result = $recordSet[0]['surname'] != ""
                || $recordSet[0]['firstname'] != ""
                || $recordSet[0]['fathername'] != "";
        }
    } catch (Exception $e) {
        // Swallow deliberately: an unreadable row is treated as invalid.
    }
    return $result;
}
print_r("start");
scraperwiki::attach("appcelerator_devlink");
// Bootstrap variables
// First run: create the paging cursor (table_info is falsy while the
// variables table does not exist yet).
if (!scraperwiki::table_info($name = "swvariables")) {
    scraperwiki::save_var('last_page', 0);
}
$lastPage = scraperwiki::get_var('last_page');
if ($lastPage > 0) {
    $offset = " OFFSET " . $lastPage;
    $counter = $lastPage;
} else {
    $offset = "";
    $counter = 0;
}
print_r($offset);
// Page through the source data 1500 rows at a time, resuming at the cursor.
$data = scraperwiki::select("* from appcelerator_devlink.swdata LIMIT 1500" . $offset);
foreach ($data as $row) {
    // Output row: source fields copied over, social fields filled in below.
    $OBJ = array('id' => $row['id'], 'name' => $row['name'], 'company' => $row['company'], 'location' => $row['location'], 'date' => $row['date'], 'url' => $row["url"], 'profile' => $row["profile"], 'twitter' => '', 'klout' => '', 'linkedIn' => '', 'certifications' => '');
    // Clean Links
    $links = json_decode($row['links']);
    foreach ($links as $link) {
        if ($link->label == 'Twitter:') {
            // Twitter: strip "@", whitespace and the label to get the bare handle.
            $twitter = str_replace("@", "", $link->data);
            $twitter = preg_replace("/\\s+/", "", $twitter);
            $twitter = str_replace("Twitter:", "", $twitter);
            $OBJ['twitter'] = $twitter;
            // Klout (based on Twitter): resolve handle -> Klout id -> profile.
            $klout = scraperWiki::scrape('http://api.klout.com/v2/identity.json/twitter?screenName=' . $twitter . '&key=v23b2ddvdf8n5fvap95kk56r');
            $klout = json_decode($klout);
            $klout = scraperWiki::scrape('http://api.klout.com/v2/user.json/' . $klout->id . '?key=v23b2ddvdf8n5fvap95kk56r');
<style type="text/css">
body { margin: 0; padding: 0; font:0.8em/1.5em "Lucida Grande", "Lucida Sans Unicode", Helvetica, Arial, sans-serif; }
</style>
</head>
<body>
<div id="map_canvas" style="width: 100%; height: 100%;"></div>
</body>
</html>
<?php
// NOTE(review): the markup above is the tail of the emitted page; the data
// preparation below runs first in the original multi-part view.
scraperwiki::attach("walsall_warwickshire_food_safety_inspections");
$data = scraperwiki::select("* from walsall_warwickshire_food_safety_inspections.swdata");
// Build JS marker literals [infoWindowHtml, lat, lng] for each premises that
// has a longitude. NOTE(review): $markers is never initialised, so implode()
// below warns when no rows qualify; also the foreach value variable shadows
// $data itself — flagged, left untouched here.
foreach ($data as $data) {
    if (strlen($data['latlng_lng']) > 0) {
        $markers[] = "['<h3>" . addslashes(trim($data['name'])) . "</h3><p>" . $data['rating'] . " stars</p>'," . $data['latlng_lat'] . "," . $data['latlng_lng'] . "]";
    }
}
$markers = implode(",", $markers);
?>
<html>
<head>
<script type="text/javascript" src="http://maps.google.com/maps/api/js?sensor=false"></script>
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.5.2/jquery.min.js" type="text/javascript"></script>
<script type="text/javascript">
jQuery.noConflict();
jQuery(document).ready(function($){
<?php
# Blank PHP
// Smoke test for the sqlite helpers: ensure table `ll` exists, insert three
// sample rows, commit, and print the first row back.
// The original pasted the whole sequence twice (editor double-paste), which
// re-created the table redundantly and inserted the sample rows twice per
// run; collapsed to a single pass. Commented scaffolding kept below.
//scraperwiki::sqliteexecute("DROP TABLE last_update");
scraperwiki::sqliteexecute("CREATE TABLE IF NOT EXISTS ll (\n id INTEGER PRIMARY KEY ASC,\n HITs_available int,\n jobs int\n )");
//scraperwiki::sqliteexecute('update ll set jobs = 100 AND (set jobs = 200)');
scraperwiki::sqliteexecute('INSERT INTO ll (jobs) VALUES (100), (200), (300)');
scraperwiki::sqlitecommit();
$res = scraperwiki::select('* FROM ll');
print_r($res[0]);
//print_r(scraperwiki::show_tables());
//print_r(scraperwiki::table_info($name="last_update"));
// NOTE(review): this fragment starts inside the per-URL loop whose head
// appears further down (the original view's sections are rotated); $results,
// $url and the enclosing foreach are defined there.
$results["twitter_url"] = $url;
} else {
$results["website_url"] = $url;
}
}
// There are max 3 urls we are interested in
if (sizeof($results) === 4) {
break;
}
}
scraperwiki::save_sqlite(array("id"), $results, "eparlimen_social_links");
}
require 'scraperwiki/simple_html_dom.php';
// Full refresh: clear previously collected links before re-scraping.
scraperwiki::sqliteexecute("DELETE FROM eparlimen_social_links");
scraperwiki::attach("eparlimen-constituencies", "urls");
$urls = scraperwiki::select("* FROM urls.eparlimen_constituencies_links");
foreach ($urls as $url) {
    $url = str_replace(",%20", "", $url["url"]); // A hack for a known bad link. This should be in the link scraper, but it's xmas and I have better things to do :)
    $html = scraperwiki::scrape($url);
    $dom = new simple_html_dom();
    $dom->load($html);
    // Third <li> of the detail list holds the constituency code.
    $node = $dom->find("ul.wrap_senarai li", 2);
    if (is_object($node)) {
        $code = $node->children(1)->plaintext;
    } else {
        echo "Unable to parse {$url}\n";
        continue;
    }
    $results = array("id" => $code);
    foreach ($dom->find("div.detail div a") as $data) {
# Blank PHP
// Render rows from the attached test scraper as a small HTML table.
// Fixes: scraperwiki::select() prefixes the query with "select " itself (every
// other query in this project omits the keyword), so passing a full
// "SELECT * FROM ..." statement produced an invalid doubled query — the
// leading keyword is dropped. The verbatim duplicate paste of the whole
// snippet (which printed everything twice) is also removed.
$sourceScraper = 'testi-scraper_1';
scraperwiki::attach($sourceScraper);
$data = scraperwiki::select("* FROM swdata WHERE (Minutes_late > 0)");
print_r($data);
print "<table>";
// NOTE(review): the header labels don't match the printed columns
// (Primary_code / Actual_time) — confirm the intended headings.
print "<tr><th>Country</th><th>Years in school</th>";
foreach ($data as $d) {
    print "<tr>";
    print "<td>" . $d["Primary_code"] . "</td>";
    print "<td>" . $d["Actual_time"] . "</td>";
    print "</tr>";
}
print "</table>";
print "This is a <em>fragment</em> of HTML.";
// NOTE(review): Czech Senate voting-record parser fragment. It resumes from the
// saved "last_id" var, reads stored page HTML rows (id > last_id) from the attached
// "cz_senate_voting_records_downloader_2" datastore, and extracts from each page's
// <h1> the session number ("schůze"), vote number ("hlasování") and date
// (dd.mm.yyyy reassembled as yyyy-mm-dd). The chunk ends inside the open
// `if (isset($p_ar[1])) {` — the rest of the per-row parsing is outside this view —
// so the code is left byte-identical; comments only.
// NOTE(review): str_replace(" ", " ", ...) appears to swap a non-breaking space
// (U+00A0) for a normal space — the two arguments render identically here; confirm
// the first argument's actual bytes before touching this line.
//scraperwiki::save_var('last_id',55626); //55150 /*scraperwiki::sqliteexecute("delete from info where id>55652"); scraperwiki::sqlitecommit(); die();*/ //get last id //scraperwiki::save_var('last_id',0); $last_id = scraperwiki::get_var('last_id', 0); echo $last_id; //read the saved tables scraperwiki::attach("cz_senate_voting_records_downloader_2", "src"); $rows = scraperwiki::select("id from src.swdata where id>{$last_id} order by id"); if (!empty($rows)) { foreach ($rows as $html) { //get dom $dom = new simple_html_dom(); $html2 = scraperwiki::select("* from src.swdata where id={$html['id']}"); $dom->load(str_replace(" ", " ", $html2[0]['html'])); //common part $div = $dom->find("div[class=wysiwyg]", 0); //info $h1 = $div->find('h1', 0); preg_match('/([0-9]{1,}). schůze/', $h1->innertext, $matches); $schuze = $matches[1]; preg_match('/([0-9]{1,}). hlasování/', $h1->innertext, $matches); $hlasovani = $matches[1]; preg_match('/([0-9]{2}).([0-9]{2}).([0-9]{4})/', $h1->innertext, $matches); $date = implode('-', array($matches[3], $matches[2], $matches[1])); $p = $div->find('p[class=openingText]', 0); $p_ar = explode('<br />', $p->innertext); $name = $p_ar[0]; if (isset($p_ar[1])) {
// NOTE(review): rotated forum-thread scraper fragment (attached source "s-in-s",
// sexinsex.net). The opening statements are the tail of a previous iteration's
// loop; the script then resumes from the saved "last_id" var, scrapes one thread
// per index $i (rows fetched one at a time via "limit {$i},1"), and saves each
// post body longer than 1000 characters base64-encoded under the key
// "<j>-<url>". The trailing foreach is cut off mid-loop, continuing past this
// chunk, so the code is left byte-identical; comments only.
// NOTE(review): the @ on save_sqlite suppresses save errors — apparently a
// deliberate best-effort, but it also hides schema/encoding problems; consider
// logging failures instead.
if (mb_strlen($noidung) > 1000) { $j++; @scraperwiki::save_sqlite(array('id'), array('id' => $j . '-' . $src[0]['url'], 'title' => $src[0]['title'], 'url' => $src[0]['url'], 'content' => base64_encode($noidung), 'order' => $j, 'num' => $src[0]['num'], 'reply' => $src[0]['reply'])); } } $html->clear(); unset($html); scraperwiki::save_var('last_id', $i); } require 'scraperwiki/simple_html_dom.php'; scraperwiki::attach("s-in-s", "src"); //scraperwiki::save_var('last_id', 1); //exit(); $id = scraperwiki::get_var('last_id'); for ($i = $id; $i < 1900; $i++) { $src = scraperwiki::select("* from src.swdata limit {$i},1"); $url = $src[0]['link']; $url = 'http://sexinsex.net/bbs/' . $url; $html_content = scraperwiki::scrape($url); $html = str_get_html($html_content); $data = array(); $tr = $html->find("div.postmessage div.t_msgfont"); $j = 0; foreach ($tr as $trr) { $noidung = $trr->find('div', 0)->innertext; //$noidung = utf8_encode($noidung); if (mb_strlen($noidung) > 1000) { $j++; @scraperwiki::save_sqlite(array('id'), array('id' => $j . '-' . $src[0]['url'], 'title' => $src[0]['title'], 'url' => $src[0]['url'], 'content' => base64_encode($noidung), 'order' => $j, 'num' => $src[0]['num'], 'reply' => $src[0]['reply'])); } }
<?php
// NOTE(review): resumable HobbyKing battery-page scraper fragment. It resumes from
// the product id saved in the "currentId" var, pulls not-yet-scraped ids from the
// attached "hobbyking_batteryidlist" datastore, and is capped at $maxPerRun (100)
// products per run. The chunk ends mid-foreach (the per-product parsing of
// SPAN[id=prodDataArea] continues past this view), so the code is left
// byte-identical; comments only.
// NOTE(review): $loopCount is compared against $maxPerRun but never incremented in
// the visible portion — presumably incremented later in the loop body; confirm
// downstream, otherwise the cap never triggers.
require 'scraperwiki.php'; require 'scraperwiki/simple_html_dom.php'; $startProductId = scraperwiki::get_var("currentId", -1); if ($startProductId == -1) { print "No previous saved position found. Starting from scratch."; } else { print "Resuming from product id {$startProductId}\n"; } scraperwiki::attach("hobbyking_batteryidlist"); $batteries = scraperwiki::select("id from hobbyking_batteryidlist.data where id > {$startProductId} order by id asc"); $remainingCount = count($batteries); print "Found {$remainingCount} batteries left to be scraped."; $maxPerRun = 100; $loopCount = 0; foreach ($batteries as $bat) { if ($loopCount > $maxPerRun) { print "Ending run after {$maxPerRun} iterations."; break; } $productId = $bat['id']; print "Retrieving " . $productId . "\n"; $html = scraperWiki::scrape("http://www.hobbyking.com/hobbyking/store/uh_viewItem.asp?idProduct={$productId}"); //print $html . "\n"; $dom = new simple_html_dom(); $dom->load($html); // Get the product data (located in a span tag). Should only be one product data area! $productDataAreasDom = $dom->find("SPAN[id=prodDataArea]"); $productDataDom = $productDataAreasDom[0]; //print $productData . "\n";
// NOTE(review): fragment of a used-car listing scraper, cut mid-foreach on BOTH
// ends — $el, $eg, $advertReference, $model, $type, $year and $age all come from
// the enclosing loop outside this view. It extracts price/mileage/distance as
// integers, the advert URL and type, preserves the original "created" date for
// adverts already present in swdata (new ones get today's Ymd), and prints one
// tab-separated summary line per record. Left byte-identical because of the
// unbalanced scope; comments only.
// NOTE(review): guid is concatenated straight into SQL ("... where guid=" .
// $advertReference) — SQL injection / syntax risk unless the reference is a bare
// number; this should be a parameterised query.
// NOTE(review): preg_replace('[\\D]', ...) uses '[' and ']' as PATTERN DELIMITERS,
// so the effective pattern is \D (strip non-digits). It works, but is easy to
// misread as a character class — worth rewriting as '/\D/' when this loop is next
// touched.
$price = intval(preg_replace('[\\D]', '', $eg)); // scrape price as integer $mileage = $el->find("span.mileage", 0)->innertext; $mileage = intval(preg_replace('[\\D]', '', $mileage)); // scrape milage as integer $distance = $el->find("span.distanceAmount", 0)->innertext; $distance = intval(preg_replace('[\\D]', '', $distance)); // scrape distance as integer $url = $el->find("h2 a", 0)->href; // scrape url //$url = $el->find("div.vehicleTitle a",0 ); // scrape url ->href; $advertType = $el->find("span.advertType", 0)->innertext; $advertcount = scraperwiki::select("count(*) as count from swdata where guid=" . $advertReference); if ($advertcount[0]['count'] == 1) { print 'existing record'; $createarray = scraperwiki::select("created as created from swdata where guid=" . $advertReference); $created = $createarray[0]['created']; } else { print 'new record'; $created = date("Ymd"); } $record_counter++; $description = $el->find("div.searchResultMainText", 0)->innertext; // if (strpos($description,"Leather")==0 ) break ; print $record_counter . "\t" . $advertReference . "\t" . $model . "\t" . $type . "\t" . $year . "\t" . $age . "\t" . $distance . "\t" . $mileage . "\t" . $price . "\t" . $advertType . "\t" . $url . "\n"; // if ($type=="GS") break; //dont save crappy GS models // if (strpos($model,"ommercial")>0 ) break ; //no commercials allowed // if (strpos($model,"OMMERCIAL")>0 ) break ; //no commercials allowed //if (strpos($description," Cloth ")>0 ) break ; //dont save cloth seats // if ($type=="HSE") break; // if ($type=="SE") break;
/**
 * Debug helper: report how many rows a datastore table holds and, optionally,
 * list its columns as "name(type); " pairs.
 *
 * Does nothing unless the global $DEBUG_TABLES flag is truthy.
 *
 * @param string $schema     attached-scraper schema prefix
 * @param string $tname      table name within that schema
 * @param bool   $showSchema when true, also print the column listing
 */
function debug_table($schema, $tname, $showSchema = FALSE)
{
    global $DEBUG_TABLES;
    if (!$DEBUG_TABLES) {
        return;
    }

    $qualified = $schema . "." . $tname;

    // Row count via the datastore's SQL interface.
    $countRows = scraperwiki::select("count(*) AS n FROM " . $qualified);
    print "{$qualified} size: " . $countRows[0]['n'] . " rows.\n";

    if (!$showSchema) {
        return;
    }

    // Column listing, one "name(type); " entry per column, newline-terminated.
    print "{$qualified} schema: ";
    foreach (scraperwiki::table_info($qualified) as $col) {
        print $col['name'] . "(" . $col['type'] . "); ";
    }
    print "\n";
}
// NOTE(review): rotated view over the attached "german-landkreise" scraper. This
// chunk opens with the TAIL of the area/density tables (the selects defining
// $all_by_area / $all_by_density appear later in the line because of the
// rotation), then runs four aggregate queries and renders Landkreis tables by
// state, population, area and density; the final foreach is cut off mid-loop.
// Left byte-identical — the rotation makes a safe restyle impossible from this
// view; comments only.
foreach ($all_by_area as $item) { echo '<tr><td>' . $item['name'] . '</td><td>' . $item['area'] . '</td></tr>' . "\n"; } echo '</table>'; echo '<h2>Landkreise nach Bevölkerungsdichte (Einwohner/km<sup>2</sup>)</h2>'; echo '<table>'; foreach ($all_by_density as $item) { echo '<tr><td>' . $item['name'] . '</td><td>' . $item['inhab_density'] . '</td></tr>' . "\n"; } echo '</table>'; $sourcescraper = 'german-landkreise'; scraperwiki::attach($sourcescraper); $grouped_by_state = scraperwiki::select("state, COUNT(*) AS num FROM swdata GROUP BY state"); $all_by_population = scraperwiki::select("name, inhabitants FROM swdata ORDER BY inhabitants DESC"); $all_by_area = scraperwiki::select("name, CAST(area AS NUMERIC) AS area FROM swdata ORDER BY area DESC"); $all_by_density = scraperwiki::select("name, inhab_density FROM swdata ORDER BY inhab_density DESC"); echo '<h2>Landkreise nach Bundesland</h2>'; echo '<table>'; foreach ($grouped_by_state as $item) { echo '<tr><td>' . $item['state'] . '</td><td>' . $item['num'] . '</td></tr>' . "\n"; } echo '</table>'; echo '<h2>Landkreise nach Einwohnerzahl</h2>'; echo '<table>'; foreach ($all_by_population as $item) { echo '<tr><td>' . $item['name'] . '</td><td>' . $item['inhabitants'] . '</td></tr>' . "\n"; } echo '</table>'; echo '<h2>Landkreise nach Fläche (km<sup>2</sup>)</h2>'; echo '<table>'; foreach ($all_by_area as $item) {
// NOTE(review): duplicated + rotated ScraperWiki view that serves VEVENT rows from
// the attached "ons_release_schedule_ical" datastore as a text/plain iCalendar
// stream, honouring limit/offset parsed from QUERY_STRING (defaults 10/0). The
// chunk begins with a copy that uses $limit/$offset before they are parsed — an
// artifact of the rotation — so it is left byte-identical; comments only.
// NOTE(review): probable iCal bugs to confirm against RFC 5545 — "DTSTAMP" and
// "DTSTART" are concatenated with no ':' separator (valid only if the stored
// values already start with ':' or ';TZID=...'), and "\\nEND:VEVENT" emits a
// literal backslash-n instead of a newline before END:VEVENT, unlike the other
// properties in the same string.
$vevents = scraperwiki::select("* from ons_release_schedule_ical.vevents limit {$limit} offset {$offset}"); $icalevents = array(); foreach ($vevents as $vevent) { $icalevent = "BEGIN:VEVENT\nDTSTAMP" . $vevent["DTSTAMP"] . "\nDTSTART" . $vevent["DTSTART"] . "\nX-TITLE:" . $vevent["Title"] . "\nSUMMARY:" . $vevent["Summary"] . "\nDESCRIPTION:Theme: " . $vevent["Theme"] . "\n" . $vevent["Summary"] . "\\nEND:VEVENT\n"; #print_r($icalevent); $icalevents[] = $icalevent; } print "BEGIN:VCALENDAR\nMETHOD:PUBLISH\nVERSION:2.0\nX-WR-CALNAME:ONS Release Calendar\nPRODID:-//Apple Inc.//iCal 4.0.4//EN\nX-APPLE-CALENDAR-COLOR:#B027AE\nX-WR-TIMEZONE:Europe/London\nCALSCALE:GREGORIAN\n" . implode("", $icalevents) . "\nEND:VCALENDAR"; // Derive an ical string of 10 eventss $querylist = explode("&", getenv("QUERY_STRING")); $limit = 10; $offset = 0; foreach ($querylist as $queryl) { $ql = explode("=", $queryl); if ($ql[0] == "limit" && count($ql) == 2) { $limit = intval($ql[1]); } if ($ql[0] == "offset" && count($ql) == 2) { $offset = intval($ql[1]); } } scraperwiki::httpresponseheader("Content-Type", "text/plain"); scraperwiki::attach('ons_release_schedule_ical'); $vevents = scraperwiki::select("* from ons_release_schedule_ical.vevents limit {$limit} offset {$offset}"); $icalevents = array(); foreach ($vevents as $vevent) { $icalevent = "BEGIN:VEVENT\nDTSTAMP" . $vevent["DTSTAMP"] . "\nDTSTART" . $vevent["DTSTART"] . "\nX-TITLE:" . $vevent["Title"] . "\nSUMMARY:" . $vevent["Summary"] . "\nDESCRIPTION:Theme: " . $vevent["Theme"] . "\n" . $vevent["Summary"] . "\\nEND:VEVENT\n"; #print_r($icalevent); $icalevents[] = $icalevent; } print "BEGIN:VCALENDAR\nMETHOD:PUBLISH\nVERSION:2.0\nX-WR-CALNAME:ONS Release Calendar\nPRODID:-//Apple Inc.//iCal 4.0.4//EN\nX-APPLE-CALENDAR-COLOR:#B027AE\nX-WR-TIMEZONE:Europe/London\nCALSCALE:GREGORIAN\n" . implode("", $icalevents) . "\nEND:VCALENDAR";
/**
 * Fetch a URL with cURL and return the response body as a string
 * (or false on failure, per curl_exec()).
 *
 * @param string $url absolute URL to fetch
 * @return string|false response body, or false if the transfer failed
 */
function file_get_contents_curl($url)
{
    $ch = curl_init();
    curl_setopt($ch, CURLOPT_HEADER, 0);         // body only, no response headers
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1); // return the data instead of printing it
    // FIX: was curl_setopt($ch, "http://" + CURLOPT_URL, $url) — string + constant
    // is not a valid option id (TypeError on PHP 8) and never set the request URL.
    curl_setopt($ch, CURLOPT_URL, $url);
    $data = curl_exec($ch);
    curl_close($ch);
    return $data;
}

// For every attached source scraper in $scraper (defined elsewhere in this file),
// look up each stored URL's PageRank via getPagerank() (also defined elsewhere)
// and save it into the local 'prank' table keyed by id.
foreach ($scraper as $scr) {
    scraperwiki::attach($scr);
    $qry = "* from " . $scr . ".swdata";
    $arr = scraperwiki::select($qry);
    foreach ($arr as $d) {
        $pr = (int) getPagerank($d["url"]);
        // NOTE(review): removed the original always-true `if (1)` wrapper and the
        // unused $d_site local — behaviour is unchanged.
        $d_key = $d["key"];
        // NOTE(review): utf8_encode() assumes Latin-1 input and is deprecated as of
        // PHP 8.2 — kept for behavioural compatibility; revisit when upgrading.
        $record = array(
            'url'  => utf8_encode($d["url"]),
            'pr'   => utf8_encode($pr),
            'ar'   => utf8_encode($d["rank"]),
            'id'   => utf8_encode($d_key),
            'desc' => $d["site"],
        );
        scraperwiki::save_sqlite(array("id"), $record, "prank");
    }
}
<?php
// NOTE(review): ScraperWiki view emitting an RSS 2.0 / iTunes podcast feed from
// the attached "exfm" datastore. The chunk is rotated: the <item> rendering loop
// appears first, followed by the closing tags and then the feed header; the
// trailing foreach continues past this chunk. The interleaved PHP/HTML and the
// unbalanced loop make a behaviour-preserving restyle unsafe, so the code is left
// byte-identical (only the Spanish comments "CREACION DEL ARRAY" — "array
// creation" — are translated below).
// NOTE(review): each item's <guid> is filled with loved_count — presumably not a
// stable unique id; confirm against feed-consumer expectations.
// .. ARRAY CREATION foreach ($data as $item) { echo " <item>\n"; echo " <title>" . $item['artist'] . " - " . $item['title'] . "</title>\n"; echo " <enclosure url=\"" . $item['url'] . "\" type=\"audio/mpeg\" />\n"; echo " <guid>" . $item['loved_count'] . "</guid>\n"; echo " </item>\n"; } ?> </channel> </rss><?php scraperwiki::httpresponseheader('Content-Type', 'application/atom+xml'); scraperwiki::attach("exfm"); $data = scraperwiki::select("* from exfm.swdata"); ?> <rss xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" version="2.0"> <channel> <title>TuMusika Evolution Podcast</title> <link>http://www.tumusika.net/</link> <language>es-es</language> <itunes:owner> <itunes:name>TuMusika Evolution</itunes:name> <itunes:email>darkgiank@darkgiank.com</itunes:email> </itunes:owner> <?php // .. ARRAY CREATION foreach ($data as $item) {