/** Fetch delayed quote data for a company from Yahoo Finance.
 * @param  string $var Company ticker symbol (e.g. "MSFT")
 * @return array  Array of quote data rows
 */
function getQuoteData($var)
{
    $url = "http://finance.yahoo.com/q?s=" . $var;
    $h = new http();
    $h->dir = "class_http_dir/";
    if (!$h->fetch($url, 2)) {
        echo "<h2>There is a problem with the http request!</h2>";
        echo $h->log;
        exit;
    }
    $res_arr = array();
    $quote_data = http::table_into_array($h->body, 'Delayed quote data', 0, null);
    if (is_array($quote_data)) {
        // Drop the leading header rows so the first remaining row is the
        // 'Last Trade:' row; some pages carry one extra row before it.
        array_shift($quote_data);
        array_shift($quote_data);
        if ($quote_data[0][0] != 'Last Trade:') {
            array_shift($quote_data);
        }
    } else {
        die("Could not extract the quote data table from the page.");
    }
    // Copy up to the first 16 rows of quote data into the result array.
    for ($i = 0; $i < 16; $i++) {
        if (!empty($quote_data[$i])) {
            $res_arr[] = $quote_data[$i];
        }
    }
    return $res_arr;
}
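/*
A minimal usage sketch of getQuoteData(), assuming the class_http file has
already been included and "MSFT" is a valid ticker symbol:
*/
$msft_quote = getQuoteData("MSFT");
echo "<pre>";
print_r($msft_quote);
echo "</pre>";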
/** Build an XML document from a table extracted out of raw HTML.
 * Internally calls http::table_into_array() and wraps the resulting rows
 * in <TABLE>/<ROW>/<COL> elements.
 */
function table_into_xml($rawHTML, $needle = "", $needle_within = 0, $allowedTags = "")
{
    if (!($aryTable = http::table_into_array($rawHTML, $needle, $needle_within, $allowedTags))) {
        return false;
    }
    $xml = "<?xml version=\"1.0\" standalone=\"yes\"?>\n";
    $xml .= "<TABLE>\n";
    $rowIdx = 0;
    foreach ($aryTable as $row) {
        $xml .= "\t<ROW id=\"" . $rowIdx . "\">\n";
        $colIdx = 0;
        foreach ($row as $col) {
            $xml .= "\t\t<COL id=\"" . $colIdx . "\">" . trim(utf8_encode(htmlspecialchars($col))) . "</COL>\n";
            $colIdx++;
        }
        $xml .= "\t</ROW>\n";
        $rowIdx++;
    }
    $xml .= "</TABLE>";
    return $xml;
}
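/*
A usage sketch for table_into_xml(), assuming the same Yahoo Finance page
and "Delayed quote data" needle used in getQuoteData() above (the $h2 and
$quote_xml names are just illustrative):
*/
$h2 = new http();
$h2->dir = "class_http_dir/";
if ($h2->fetch("http://finance.yahoo.com/q?s=MSFT", 2)) {
    $quote_xml = table_into_xml($h2->body, "Delayed quote data", 0, null);
    if ($quote_xml !== false) {
        echo "<pre>" . htmlspecialchars($quote_xml) . "</pre>";
    }
}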
/* Example #3 */
/* Echo out the body content fetched from the URL. */
echo $h->body;
/*
Extract a specific table of data out of scraped content. The class
comes with two static methods you can use for this purpose:
  table_into_array() will rip a single table into an array.
  table_into_xml() will internally call table_into_array() and then build an
  XML document from the array. I thought this would be cool, but in practice
  I've never used this method, since the array is so easy to work with.

This example builds on the previous example to extract the MSFT stats out
of the body content. Read the comments in the class file to learn how to use
this static method.
*/
$msft_stats = http::table_into_array($h->body, "Avg Daily Volume", 1, null);
/* Print out the array so you can see the stats data. */
echo "<pre>";
print_r($msft_stats);
echo "</pre>";
/*
Scraping content that is username/password protected. The class can do basic
authentication. Pass your username and password in like this:
*/
$url = "http://someprivatesite.net";
$h->fetch($url, 0, null, "MyUserName", "MyPassword");
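/*
As with any fetch, it pays to check the return value; $h->log carries the
diagnostic detail if the request or the authentication fails. A sketch of
the same call wrapped in the error check used earlier:
*/
if (!$h->fetch($url, 0, null, "MyUserName", "MyPassword")) {
    echo "<h2>There is a problem with the http request!</h2>";
    echo $h->log;
}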
/*
If you need to access content on a port other than 80, just put the port in
the URL in the standard way:
*/
$h->fetch("http://somedomain.org:8088");