<?php

include_once 'feed/json_common.php';

/*
 * Render the cached SoundCloud feed as a striped table of
 * embeddable tracks.
 */
$tracks = get_json_data('soundcloud');

// Bail out silently when the feed is empty or malformed
// (first entry missing a user_id means the payload is not track data).
if (count($tracks) <= 0 || !$tracks[0]->user_id) {
    exit;
}

echo '<table class="table table-striped"><th><h2 class="text-center">LMMS SoundCloud</h2></th>';

foreach ($tracks as $track) {
    // Clicking the row embeds the player in-place via embedSound().
    $embed_link = "javascript:embedSound('#div-" . $track->id . "','" . $track->id . "')";

    // Fall back to the uploader's avatar when the track has no artwork.
    $thumbnail = $track->artwork_url ? $track->artwork_url : $track->user->avatar_url;

    create_row(
        'soundcloud',
        $track->title,
        $embed_link,
        trim_feed($track->description, $track->permalink_url),
        $track->user->username,
        $track->user->permalink_url,
        $track->created_at,
        $thumbnail,
        'div-' . $track->id
    );
}

echo '</table>';
<?php

include_once 'feed/json_common.php';

/*
 * Render the cached GitHub open-issue feed as a striped table,
 * showing at most $max issues.
 */

// Maximum number of displayed items.
$max = 20;

// Array of relational JSON objects from cached or online GitHub data.
$obj = get_json_data('github', 'issues', '?state=open');

echo '<table class="table table-striped"><th><h2 class="text-center">LMMS GitHub</h2></th>';

$count = 0;
foreach ($obj as $item) {
    // Stop once $max rows have been emitted. The original checked the
    // counter AFTER emitting the row (post-increment equality test), so
    // it displayed $max + 1 items; checking first fixes the off-by-one.
    if ($count++ >= $max) {
        break;
    }

    $title = 'GitHub #' . $item->number . ' • ' . $item->title;
    create_row(
        'github',
        $title,
        $item->html_url,
        trim_feed($item->body, $item->html_url),
        $item->user->login,
        $item->user->html_url,
        $item->created_at
    );
}

echo '</table>';
<?php

include_once 'feed/json_common.php';

/*
 * Render the cached Google+ activity feed as a striped table,
 * newest activity first.
 */
$obj = get_json_data('google', 'activities', '?maxResults=25');

echo '<table class="table table-striped"><th><h2 class="text-center">LMMS Google+</h2></th>';

// Sort on publish date, newest first. A usort() comparator must return 0
// for equal elements; the original always returned -1 or 1, which leaves
// the relative order of equal timestamps undefined.
usort($obj->items, function ($a, $b) {
    $ta = strtotime($a->published);
    $tb = strtotime($b->published);
    if ($ta == $tb) {
        return 0;
    }
    return $ta > $tb ? -1 : 1;
});

foreach ($obj as $items) {
    // Only array-valued properties of the response (e.g. "items") hold
    // activity entries; skip scalar properties and empty arrays.
    if (!is_array($items) || count($items) < 1) {
        continue;
    }
    $duplicates = array();
    foreach ($items as $item) {
        // Google+ seems to have an abundance of duplicates in their feed
        // (likely historical edits). Quick hack: dedupe on title.
        if (array_key_exists($item->title, $duplicates)) {
            continue;
        }
        $duplicates[$item->title] = true;
        create_row(
            'google+',
            $item->title,
            $item->url,
            trim_feed($item->object->content, $item->url),
            $item->actor->displayName,
            $item->actor->url,
            $item->published
        );
    }
}

echo '</table>';
*/

// Channel page used as the author link for every row.
$videos_url = 'https://www.youtube.com/user/LMMSOfficial/videos';

$obj = get_json_data('youtube', 'activities', '&part=snippet&maxResults=25');

echo '<table class="table table-striped"><th><h2 class="text-center">LMMS YouTube</h2></th>';

foreach ($obj as $items) {
    // Only array-valued properties of the response hold feed entries;
    // skip scalar properties and empty arrays.
    if (!is_array($items) || count($items) < 1) {
        continue;
    }
    $duplicates = array();
    foreach ($items as $entry) {
        $snippet = $entry->snippet;

        // YouTube seems to have some duplicates in their feed (likely
        // historical edits). Quick hack: dedupe on title.
        if (array_key_exists($snippet->title, $duplicates)) {
            continue;
        }
        $duplicates[$snippet->title] = true;

        $id = parse_youtube_id($snippet->thumbnails->default->url);
        $url = $id ? 'http://www.youtube.com/watch?v=' . $id : '';

        create_row(
            'youtube',
            $snippet->title,
            "javascript:embedVideo('#div-" . $id . "','" . $id . "')",
            trim_feed($snippet->description, $url),
            $snippet->channelTitle,
            $videos_url,
            $snippet->publishedAt,
            $snippet->thumbnails->default->url,
            'div-' . $id
        );
    }
}

echo '</table>';

// The feed doesn't give us a clean URL to the video, so we parse the
// video id out of the thumbnail image URL (.../vi/<id>/default.jpg):
// the id is the second-to-last path segment.
function parse_youtube_id($thumbnail)
{
    $segments = explode('/', $thumbnail);
    $idx = count($segments) - 2;
    return $idx > 0 ? $segments[$idx] : '';
}