/**
 * Loads the Environment Meta Model from GovDashboard data mart content types.
 *
 * Filters are intentionally rejected: assembling the list of data source
 * names to filter by would itself require the meta model that this method
 * is in the middle of loading (a Catch-22).
 *
 * @param AbstractMetaModel $environment_metamodel model to register data sources in
 * @param array $filters must be NULL; any value triggers UnsupportedOperationException
 * @throws UnsupportedOperationException when filters are provided
 */
public function load(AbstractMetaModel $environment_metamodel, array $filters = NULL) {
    LogHelper::log_notice(t('Loading Environment Meta Model from GovDashboard Content Types ...'));

    if (isset($filters)) {
        throw new UnsupportedOperationException(t('Filters are not supported during data source loading'));
    }

    $nodes = gd_datamart_get_datamarts(LOAD_ENTITY);

    // Pass 1: prepare a data source for every data mart node.
    foreach ($nodes as $node) {
        GD_DataMartMetaModelLoaderHelper::prepareDataSource($environment_metamodel, $node);
    }

    // Pass 2: finalize each data source prepared above.
    foreach ($nodes as $node) {
        $preparedDataSource = GD_DataMartMetaModelLoaderHelper::getDataSourceByNodeId($environment_metamodel->datasources, $node->nid);
        GD_DataMartMetaModelLoaderHelper::finalizeDataSourcePreparation($environment_metamodel, $preparedDataSource);
    }

    LogHelper::log_info(t('Processed @datamartCount data mart node(s)', array('@datamartCount' => count($nodes))));
}
/**
 * Generates a private data source for every Drupal database connection
 * declared in the global $databases configuration array.
 *
 * The default connection is additionally marked as shared because common
 * utilities and dimensions are stored there.
 *
 * @param AbstractMetaModel $environment_metamodel model to register data sources in
 * @param array $filters unused by this loader
 */
public function load(AbstractMetaModel $environment_metamodel, array $filters = NULL) {
    LogHelper::log_notice(t('Generating Environment Meta Model for Drupal database connections ...'));
    global $databases;

    $generatedCount = 0;
    foreach ($databases as $namespace => $connections) {
        foreach ($connections as $connectionName => $connection) {
            $datasource = new DataSourceMetaData();
            $datasource->name = NameSpaceHelper::addNameSpace($namespace, $connectionName);
            $datasource->markAsPrivate();
            $datasource->readonly = FALSE;

            // required property: the connection 'driver' becomes the data source 'type'
            $this->setDataSourceProperty($datasource, $connection, 'type', 'driver');
            // copy over any other properties the connection provides
            $this->setDataSourceExtensionProperties($datasource, $connection);

            $environment_metamodel->registerDataSource($datasource);
            $generatedCount++;
        }
    }

    // Default database connection is shared because we store common utilities and dimensions there
    $defaultDataSource = $environment_metamodel->getDataSource(self::$DATASOURCE_NAME__DEFAULT);
    $defaultDataSource->shared = TRUE;

    LogHelper::log_info(t('Generated @datasourceCount data sources', array('@datasourceCount' => $generatedCount)));
}
/**
 * Loads data source definitions from the 'Data Sources' section of settings.php.
 *
 * Each configured entry becomes a registered DataSourceMetaData. A 'readonly'
 * flag present in the configuration is honored; when absent, the data source
 * defaults to read-only.
 *
 * @param AbstractMetaModel $environment_metamodel model to register data sources in
 * @param array $filters unused by this loader
 */
public function load(AbstractMetaModel $environment_metamodel, array $filters = NULL) {
    LogHelper::log_notice(t('Loading Environment Meta Model from settings.php ...'));

    $processedCount = 0;
    $configuredSections = Environment::getInstance()->getConfigurationSection('Data Sources');
    if (isset($configuredSections)) {
        foreach ($configuredSections as $namespace => $sectionDataSources) {
            foreach ($sectionDataSources as $name => $configuration) {
                $resolvedName = NameSpaceHelper::resolveNameSpace($namespace, $name);

                $datasource = new DataSourceMetaData();
                $datasource->name = $resolvedName;
                $datasource->initializeFrom($configuration);

                // honor an explicit 'readonly' from configuration; default to read-only otherwise
                if (!isset($datasource->readonly)) {
                    $datasource->readonly = TRUE;
                }

                $environment_metamodel->registerDataSource($datasource);
                $processedCount++;
            }
        }
    }

    LogHelper::log_info(t('Processed @datasourceCount data sources', array('@datasourceCount' => $processedCount)));
}
/**
 * Proxies any method call to the wrapped controller instance, logging the
 * execution time of the delegated call.
 *
 * @param string $methodName method to invoke on the controller instance
 * @param array $args positional arguments for the call
 * @return mixed whatever the delegated method returns
 */
public function __call($methodName, $args) {
    $startedAt = microtime(TRUE);
    $delegatedResult = call_user_func_array(array($this->controllerInstance, $methodName), $args);
    LogHelper::log_notice(t(
        'Data Controller execution time for @methodName(): !executionTime',
        array('@methodName' => $methodName, '!executionTime' => LogHelper::formatExecutionTime($startedAt))));
    return $delegatedResult;
}
/**
 * Joins two source configurations, timing the concrete implementation
 * (joinSourceConfigurations) and logging how long it took.
 *
 * @param JoinController_SourceConfiguration $sourceConfigurationA left side of the join
 * @param JoinController_SourceConfiguration $sourceConfigurationB right side of the join
 * @return mixed result produced by joinSourceConfigurations()
 */
final public function join(JoinController_SourceConfiguration $sourceConfigurationA, JoinController_SourceConfiguration $sourceConfigurationB) {
    $startedAt = microtime(TRUE);
    $joined = $this->joinSourceConfigurations($sourceConfigurationA, $sourceConfigurationB);
    LogHelper::log_notice(t(
        '@className execution time: !executionTime',
        array('@className' => get_class($this), '!executionTime' => LogHelper::formatExecutionTime($startedAt))));
    return $joined;
}
/**
 * Opens the in-memory buffer for parsing.
 *
 * On success the read cursor is rewound to the first buffered element.
 *
 * @return bool TRUE when the parent resource opened successfully
 */
public function openResource() {
    LogHelper::log_notice(t('Parsing data from a buffer (size: @bufferSize) ...', array('@bufferSize' => $this->bufferSize)));
    $opened = parent::openResource();
    if ($opened) {
        // start consuming the buffer from its first element
        $this->index = 0;
    }
    return $opened;
}
/**
 * Registers the single APC cache data source in the environment meta model.
 *
 * @param AbstractMetaModelFactory $factory owning factory (unused here)
 * @param AbstractMetaModel $environment_metamodel model to register the data source in
 * @param array $filters unused by this loader
 * @param bool $finalAttempt unused by this loader
 * @return int always LOAD_STATE__SUCCESSFUL
 */
public function load(AbstractMetaModelFactory $factory, AbstractMetaModel $environment_metamodel, array $filters = NULL, $finalAttempt) {
    LogHelper::log_notice(t('Generating Environment Meta Model for APC cache ...'));

    $generatedCount = 0;

    $datasource = new DataSourceMetaData();
    $datasource->name = NameSpaceHelper::addNameSpace(APCHandler::$CACHE__TYPE, DefaultCacheFactory::$DATASOURCE_NAME__DEFAULT);
    $datasource->type = APCHandler::$CACHE__TYPE;
    $environment_metamodel->registerDataSource($datasource);
    $generatedCount++;

    LogHelper::log_info(t('Generated @datasourceCount data sources', array('@datasourceCount' => $generatedCount)));

    return self::LOAD_STATE__SUCCESSFUL;
}
/**
 * Opens the configured file for reading.
 *
 * Enables PHP's line-ending auto-detection so Mac/Windows/Unix files are
 * all readable, then opens the file handle.
 *
 * @return bool TRUE when both the parent resource and the file handle opened
 */
public function openResource() {
    LogHelper::log_notice(t('Parsing @filename ...', array('@filename' => $this->filename)));
    $opened = parent::openResource();
    if (!$opened) {
        return $opened;
    }

    // let PHP recognize Mac/Windows/Unix line endings while reading
    ini_set('auto_detect_line_endings', TRUE);
    $this->handle = fopen($this->filename, 'r');
    return $this->handle !== FALSE;
}
/**
 * Registers the single PHP dataset data source in the environment meta model.
 *
 * @param AbstractMetaModelFactory $factory owning factory (unused here)
 * @param AbstractMetaModel $environment_metamodel model to register the data source in
 * @param array $filters unused by this loader
 */
public function load(AbstractMetaModelFactory $factory, AbstractMetaModel $environment_metamodel, array $filters = NULL) {
    LogHelper::log_notice(t('Generating Environment Meta Model for PHP dataset functionality ...'));

    $generatedCount = 0;

    $datasource = new DataSourceMetaData();
    $datasource->name = PHPDataSourceHandler::$DATASOURCE_NAME__DEFAULT;
    $datasource->type = PHPDataSourceHandler::$DATASOURCE__TYPE;
    $environment_metamodel->registerDataSource($datasource);
    $generatedCount++;

    LogHelper::log_info(t('Generated @datasourceCount data sources', array('@datasourceCount' => $generatedCount)));
}
/**
 * Initializes the cache handler for the given data source.
 *
 * The effective cache prefix honors the data source's nested name space when
 * one is set. The data source may also override the default entry expiration.
 * Accessibility is checked only when initialize() did not explicitly fail.
 *
 * @param string $prefix base cache key prefix
 * @param DataSourceMetaData $datasource data source being cached
 */
public function __construct($prefix, DataSourceMetaData $datasource) {
    LogHelper::log_notice(t('[@cacheType] Initializing PHP extension ...', array('@cacheType' => $this->getCacheType())));

    // honor the data source's nested name space, if any, in the prefix
    $effectivePrefix = isset($datasource->nestedNameSpace)
        ? NameSpaceHelper::addNameSpace($prefix, $datasource->nestedNameSpace)
        : $prefix;
    parent::__construct($effectivePrefix);

    // the data source may carry its own expiration schedule
    if (isset($datasource->entryExpiration)) {
        $this->entryExpiration = $datasource->entryExpiration;
    }

    if ($this->initialize($prefix, $datasource) !== FALSE) {
        $this->checkAccessibility(FALSE);
    }
}
/**
 * Registers the default APC cache data source (private, cache category)
 * in the environment meta model.
 *
 * @param AbstractMetaModel $environment_metamodel model to register the data source in
 * @param array $filters unused by this loader
 */
public function load(AbstractMetaModel $environment_metamodel, array $filters = NULL) {
    LogHelper::log_notice(t('Generating Environment Meta Model for APC cache ...'));

    $generatedCount = 0;

    $datasource = new DataSourceMetaData();
    $datasource->name = NameSpaceHelper::addNameSpace(APCHandler::CACHE__TYPE, 'default');
    $datasource->type = APCHandler::CACHE__TYPE;
    $datasource->category = CacheFactory::$DATASOURCE__CATEGORY;
    $datasource->markAsPrivate();
    $environment_metamodel->registerDataSource($datasource);
    $generatedCount++;

    LogHelper::log_info(t('Generated @datasourceCount data sources', array('@datasourceCount' => $generatedCount)));
}
/**
 * Loads system data source definitions from the 'Data Sources' section of
 * settings.php. All data sources registered here are marked system and
 * read-only before being initialized from configuration.
 *
 * @param AbstractMetaModelFactory $factory owning factory (unused here)
 * @param AbstractMetaModel $environment_metamodel model to register data sources in
 * @param array $filters unused by this loader
 * @param bool $finalAttempt unused by this loader
 * @return int always LOAD_STATE__SUCCESSFUL
 */
public function load(AbstractMetaModelFactory $factory, AbstractMetaModel $environment_metamodel, array $filters = NULL, $finalAttempt) {
    LogHelper::log_notice(t('Loading Environment Meta Model from settings.php ...'));

    $processedCount = 0;
    $configuredSections = Environment::getInstance()->getConfigurationSection('Data Sources');
    if (isset($configuredSections)) {
        foreach ($configuredSections as $namespace => $sectionDataSources) {
            foreach ($sectionDataSources as $name => $configuration) {
                $resolvedName = NameSpaceHelper::resolveNameSpace($namespace, $name);

                $datasource = new DataSourceMetaData();
                $datasource->name = $resolvedName;
                $datasource->system = TRUE;
                $datasource->readonly = TRUE;
                $datasource->initializeFrom($configuration);

                $environment_metamodel->registerDataSource($datasource);
                $processedCount++;
            }
        }
    }

    LogHelper::log_info(t('Processed @datasourceCount data sources', array('@datasourceCount' => $processedCount)));

    return self::LOAD_STATE__SUCCESSFUL;
}
/**
 * Finalizes Dataset Meta Data for table-backed datasets of the selected
 * database connection type by fetching column metadata from the database.
 *
 * Postpones itself until the final load attempt so that all datasets are
 * already registered in the meta model before columns are resolved.
 *
 * @param AbstractMetaModelFactory $factory owning factory (unused directly)
 * @param AbstractMetaModel $metamodel dataset meta model being finalized
 * @param array $filters unused by this loader
 * @param bool $finalAttempt FALSE on early rounds; this loader runs only on the final one
 * @return int LOAD_STATE__POSTPONED until the final attempt, then LOAD_STATE__SUCCESSFUL
 */
public function load(AbstractMetaModelFactory $factory, AbstractMetaModel $metamodel, array $filters = NULL, $finalAttempt) {
    $selectedDataSourceType = $this->selectedDataSourceType();
    LogHelper::log_notice(t("Finalizing Dataset Meta Data for '@databaseType' database connections ...", array('@databaseType' => $selectedDataSourceType)));
    // wait for the final round: other loaders must have registered datasets first
    if ($finalAttempt === FALSE) {
        return self::LOAD_STATE__POSTPONED;
    }

    $finalizedDatasetCount = 0;
    $environment_metamodel = data_controller_get_environment_metamodel();
    // processing all database connections
    foreach ($environment_metamodel->datasources as $datasource) {
        if ($datasource->type !== $selectedDataSourceType) {
            continue;
        }

        // selecting datasets which could be processed for the selected connection,
        // grouped by lower-cased table name
        $selectedSources = NULL;
        foreach ($metamodel->datasets as $dataset) {
            // the dataset should belong to the selected data source
            if ($dataset->datasourceName !== $datasource->name) {
                continue;
            }
            // the dataset has to be of type table
            if (DatasetTypeHelper::detectDatasetSourceType($dataset) !== DatasetTypeHelper::DATASET_SOURCE_TYPE__TABLE) {
                continue;
            }
            // whole dataset meta data was prepared using different method. There is nothing else can be done
            if ($dataset->isComplete()) {
                continue;
            }

            $tableName = strtolower($dataset->source);
            // invalidating existing column indexes
            $dataset->invalidateColumnIndexes();
            // there could be several datasets for one table
            $selectedSources[$tableName][] = $dataset;
        }
        if (!isset($selectedSources)) {
            continue;
        }

        $datasourceHandler = DataSourceQueryFactory::getInstance()->getHandler($datasource->type);
        $metadataCallback = $datasourceHandler->prepareQueryStatementExecutionCallbackInstance();

        // processing meta data for selected datasets: one row per column of each selected table
        $columnsMetaDataProperties = $this->prepareColumnsMetaDataProperties($datasource, array_keys($selectedSources));
        if (isset($columnsMetaDataProperties)) {
            foreach ($columnsMetaDataProperties as $columnMetaDataProperties) {
                $tableName = strtolower($columnMetaDataProperties[self::PROPERTY__TABLE_NAME]);
                $datasets = $selectedSources[$tableName];
                foreach ($datasets as $dataset) {
                    $column = new ColumnMetaData();
                    $column->name = strtolower($columnMetaDataProperties[self::PROPERTY__COLUMN_NAME]);
                    $column->columnIndex = $columnMetaDataProperties[self::PROPERTY__COLUMN_INDEX];
                    // preparing column type: database type comes from metadata,
                    // application type is derived by the handler's callback
                    $column->type->databaseType = $columnMetaDataProperties[self::PROPERTY__COLUMN_TYPE];
                    $column->type->applicationType = $metadataCallback->calculateApplicationDataType($column);
                    // checking if the column is a system column which should be invisible
                    // (system columns share a reserved name prefix)
                    if (substr_compare($column->name, DatasetSystemColumnNames::COLUMN_NAME_PREFIX, 0, strlen(DatasetSystemColumnNames::COLUMN_NAME_PREFIX)) === 0) {
                        $column->visible = FALSE;
                    }

                    $dataset->initializeColumnFrom($column);
                }
            }
        }

        // marking all selected datasets as completed
        foreach ($selectedSources as $datasets) {
            foreach ($datasets as $dataset) {
                $dataset->markAsComplete();
                $finalizedDatasetCount++;
            }
        }
    }
    LogHelper::log_info(t('Finalized @datasetCount dataset meta data configurations', array('@datasetCount' => $finalizedDatasetCount)));

    return self::LOAD_STATE__SUCCESSFUL;
}
/**
 * Exports a dashboard node to PDF by shelling out to wkhtmltopdf and
 * streaming the generated PDF back as a file download.
 *
 * Flow: permission checks -> build wkhtmltopdf argument list (title, cookies
 * for session passthrough, styling, header/footer) -> build the callback URL
 * the exporter will render -> run the command via passthru() into the output
 * buffer -> emit download headers -> flush and exit.
 *
 * NOTE(review): argument values are escaped with escapeshellcmd(), which does
 * not quote whole arguments the way escapeshellarg() does; cookie names/values
 * and the dashboard title flow into the command line here — verify this cannot
 * be abused for argument injection.
 * NOTE(review): http_build_query($params, null, '&') passes null for the
 * numeric_prefix parameter, which is deprecated on newer PHP — '' is the
 * conventional value.
 *
 * @param object $dashboardNode dashboard node to export
 * @return int MENU_NOT_FOUND / MENU_ACCESS_DENIED on failure; on success the
 *   function streams the PDF and terminates the request via exit()
 */
function gd_dashboard_page_export ( $dashboardNode ) {
    // check to see if export is allowed
    if ( !gd_dashboard_get_setting('export') ) {
        LogHelper::log_notice('Exporting dashboards disabled globally.');
        return MENU_NOT_FOUND;
    }

    if ( !gd_dashboard_access_view($dashboardNode) ) {
        return MENU_ACCESS_DENIED;
    }

    $exporterPath = gd_dashboard_get_setting('export_tool_path');

    $arguments = array();
    $arguments[] = array ( 'name' => '--title', 'value' => '\''.$dashboardNode->title.'\'' );

    // choose the URL the exporter will render, based on how the request arrived
    $callbackURL = GOVDASH_HOST;
    if ( user_is_logged_in() || isset($_GET['oauth_consumer_key']) ) {
        $callbackURL .= '/dashboards';
    } else if ( gd_dashboard_is_public($dashboardNode) ) {
        $callbackURL .= '/public/dashboards';
    } else {
        LogHelper::log_notice('Dashboard was requested anonymously but is not public. Requested: '.$dashboardNode->nid);
        return MENU_NOT_FOUND;
    }

    // forward the current query string (minus Drupal's 'q') plus export markers
    $params = $_GET;
    unset($params['q']);
    $params['export-view'] = true;
    $params['id'] = $dashboardNode->nid;
    $callbackURL .= '?'.http_build_query($params,null,'&');

    // pass the browser session cookies through so the exporter renders as this user
    // (skipped for OAuth-authenticated requests)
    if ( !isset($_GET['oauth_consumer_key']) ) {
        foreach ($_COOKIE as $key => $value) {
            $arguments[] = array( 'name' => '--cookie', 'value' => '\'' . $key . '\' \'' . $value . '\'' );
        }
    }

    $arguments[] = array ( 'name' => '--user-style-sheet', 'value' => dirname(__FILE__) . '/css/export.css' );
    $arguments[] = array ( 'name' => '--javascript-delay', 'value' => '5000' );
    $arguments[] = array ( 'name' => '--page-size', 'value' => 'Letter' );
    $arguments[] = array ( 'name' => '--header-html', 'value' => DRUPAL_ROOT.gd_dashboard_get_setting('export_header_path') );
    $arguments[] = array ( 'name' => '--footer-html', 'value' => DRUPAL_ROOT.gd_dashboard_get_setting('export_footer_path') );
    $arguments[] = '--print-media-type';

    // assemble the command line; see NOTE(review) above about escaping
    $command = $exporterPath;
    foreach ( $arguments as $arg ) {
        if ( is_array($arg) ) {
            $command .= ' ' . $arg['name'] . ' ' . escapeshellcmd($arg['value']);
        } else {
            $command .= ' '.escapeshellcmd($arg);
        }
    }

    // url input
    $command .= ' ' . escapeshellcmd($callbackURL);
    // pdf output
    $command .= ' -';
    // stderr getting logged or tossed to black hole by default
    $command .= ' 2>'.escapeshellcmd(gd_dashboard_get_setting('export_log_path'));

    // keep oauth token out of logs.
    if ( !isset($_GET['oauth_consumer_key']) ) {
        LogHelper::log_debug($command);
    }

    // generate filename title
    $filename = str_replace(' ','_',trim($dashboardNode->title));
    $filename .= '__'.date('Ymd');

    // buffer the exporter's stdout so Content-Length can be computed
    ob_start();
    header("Pragma: public");
    header("Expires: 0");
    header("Cache-Control: must-revalidate, post-check=0, pre-check=0");
    header("Cache-Control: private",false);
    header('Content-Description: File Transfer');
    // CGI SAPIs choke on the force-download header stack; send plain PDF there
    if (strpos(php_sapi_name(), 'cgi') === false) {
        header('Content-Type: application/force-download');
        header('Content-Type: application/octet-stream', false);
        header('Content-Type: application/download', false);
        header('Content-Type: application/pdf', false);
    } else {
        header('Content-Type: application/pdf');
    }
    header('Content-Disposition: attachment; filename="' . $filename . '.pdf"');
    header('Content-Transfer-Encoding: binary');

    passthru($command,$error);

    if ( !isset($_SERVER['HTTP_ACCEPT_ENCODING']) || empty($_SERVER['HTTP_ACCEPT_ENCODING']) ) {
        // the content length may vary if the server is using compression
        header('Content-Length: '.ob_get_length());
    }

    if ( $error ) {
        header_remove();
        ob_get_clean();
        gd_error_handler('Dashboard export failed to execute wkhtmltopdf successfully.');
        return MENU_NOT_FOUND;
    }

    ob_end_flush();
    exit();
}
/**
 * Loads the Meta Model from configuration files by delegating to the parent
 * loader; this override only adds the log message.
 *
 * @param AbstractMetaModelFactory $factory owning factory
 * @param AbstractMetaModel $metamodel meta model being loaded
 * @param array $filters optional loader filters
 * @param bool $finalAttempt whether this is the final load round
 * @return mixed the parent loader's load state
 */
public function load(AbstractMetaModelFactory $factory, AbstractMetaModel $metamodel, array $filters = NULL, $finalAttempt) {
    LogHelper::log_notice(t('Loading Meta Model from configuration files ...'));
    $state = parent::load($factory, $metamodel, $filters, $finalAttempt);
    return $state;
}
return; }*/ $dir .= '/' . $conf['check_book']['ref_data_dir']; if (!file_prepare_directory($dir, FILE_CREATE_DIRECTORY)) { LogHelper::log_error("Could not prepare directory {$dir} for generating reference data."); echo $failure; return; } /*if(!is_link($dir) && !@chmod($dir,0777)){ LogHelper::log_error("Could not change permissions to 777 for $dir."); echo $failure; return; }*/ foreach ($ref_data_queries as $file_name => $ref_data_query) { $file = DRUPAL_ROOT . '/' . $dir . '/' . $file_name . '.csv'; $command = $conf['check_book']['data_feeds']['command'] . " -c \"\\\\COPY (" . $ref_data_query . ") TO '" . $file . "' WITH DELIMITER ',' CSV HEADER QUOTE '\\\"' ESCAPE '\\\"' \" "; try { LogHelper::log_notice("Command for generating {$file_name} ref data: " . $command); shell_exec($command); LogHelper::log_notice("Completed executing DB query for {$file_name}. " . (is_file($file) ? "Generated file : {$file}" : "Could not generate file for {$file_name}")); } catch (Exception $e) { $value = TextLogMessageTrimmer::$LOGGED_TEXT_LENGTH__MAXIMUM; TextLogMessageTrimmer::$LOGGED_TEXT_LENGTH__MAXIMUM = NULL; LogHelper::log_error($e); LogHelper::log_error("Erorr executing DB query for generating {$file_name} ref data: " . $command . ". Exception is: " . $e); TextLogMessageTrimmer::$LOGGED_TEXT_LENGTH__MAXIMUM = $value; echo $failure; return; } } echo $success;
/**
 * Parses records from the data provider and submits them to the submitters.
 *
 * Flow: open resource -> initialize processing -> prepare metadata -> read
 * records one by one (with bounded re-parse attempts when a record's column
 * count disagrees with the metadata), skipping the first skipRecordCount
 * records and stopping after limitRecordCount loaded records -> finish.
 * On any exception, processing is aborted and the resource closed, with
 * secondary exceptions logged (not rethrown) so the original cause is
 * preserved; the original is rethrown wrapped in IllegalStateException.
 *
 * @param AbstractDataProvider $dataProvider source of raw records
 * @param array $dataSubmitters optional submitters receiving parsed records
 * @return int number of records actually loaded (after skipping/filtering)
 * @throws IllegalStateException wrapping any parsing/processing failure
 */
public function parse(AbstractDataProvider $dataProvider, array $dataSubmitters = NULL) {
    $skippedRecordCount = 0;
    $loadedRecordCount = 0;

    $timeStart = microtime(TRUE);
    if ($dataProvider->openResource()) {
        LogHelper::log_notice(t(
            'Parsing @limitRecordCount records. Skipping first @skipRecordCount records (memory usage: @memoryUsed) ...',
            array(
                '@skipRecordCount' => $this->skipRecordCount,
                '@limitRecordCount' => (isset($this->limitRecordCount) ? $this->limitRecordCount : t('all')),
                '@memoryUsed' => memory_get_usage())));
        try {
            if ($this->initializeProcessing($dataSubmitters)) {
                // preparing list of columns
                $this->prepareMetaData($dataProvider, $dataSubmitters);
                $metadataColumnCount = $this->metadata->getColumnCount(FALSE, TRUE);

                if ((!isset($this->limitRecordCount) || ($this->limitRecordCount > 0)) && $this->executeBeforeProcessingRecords($dataSubmitters, $dataProvider)) {
                    // processing records
                    $fileProcessedCompletely = FALSE;
                    while (!isset($this->limitRecordCount) || ($loadedRecordCount < $this->limitRecordCount)) {
                        $dataProvider->startReading();

                        $record = $this->parseNextRecord($dataProvider, $dataSubmitters);
                        // number of loaded columns should match number of columns in meta data;
                        // if not, roll back and re-parse up to MAX_ATTEMPTS_TO_RESOLVE_PARSING_ISSUES times
                        if (isset($record)) {
                            $attempt = 1;
                            while (TRUE) {
                                $recordColumnCount = count($record);
                                if ($recordColumnCount == $metadataColumnCount) {
                                    break;
                                }
                                else {
                                    if ($attempt > self::$MAX_ATTEMPTS_TO_RESOLVE_PARSING_ISSUES) {
                                        $dataProvider->endReading();
                                        LogHelper::log_debug($this->metadata);
                                        LogHelper::log_debug($record);
                                        throw new DataParserException(t(
                                            'Expected to load values for %metadataColumnCount columns. Loaded %loadedColumnCount [line: %lineNumber]',
                                            array('%metadataColumnCount' => $metadataColumnCount, '%loadedColumnCount' => $recordColumnCount, '%lineNumber' => $dataProvider->getCurrentLineNumber())));
                                    }

                                    // retry: rewind the provider and re-parse, passing the attempt number
                                    $dataProvider->rollbackReading();
                                    $dataProvider->startReading();
                                    $record = $this->parseNextRecord($dataProvider, $dataSubmitters, $attempt);
                                    $attempt++;
                                }
                            }
                        }

                        $dataProvider->endReading();

                        // checking if we reached the end
                        if (!isset($record)) {
                            $fileProcessedCompletely = TRUE;
                            break;
                        }

                        // skipping required number of records
                        if ($skippedRecordCount < $this->skipRecordCount) {
                            $skippedRecordCount++;
                            continue;
                        }

                        $this->postProcessColumnValues($record);

                        // checking if we need to skip processing the record
                        $recordNumber = $dataProvider->getCurrentLineNumber();
                        if ($this->executeBeforeRecordSubmitted($dataSubmitters, $recordNumber, $record)) {
                            $this->submitRecord($dataSubmitters, $recordNumber, $record);
                            $this->executeAfterRecordSubmitted($dataSubmitters, $recordNumber, $record);

                            $loadedRecordCount++;
                            // periodic progress logging
                            if (($loadedRecordCount % 1000) == 0) {
                                LogHelper::log_info(t(
                                    'Processed @recordCount records so far (memory usage: @memoryUsed)',
                                    array('@recordCount' => $loadedRecordCount, '@memoryUsed' => memory_get_usage())));
                            }
                        }
                    }

                    $this->executeAfterProcessingRecords($dataSubmitters, $fileProcessedCompletely);
                }

                $this->finishProcessing($dataSubmitters);
            }
        }
        catch (DataParserException $e) {
            LogHelper::log_warn(t('Place of original exception @file:@line', array('@file' => $e->getFile(), '@line' => $e->getLine())));
            try {
                $this->abortProcessing($dataSubmitters);
            }
            catch (Exception $ne) {
                // we do not need to rethrow this exception. We need to preserve and rethrow original exception
                LogHelper::log_error($ne);
            }
            try {
                $dataProvider->closeResource();
            }
            catch (Exception $ne) {
                // we do not need to rethrow this exception. We need to preserve and rethrow original exception
                LogHelper::log_error($ne);
            }

            throw new IllegalStateException($e->getMessage());
        }
        catch (Exception $e) {
            LogHelper::log_warn(t('Place of original exception @file:@line', array('@file' => $e->getFile(), '@line' => $e->getLine())));
            try {
                $this->abortProcessing($dataSubmitters);
            }
            catch (Exception $ne) {
                // we do not need to rethrow this exception. We need to preserve and rethrow original exception
                LogHelper::log_error($ne);
            }

            // wrap the original exception, appending how far parsing got
            $ise = new IllegalStateException(
                ExceptionHelper::getExceptionMessage($e)
                    . t(' [%lineNumber line(s) parsed so far]', array('%lineNumber' => $dataProvider->getCurrentLineNumber())),
                0, $e);

            try {
                $dataProvider->closeResource();
            }
            catch (Exception $ne) {
                // we do not need to rethrow this exception. We need to preserve and rethrow original exception
                LogHelper::log_error($ne);
            }

            throw $ise;
        }

        $dataProvider->closeResource();
    }

    LogHelper::log_notice(t(
        'Processing @recordCount record(s) took !executionTime',
        array('@recordCount' => $loadedRecordCount, '!executionTime' => LogHelper::formatExecutionTime($timeStart))));

    return $loadedRecordCount;
}
/**
 * Loads the meta model by running all registered loaders (grouped by
 * priority) through three phases: prepare, load, finalize. Logs per-loader
 * and overall timing plus memory consumed.
 *
 * @param AbstractMetaModel $metamodel meta model instance to populate
 */
protected function loadMetaModel(AbstractMetaModel $metamodel) {
    $metaModelName = get_class($this);
    LogHelper::log_notice(t('Loading @metamodelName ...', array('@metamodelName' => $metaModelName)));

    $startedAt = microtime(TRUE);
    $memoryBefore = memory_get_usage();

    if (isset($this->loaders)) {
        // phase 1: give every loader a chance to prepare for the load operation
        foreach ($this->loaders as $priority => $prioritizedLoaders) {
            foreach ($prioritizedLoaders as $loader) {
                $loader->prepare($metamodel);
            }
        }

        $filters = $this->getMetaModelFilters();

        // phase 2: execute loaders in priority order, timing each one
        foreach ($this->loaders as $priority => $prioritizedLoaders) {
            foreach ($prioritizedLoaders as $loader) {
                $loaderClassName = get_class($loader);
                $loaderStartedAt = microtime(TRUE);
                $loader->load($metamodel, $filters);
                LogHelper::log_notice(t(
                    "'@loaderClassName' Meta Model Loader execution time: !executionTime",
                    array('@loaderClassName' => $loaderClassName, '!executionTime' => LogHelper::formatExecutionTime($loaderStartedAt))));
            }
        }

        // phase 3: let every loader finalize the load operation
        foreach ($this->loaders as $priority => $prioritizedLoaders) {
            foreach ($prioritizedLoaders as $loader) {
                $loader->finalize($metamodel);
            }
        }
    }

    LogHelper::log_notice(t(
        '@metamodelName loading time: !loadingTime; Memory consumed: !memoryUsage',
        array(
            '@metamodelName' => $metaModelName,
            '!loadingTime' => LogHelper::formatExecutionTime($startedAt),
            '!memoryUsage' => (memory_get_usage() - $memoryBefore))));
}
/**
 * Loads the Meta Model from configuration files by delegating to the parent
 * loader; this override only adds the log message.
 *
 * @param AbstractMetaModel $metamodel meta model being loaded
 * @param array $filters optional loader filters
 * @return mixed the parent loader's result
 */
public function load(AbstractMetaModel $metamodel, array $filters = NULL) {
    LogHelper::log_notice(t('Loading Meta Model from configuration files ...'));
    $result = parent::load($metamodel, $filters);
    return $result;
}
/**
 * Loads the meta model by repeatedly cycling through the registered loaders
 * until every one has completed.
 *
 * A loader may return LOAD_STATE__POSTPONED when data it depends on is not
 * loaded yet; postponed loaders are retried on the next pass. Once a full
 * pass postpones every remaining loader, one last pass is made with
 * $finalAttempt = TRUE; loaders that still postpone on that pass are dropped
 * (the dependency data simply does not exist). Successful/skipped loaders
 * are removed from the work list, which resets the postponement tracking.
 *
 * @param AbstractMetaModel $metamodel meta model instance to populate
 * @throws IllegalStateException when a loader returns an unknown state
 */
protected function loadMetaModel(AbstractMetaModel $metamodel) {
    $metaModelName = $this->getMetaModelName();
    LogHelper::log_notice(t('Loading @metamodelName ...', array('@metamodelName' => $metaModelName)));

    $metamodelTimeStart = microtime(TRUE);
    $metamodelMemoryUsage = memory_get_usage();

    if (isset($this->loaders)) {
        // preparing each loader for load operation
        foreach ($this->loaders as $loader) {
            $loader->prepare($this, $metamodel);
        }

        $filters = $this->getMetaModelFilters();

        // creating a copy of list of loaders. A loader is removed from the list once corresponding load operation is completed
        $loaders = $this->loaders;

        $finalAttempt = FALSE;
        $index = $postponedLoaderCounter = 0;
        while (($count = count($loaders)) > 0) {
            // wrapped past the end of the remaining loaders?
            if ($index >= $count) {
                if ($postponedLoaderCounter >= $count) {
                    if ($finalAttempt) {
                        // ALL loaders were postponed. There is no data which they depend on
                        break;
                    }
                    else {
                        $finalAttempt = TRUE;
                    }
                }
                // resetting indexes to start from first loader
                $index = $postponedLoaderCounter = 0;
            }
            elseif ($count == 1) {
                // to avoid receiving 'postponed' status from last loader
                $finalAttempt = TRUE;
            }

            $loader = $loaders[$index];
            $loaderClassName = get_class($loader);

            $loaderTimeStart = microtime(TRUE);
            $state = $loader->load($this, $metamodel, $filters, $finalAttempt);
            LogHelper::log_info(t("'@loaderClassName' Meta Model Loader execution time: !executionTime", array('@loaderClassName' => $loaderClassName, '!executionTime' => ExecutionPerformanceHelper::formatExecutionTime($loaderTimeStart))));

            switch ($state) {
                case AbstractMetaModelLoader::LOAD_STATE__SUCCESSFUL:
                case AbstractMetaModelLoader::LOAD_STATE__SKIPPED:
                    // done with this loader: drop it and reset postponement tracking,
                    // since its output may unblock previously postponed loaders
                    unset($loaders[$index]);
                    $loaders = array_values($loaders); // re-indexing the array
                    $postponedLoaderCounter = 0;
                    $finalAttempt = FALSE;
                    break;
                case AbstractMetaModelLoader::LOAD_STATE__POSTPONED:
                    LogHelper::log_notice(t("Execution of '@loaderClassName' Meta Model Loader is postponed", array('@loaderClassName' => $loaderClassName)));
                    $index++;
                    $postponedLoaderCounter++;
                    break;
                default:
                    throw new IllegalStateException(t("'@loaderClassName' Meta Model Loader returned unsupported state: @stateName", array('@loaderClassName' => $loaderClassName, '@stateName' => $state)));
            }
        }

        // finalizing loading operation
        foreach ($this->loaders as $loader) {
            $loader->finalize($this, $metamodel);
        }
    }

    LogHelper::log_info(t('@metamodelName loading time: !loadingTime; Memory consumed: !memoryUsage', array('@metamodelName' => $metaModelName, '!loadingTime' => ExecutionPerformanceHelper::formatExecutionTime($metamodelTimeStart), '!memoryUsage' => memory_get_usage() - $metamodelMemoryUsage)));
}
/**
 * Opens a cache envelope, implementing a stale-while-revalidate style
 * protocol around a per-entry lock.
 *
 * Decision tree:
 *  - envelope older than the source data: treat as missing (forced refresh);
 *  - fresh data present: return it;
 *  - lock held by another refresher: return stale data if any, otherwise
 *    poll for the value for a bounded wait window;
 *  - no lock: acquire it (unless this thread already hit its lock limit,
 *    in which case stale data is returned) and return NULL so the caller
 *    regenerates the value.
 *
 * A NULL return means "caller must produce the data".
 *
 * @param string $cacheEntryName cache key being opened
 * @param object|null $envelope previously loaded envelope, or NULL
 * @param array $options optional settings, used to resolve source data freshness
 * @return mixed cached data, or NULL when the caller should regenerate it
 */
protected function openEnvelope($cacheEntryName, $envelope, array $options = NULL) {
    if (isset($envelope)) {
        $sourceDataAsOfDateTime = $this->getSourceDataAsOfDateTime($options);
        if ($envelope->isEnvelopeStale($sourceDataAsOfDateTime)) {
            // data source was refreshed after the envelope was created
            LogHelper::log_debug(t(
                'Forced envelope refresh for the cache entry name: @cacheEntryName',
                array('@cacheEntryName' => $cacheEntryName)));
            $envelope = NULL;
        }
    }

    // found unexpired data
    if (isset($envelope) && !$envelope->isDataStale()) {
        return $envelope->data;
    }

    $cacheLockEntryName = $this->assembleCacheLockEntryName($cacheEntryName);

    $isLockPresent = $this->isEntryPresentImpl($cacheLockEntryName);
    if ($isLockPresent) {
        if (isset($envelope)) {
            // found expired data which is in process of refreshing - returning stale data
            LogHelper::log_notice(t(
                'Using stale data for the cache entry name: @cacheEntryName',
                array('@cacheEntryName' => $cacheEntryName)));
            return $envelope->data;
        }
        else {
            // there is no data, but someone started to generate it ... waiting ... for limited time
            // per-check sleep in microseconds: total wait split across LOCK_CHECK_COUNT polls
            $lockWaitTime = 1000000 * self::$LOCK_WAIT_TIME / self::$LOCK_CHECK_COUNT;
            // going into sleep mode and hope the lock is released
            for ($i = 0; $i < self::$LOCK_CHECK_COUNT; $i++) {
                usleep($lockWaitTime);

                $envelopes = $this->loadValuesImpl(array($cacheEntryName, $cacheLockEntryName));
                // checking if value present now
                if (isset($envelopes[$cacheEntryName])) {
                    return $envelopes[$cacheEntryName]->data;
                }
                // checking if the lock is still present
                if (isset($envelopes[$cacheLockEntryName])) {
                    continue;
                }
                // the lock disappeared. There is nothing can be done now
                break;
            }
        }
    }
    else {
        if (isset($envelope)) {
            if (self::$currentThreadLockCount >= self::$LOCK_LIMIT_PER_THREAD) {
                // this thread done enough. From now on returning stale data for this thread
                LogHelper::log_notice(t(
                    'Using stale data for the cache entry name (lock limit reached): @cacheEntryName',
                    array('@cacheEntryName' => $cacheEntryName)));
                return $envelope->data;
            }
        }

        // preparing lock
        $lock = new CacheEntryLock();

        // calculating lock expiration time: never shorter than the wait window,
        // so waiters do not outlive the lock they are waiting on
        $lockExpirationTime = self::$LOCK_EXPIRATION_TIME;
        if ($lockExpirationTime < self::$LOCK_WAIT_TIME) {
            $lockExpirationTime = self::$LOCK_WAIT_TIME;
        }

        // setting the lock and ignoring if the operation was successful or not
        $this->storeValuesImpl(
            array($cacheLockEntryName => new CacheEntryEnvelope($lock)),
            $lockExpirationTime);
        self::$currentThreadLockCount++;
    }

    return NULL;
}
/**
 * Executes SQL statement(s) against the given data source, logging the
 * database execution time. Execution is skipped entirely (returning 0)
 * unless the global statement execution mode is PROCEED.
 *
 * @param DataSourceMetaData $datasource target data source
 * @param string|array $sql a single SQL statement or an array of statements
 * @return int number of affected records reported by the execution extension
 */
public function executeStatement(DataSourceMetaData $datasource, $sql) {
    $affectedRecordCount = 0;
    if (self::$STATEMENT_EXECUTION_MODE == self::STATEMENT_EXECUTION_MODE__PROCEED) {
        $connection = $this->getConnection($datasource);

        $timeStart = microtime(TRUE);
        $affectedRecordCount = $this->getExtension('executeStatement')->execute($this, $connection, $sql);
        // BUGFIX: $sql may be a plain SQL string; count() on a non-Countable
        // scalar warns on PHP 7.2+ and throws a TypeError on PHP 8.
        $statementCount = is_array($sql) ? count($sql) : 1;
        LogHelper::log_notice(t(
            'Database execution time for @statementCount statement(s): !executionTime',
            array(
                '@statementCount' => $statementCount,
                '!executionTime' => LogHelper::formatExecutionTime($timeStart))));
    }

    return $affectedRecordCount;
}
// Do not expect this to occur. LogHelper::log_notice("{$log_id}: Could not claim the Job {$job_id}."); LogHelper::log_notice("{$log_id}: Started udpating failed status(COULD NOT CLAIM THE JOB) 3 for job {$job_id}."); $job_details = array('status' => 3, 'end_time' => time()); $job_log = "~~{$log_id}: COULD NOT CLAIM THE JOB on " . date("m-d-Y, H:i:s"); QueueUtil::updateJobDetails($job_id, $job_details, $job_log); LogHelper::log_notice("{$log_id}: Completed udpating failed status(COULD NOT CLAIM THE JOB) 3 for job {$job_id}."); } } catch (JobRecoveryException $jre) { LogHelper::log_error("{$log_id}: Job recoverable Exception occured while processing job {$job_id}. Exception is " . $jre); LogHelper::log_notice("{$log_id}: Started recovering job to set staus to 0 for job {$job_id}."); $job_details = array('status' => 0, 'start_time' => NULL, 'end_time' => NULL); $job_log = "~~{$log_id}: Job recovered for job {$job_id} on " . date("m-d-Y, H:i:s") . ". Exception is " . $jre->getMessage(); QueueUtil::updateJobDetails($job_id, $job_details, $job_log); LogHelper::log_notice("{$log_id}: Completed recovering job and updated staus to 0 for job {$job_id} for reprocessing."); } catch (Exception $exception) { LogHelper::log_error("{$log_id}: Error while processing queue job {$job_id}. Exception is " . $exception); LogHelper::log_notice("{$log_id}: Started udpating failed status 3 for job {$job_id}."); $job_details = array('status' => 3, 'end_time' => time()); $job_log = "~~{$log_id}: Error while processing queue job on " . date("m-d-Y, H:i:s") . ". Exception is " . $exception->getMessage(); QueueUtil::updateJobDetails($job_id, $job_details, $job_log); LogHelper::log_notice("{$log_id}: Completed udpating failed status 3 for job {$job_id}."); } } else { LogHelper::log_notice("{$log_id}: No requests are found for processing. Sleep until next job is available."); } } else { LogHelper::log_notice("{$log_id}: Currently a job is in progress. 
Skipping processing next job until current job is finished."); } LogHelper::log_notice("{$log_id}: Completed process queue cron.");
/**
 * Replaces the requested cube with a temporary cube built on one of the cube's
 * pre-aggregated regions, when a region can satisfy the request.
 *
 * Bug fix: the comment "we cannot support source key on consecutive levels"
 * previously spilled onto a second, uncommented line ("It is only supported
 * for 'virtual' cubes"), which is a PHP parse error. The comment is now on
 * commented lines; all executable code is unchanged.
 *
 * @param CubeQueryRequest $request
 *   The incoming cube query; its cube name is rewritten via setCubeName()
 *   when a region is selected.
 * @param CubeMetaData $cube
 *   The cube whose regions are inspected. If the cube declares no regions
 *   the method returns without side effects.
 *
 * @throws UnsupportedOperationException
 *   When a measure has an unknown aggregation type, or a region declares
 *   more than one level for a dimension.
 */
protected function useApplicableCubeRegions(CubeQueryRequest $request, CubeMetaData $cube) {
    $metamodel = data_controller_get_metamodel();

    // Nothing to do when the cube has no regions configured.
    if (!isset($cube->regions)) {
        return;
    }

    // FIXME add support for measures in query list. Selected region needs to suport not only returning measures but also querying onces
    // Semi/non-additive measures cannot be re-aggregated, so the region must
    // match the requested dimensionality exactly.
    $isExactMatchRequired = FALSE;
    if (isset($request->measures)) {
        foreach ($request->measures as $requestMeasure) {
            $measureName = $requestMeasure->measureName;
            $cubeMeasure = $cube->findMeasure($measureName);
            if (isset($cubeMeasure) && isset($cubeMeasure->aggregationType)) {
                switch ($cubeMeasure->aggregationType) {
                    case MeasureTypes::ADDITIVE:
                        break;
                    case MeasureTypes::SEMI_ADDITIVE:
                    case MeasureTypes::NON_ADDITIVE:
                        $isExactMatchRequired = TRUE;
                        break;
                    default:
                        throw new UnsupportedOperationException(t(
                            'Unsupported measure aggregation type: @measureAggregationType',
                            array('@measureAggregationType' => $cubeMeasure->aggregationType)));
                }
            }
        }
    }

    // collecting possible eligible regions
    $eligibleRegionNames = NULL;
    foreach ($cube->regions as $regionName => $region) {
        // checking if the region supports all requested measures
        if (isset($request->measures)) {
            foreach ($request->measures as $requestMeasure) {
                if (!isset($region->measures[$requestMeasure->measureName])) {
                    // This region is missing a requested measure; try the next region.
                    continue 2;
                }
            }
        }
        $eligibleRegionNames[] = $regionName;
    }
    if (!isset($eligibleRegionNames)) {
        return;
    }

    // filtering eligible regions based on requested or queried dimensions
    if (isset($request->dimensions)) {
        $this->excludeIneligibleRegions($cube, $eligibleRegionNames, $request->dimensions, $isExactMatchRequired);
    }
    if (isset($request->queries)) {
        $this->excludeIneligibleRegions($cube, $eligibleRegionNames, $request->queries, $isExactMatchRequired);
    }

    // do we still have any regions which could be used for the request
    if (count($eligibleRegionNames) === 0) {
        return;
    }

    // we select first region suitable for the request
    $selectedRegionName = reset($eligibleRegionNames);
    $selectedRegion = $cube->regions->{$selectedRegionName};

    // preparing new cube configuration
    $regionCube = new CubeMetaData();
    $regionCube->name = "{$cube->name}_using_{$selectedRegionName}_region";
    // source dataset
    $regionCube->sourceDatasetName = $selectedRegion->datasetName;
    // dimensions
    if (isset($cube->dimensions)) {
        foreach ($cube->dimensions as $dimension) {
            $dimensionName = $dimension->name;
            if (!isset($selectedRegion->dimensions->{$dimensionName})) {
                continue;
            }

            $regionCubeDimension = $regionCube->registerDimension($dimensionName);
            $selectedRegionDimension = $selectedRegion->dimensions->{$dimensionName};
            if (isset($selectedRegionDimension->levels)) {
                // we need to prepare new dimension which contains levels which are supported by this region
                $sourceLevel = NULL;
                $isSelectedLevelFound = FALSE;
                foreach ($dimension->levels as $level) {
                    $levelName = $level->name;
                    if (!$isSelectedLevelFound && isset($level->sourceColumnName)) {
                        $sourceLevel = $level;
                    }

                    $isLevelPresent = isset($selectedRegionDimension->levels->{$levelName});
                    if ($isLevelPresent) {
                        if ($isSelectedLevelFound) {
                            throw new UnsupportedOperationException(t(
                                "Only one level is supported yet for each dimension in '@selectedRegionName' region of '@cubeName' cube",
                                array('@selectedRegionName' => $selectedRegionName, '@cubeName' => $cube->publicName)));
                        }
                        $isSelectedLevelFound = TRUE;
                    }

                    if ($isSelectedLevelFound) {
                        $regionLevel = $regionCubeDimension->registerLevel($levelName);
                        $regionLevel->initializeFrom($level);
                        if ($isLevelPresent) {
                            $regionLevel->sourceColumnName = $sourceLevel->sourceColumnName;
                        }
                        elseif (isset($regionLevel->sourceColumnName)) {
                            // we cannot support source key on consecutive levels.
                            // It is only supported for 'virtual' cubes
                            unset($regionLevel->sourceColumnName);
                        }
                    }
                }
            }
            else {
                $regionCubeDimension->initializeFrom($dimension);
            }
        }
    }
    // measures
    if (isset($request->measures)) {
        foreach ($request->measures as $requestMeasure) {
            $measureName = $requestMeasure->measureName;
            $cubeMeasure = $cube->getMeasure($measureName);

            $regionMeasure = $regionCube->registerMeasure($measureName);
            $regionMeasure->initializeFrom($cubeMeasure);
        }
    }

    // FIXME the following code will throw an exception if we try to reuse the same region during execution of a PHP script
    // registering the cube in meta model
    $regionCube->temporary = TRUE;
    // FIXME when loading meta model automatically create cubes for all regions
    $metamodel->registerCube($regionCube);

    // updating the request to use new cube
    LogHelper::log_notice(t(
        "Using '@selectedRegionName' region of '@cubeName' cube",
        array('@selectedRegionName' => $selectedRegionName, '@cubeName' => $cube->name)));
    LogHelper::log_info(t(
        'Creating temporary cube to satisfy this request: @regionCubeName',
        array('@regionCubeName' => $regionCube->name)));
    // FIXME create new request and delete the following method in request class
    $request->setCubeName($regionCube->name);
}
/**
 * Refreshes the meta model for a single data source from its system tables.
 *
 * Delegates the actual work to the prepare/update/comment-processing steps,
 * executed in that order.
 *
 * @param SystemTableMetaModelLoaderCallContext $callcontext
 *   Shared state for this loader run.
 * @param AbstractMetaModel $metamodel
 *   The meta model being updated.
 * @param DataSourceMetaData $datasource
 *   The data source whose system tables are inspected.
 * @param array|NULL $filters
 *   Optional filters; not used directly by this method.
 */
protected function loadFromDataSource(SystemTableMetaModelLoaderCallContext $callcontext, AbstractMetaModel $metamodel, DataSourceMetaData $datasource, array $filters = NULL) {
    $messageParameters = array(
        '@datasourceName' => $datasource->publicName,
        '@datasourceType' => $datasource->type,
    );
    LogHelper::log_notice(t(
        "Updating Meta Model from '@datasourceName' data source (type: @datasourceType) system tables ...",
        $messageParameters));

    // Update pipeline: prepare the datasets, apply the updates,
    // then process table and column comments.
    $this->prepareDatasets4Update($callcontext, $metamodel, $datasource);
    $this->updateDatasets($callcontext, $datasource);
    $this->processTableComments($callcontext, $metamodel, $datasource);
    $this->processColumnComments($callcontext, $metamodel, $datasource);
}
/**
 * Loads dataset definitions from GovDashboard 'dataset' content-type nodes
 * into the meta model.
 *
 * Bug fix: $selectedDataSourceNames was initialized to FALSE, but
 * isset(FALSE) is TRUE, so the "no selected datamarts" early return was
 * unreachable and FALSE could be passed to fieldCondition(). It is now
 * initialized to NULL so the isset() check works as the original comment
 * intended (appending to a FALSE variable is also an error in modern PHP).
 *
 * @param AbstractMetaModel $metamodel
 *   The meta model to register prepared datasets into.
 * @param array|NULL $filters
 *   Optional filters; only the 'DatasetMetaData' => ('datasourceName' => [...])
 *   mapping is supported.
 *
 * @throws UnsupportedOperationException
 *   When an unsupported filter property is supplied.
 */
public function load(AbstractMetaModel $metamodel, array $filters = NULL) {
    $environment_metamodel = data_controller_get_environment_metamodel();

    LogHelper::log_notice(t('Loading Meta Model from GovDashboard Content Types ...'));

    $loaderName = $this->getName();

    // Selecting published dataset nodes; access checks are intentionally
    // bypassed because this is an internal meta model load.
    $datasetQuery = new EntityFieldQuery();
    $datasetQuery->entityCondition('entity_type', 'node');
    $datasetQuery->propertyCondition('type', NODE_TYPE_DATASET);
    $datasetQuery->propertyCondition('status', NODE_PUBLISHED);
    $datasetQuery->addTag('DANGEROUS_ACCESS_CHECK_OPT_OUT');

    // applying filters. Note that we have custom mapping for filter properties
    $datasetFilters = isset($filters['DatasetMetaData']) ? $filters['DatasetMetaData'] : NULL;
    if (isset($datasetFilters)) {
        foreach ($datasetFilters as $propertyName => $filterValues) {
            switch ($propertyName) {
                case 'datasourceName':
                    // NULL (not FALSE) so isset() below correctly reports
                    // whether any data mart matched.
                    $selectedDataSourceNames = NULL;
                    // checking if any of the data sources are actually data marts
                    foreach ($filterValues as $datasourceName) {
                        $datasource = $environment_metamodel->findDataSource($datasourceName);
                        if (isset($datasource->nid)) {
                            $selectedDataSourceNames[] = $datasourceName;
                        }
                    }
                    if (isset($selectedDataSourceNames)) {
                        $datasetQuery->fieldCondition('field_dataset_datasource', 'value', $selectedDataSourceNames);
                    }
                    else {
                        // there is no selected datamarts for this request
                        return;
                    }
                    break;
                default:
                    throw new UnsupportedOperationException(t(
                        'Unsupported mapping for the property for filtering during dataset loading: %propertyName',
                        array('%propertyName' => $propertyName)));
            }
        }
    }

    $datasetEntities = $datasetQuery->execute();
    $dataset_nids = isset($datasetEntities['node']) ? array_keys($datasetEntities['node']) : NULL;
    if (!isset($dataset_nids)) {
        return;
    }

    $datasetNodes = node_load_multiple($dataset_nids);

    // loading columns for selected datasets
    $columnNodes = gd_column_get_columns_4_dataset($dataset_nids, LOAD_ENTITY, INCLUDE_UNPUBLISHED);
    // grouping nodes in context of dataset
    $datasetsColumnNodes = $this->groupNodesByDataset($columnNodes, 'field_column_dataset');

    // preparing dataset & cubes
    $processedDatasetCount = 0;
    foreach ($datasetNodes as $datasetNode) {
        $dataset_nid = $datasetNode->nid;

        $datasourceName = get_node_field_value($datasetNode, 'field_dataset_datasource');
        $datasource = isset($datasourceName) ? $environment_metamodel->findDataSource($datasourceName) : NULL;
        if (!isset($datasource)) {
            // the data mart could be unpublished or ...
            continue;
        }

        $datasetColumnNodes = isset($datasetsColumnNodes[$dataset_nid]) ? $datasetsColumnNodes[$dataset_nid] : NULL;

        // preparing dataset
        $dataset = GD_DatasetMetaModelLoaderHelper::prepareDataset($metamodel, $datasetNode, $datasetColumnNodes, $datasource);
        // assigning a loader which created the dataset
        $dataset->loaderName = $loaderName;

        $processedDatasetCount++;
    }

    LogHelper::log_info(t('Processed @datasetCount dataset node(s)', array('@datasetCount' => $processedDatasetCount)));
}
/**
 * Counts records of a script-based dataset.
 *
 * Serializes the count request into script parameters and delegates the
 * counting to the dataset's 'countDatasetRecords' script function.
 *
 * @param DataControllerCallContext $callcontext
 *   Call context (not used directly here).
 * @param DatasetCountRequest $request
 *   Identifies the dataset and carries count parameters.
 *
 * @return mixed
 *   The record count returned by the script function.
 */
public function countDatasetRecords(DataControllerCallContext $callcontext, DatasetCountRequest $request) {
    $requestedDatasetName = $request->getDatasetName();
    LogHelper::log_notice(t('Counting script-based dataset records: @datasetName', array('@datasetName' => $requestedDatasetName)));

    // Resolve the dataset definition from the meta model.
    $dataset = data_controller_get_metamodel()->getDataset($requestedDatasetName);

    // Translate the request into parameters understood by the dataset script.
    $requestSerializer = new DatasetCountUIRequestSerializer();
    $scriptParameters = $requestSerializer->serialize($request);

    $recordCount = $this->executeScriptFunction($dataset, 'countDatasetRecords', $scriptParameters);
    LogHelper::log_info(t('Counted @count record(s)', array('@count' => $recordCount)));

    return $recordCount;
}
/**
 * Creates the meta model from Drupal (CCK) content types: one dataset per
 * content type, with the set of storage tables/fields the loader can support.
 *
 * Bug fixes:
 *  - CONTENT_DB_STORAGE_PER_FIELD case had 'break;' BEFORE the $tableName
 *    assignment, so the assignment was dead code and $tableName kept a stale
 *    value from the previous field (or was undefined). Statements reordered.
 *  - 'continue;' inside the switch default acts as 'break' in PHP (it targets
 *    the switch), so unsupported storage types fell through and used an unset
 *    $tableName. Replaced with 'continue 2;' to skip the field as intended.
 *  - $field->required accessed the field definition as an object although it
 *    is an array everywhere else ($field['field_name'], ...); changed to
 *    $field['required'].
 *  - stdClass containers are now created explicitly instead of relying on
 *    implicit object auto-vivification (deprecated in modern PHP).
 *
 * NOTE(review): $finalAttempt is a required parameter after an optional one
 * (deprecated signature in PHP 8) and is unused in this body — confirm with
 * callers before changing the signature.
 *
 * @param AbstractMetaModelFactory $factory
 *   Factory driving the load (unused in this body).
 * @param AbstractMetaModel $metamodel
 *   Meta model to register generated datasets into.
 * @param array|NULL $filters
 *   Unused by this loader.
 * @param mixed $finalAttempt
 *   Unused by this loader.
 *
 * @return mixed
 *   self::LOAD_STATE__SUCCESSFUL.
 */
public function load(AbstractMetaModelFactory $factory, AbstractMetaModel $metamodel, array $filters = NULL, $finalAttempt) {
    LogHelper::log_notice(t('Creating Meta Model using Drupal Content Types...'));

    $datasetCounter = 0;

    $contentTypes = content_types();
    if (isset($contentTypes)) {
        foreach ($contentTypes as $contentTypeName => $contentType) {
            // preparing list of tables which could be supported by our code
            $supportedTables = NULL;
            foreach ($contentType['fields'] as $field) {
                $fieldName = $field['field_name'];

                if ($field['multiple'] > 0) {
                    $message = t(
                        'Multiple values are not supported yet: @contentTypeName.@fieldName',
                        array('@contentTypeName' => $contentTypeName, '@fieldName' => $fieldName));
                    LogHelper::log_warn($message);
                    continue; // UnsupportedOperationException
                }

                // preparing table name where the field is stored
                $fieldStorage = $field['db_storage'];
                switch ($fieldStorage) {
                    case CONTENT_DB_STORAGE_PER_CONTENT_TYPE:
                        $tableName = _content_tablename($field['type_name'], $fieldStorage);
                        break;
                    case CONTENT_DB_STORAGE_PER_FIELD:
                        // FIX: assignment must precede break (was dead code).
                        $tableName = _content_tablename($fieldName, $fieldStorage);
                        break;
                    default:
                        $message = t(
                            "Unsupported storage type - '@fieldStorage' for the field: @fieldName",
                            array('@fieldStorage' => $fieldStorage, '@fieldName' => $fieldName));
                        LogHelper::log_warn($message);
                        // FIX: 'continue' would only exit the switch; 'continue 2'
                        // skips this field. // UnsupportedOperationException
                        continue 2;
                }

                // calculating number of 'visible' suffixes
                $visibleSuffixCount = 0;
                foreach ($field['columns'] as $columnAttributes) {
                    if (isset($columnAttributes['views'])) {
                        if ($columnAttributes['views'] === TRUE) {
                            $visibleSuffixCount++;
                        }
                    }
                    else {
                        $visibleSuffixCount++;
                    }
                }

                // generating fields for all 'visible' suffixes
                foreach ($field['columns'] as $columnSuffix => $columnAttributes) {
                    if (isset($columnAttributes['views']) && $columnAttributes['views'] === FALSE) {
                        continue;
                    }

                    $supportedField = new stdClass();
                    // required flag. FIX: $field is an array, not an object.
                    $supportedField->required = $field['required'] == 1;
                    // original name of the field
                    $supportedField->original_name = $fieldName;
                    // calculating name of database column
                    $supportedField->column = $fieldName . '_' . $columnSuffix;
                    // field name: keep the bare field name when only one
                    // column suffix is visible, otherwise use the column name
                    if ($visibleSuffixCount === 1) {
                        $supportedField->name = $fieldName;
                    }
                    else {
                        $supportedField->name = $supportedField->column;
                    }

                    // explicit container creation instead of implicit stdClass auto-vivification
                    if (!isset($supportedTables[$tableName])) {
                        $supportedTables[$tableName] = new stdClass();
                    }

                    if (isset($supportedTables[$tableName]->storage)) {
                        $previousStorage = $supportedTables[$tableName]->storage;
                        if ($fieldStorage != $previousStorage) {
                            $message = t(
                                "Inconsistent storage for '@tableName' table([@fieldStorage1, @fieldStorage2]) for the field: @fieldName",
                                array('@tableName' => $tableName, '@fieldName' => $fieldName, '@fieldStorage1' => $previousStorage, '@fieldStorage2' => $fieldStorage));
                            LogHelper::log_warn($message);
                            continue; // IllegalStateException
                        }
                    }
                    else {
                        $supportedTables[$tableName]->storage = $fieldStorage;
                    }

                    $supportedTables[$tableName]->supportedFields[$supportedField->name] = $supportedField;
                }
            }

            // preparing dataset source
            $datasetSource = new stdClass();
            $datasetSource->assembler = new stdClass();
            $datasetSource->assembler->type = ContentTypeDatasetSourceAssembler::$DATASET_SOURCE_ASSEMBLER__TYPE;
            $datasetSource->assembler->config = new stdClass();
            $datasetSource->assembler->config->drupal = $contentType;
            if (isset($supportedTables)) {
                $datasetSource->assembler->config->supportedTables = $supportedTables;
            }

            // preparing & registering dataset
            $dataset = new DatasetMetaData();
            $dataset->name = $this->getDatasetName($contentTypeName);
            $dataset->description = $contentType['description'];
            $dataset->datasourceName = AbstractDrupalDataSourceQueryProxy::$DATASOURCE_NAME__DEFAULT;
            $dataset->source = $datasetSource;
            // FIXME Populate list of columns and mark the dataset as complete
            $dataset->registerColumn('nid')->key = TRUE;

            $metamodel->registerDataset($dataset);
            $datasetCounter++;
        }
    }

    LogHelper::log_info(t('Processed @datasetCount datasets', array('@datasetCount' => $datasetCounter)));

    return self::LOAD_STATE__SUCCESSFUL;
}
/**
 * Loads the meta model for a single data source from its system tables.
 *
 * Runs the full generation pipeline in a fixed order: dataset generation and
 * filtering, key/reference processing, comment processing, name generation,
 * registration, logical datasets, and finally cube generation.
 *
 * @param SystemTableMetaModelLoaderCallContext $callcontext
 *   Shared state for this loader run.
 * @param AbstractMetaModel $metamodel
 *   The meta model being populated.
 * @param DataSourceMetaData $datasource
 *   The data source whose system tables are inspected.
 * @param array|NULL $filters
 *   Optional filters passed to the dataset elimination step.
 */
protected function loadFromDataSource(SystemTableMetaModelLoaderCallContext $callcontext, AbstractMetaModel $metamodel, DataSourceMetaData $datasource, array $filters = NULL) {
    $messageParameters = array(
        '@datasourceName' => $datasource->publicName,
        '@datasourceType' => $datasource->type,
    );
    LogHelper::log_notice(t(
        "Loading Meta Model from '@datasourceName' data source (type: @datasourceType) system tables ...",
        $messageParameters));

    // Dataset generation & filtering.
    $this->generateDatasets($callcontext, $datasource);
    $this->eliminateIneligibleDatasets($callcontext, $filters);

    // Keys, references and column types.
    $this->processPrimaryKeyConstraints($callcontext, $datasource);
    $this->processReferences($callcontext, $datasource);
    $this->fixColumnApplicationType($callcontext);

    // Comments & public names.
    $this->processTableComments($callcontext, $metamodel, $datasource);
    $this->processColumnComments($callcontext, $metamodel, $datasource);
    $this->generateMissingDatasetPublicNames($callcontext);
    $this->generateMissingColumnPublicNames($callcontext);

    // Registration, logical datasets and cubes.
    $this->registerDatasets($callcontext, $metamodel);
    $this->generateLogicalDatasets($callcontext, $metamodel);
    $this->registerLogicalDatasets($callcontext, $metamodel);
    $this->generateCubes($callcontext, $metamodel);
}
/**
 * Counts records of a SQL-based cube.
 *
 * Assembles the cube query statement, wraps it as a subquery of a fresh
 * statement, and delegates the actual counting to countRecords() against the
 * data source hosting the cube's source dataset.
 *
 * @param DataControllerCallContext $callcontext
 *   Call context propagated to statement preparation and counting.
 * @param CubeQueryRequest $request
 *   Identifies the cube and carries query parameters.
 * @param ResultFormatter $resultFormatter
 *   Present for interface compatibility; not used by this implementation.
 *
 * @return mixed
 *   The record count returned by countRecords().
 */
public function countCubeRecords(DataControllerCallContext $callcontext, CubeQueryRequest $request, ResultFormatter $resultFormatter) {
    $cubeName = $request->getCubeName();
    LogHelper::log_notice(t('Counting SQL-based cube records: @cubeName', array('@cubeName' => $cubeName)));

    // Resolve the cube, its source dataset and the hosting data source.
    $environment_metamodel = data_controller_get_environment_metamodel();
    $metamodel = data_controller_get_metamodel();
    $cube = $metamodel->getCube($cubeName);
    $sourceDataset = $metamodel->getDataset($cube->sourceDatasetName);
    $datasource = $environment_metamodel->getDataSource($sourceDataset->datasourceName);

    // Build the cube query and break it into assembled sections.
    $cubeStatement = $this->prepareCubeQueryStatement($callcontext, $request);
    list($isSubqueryRequired, $assembledSections) = $cubeStatement->prepareSections(NULL);

    // Wrap the assembled query as a subquery so counting runs over its result set.
    $countStatement = new Statement();
    $countStatement->tables[] = new SubquerySection(Statement::assemble($isSubqueryRequired, NULL, $assembledSections));

    return $this->countRecords($callcontext, $datasource, array($countStatement));
}