/**
 * {@inheritDoc}
 */
public function setNamespace($namespace)
{
    parent::setNamespace($namespace);
    // Propagate the namespace to every provider in the chain so that
    // all of them prefix their keys identically.
    foreach ($this->cacheProviders as $cacheProvider) {
        $cacheProvider->setNamespace($namespace);
    }
}
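For context, a minimal usage sketch: this method appears to belong to a chain-style cache, and the ChainCache, ArrayCache, and FilesystemCache class names below are assumptions, not taken from the example itself.

// Hypothetical setup: a chain over two providers (class names assumed).
$chain = new ChainCache(array(new ArrayCache(), new FilesystemCache('/tmp/cache')));

// One call updates the chain and, via the loop above, both inner providers,
// so every provider prefixes its keys with the same namespace.
$chain->setNamespace('myapp');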
public function testGenerateCacheKeys()
{
    CacheProvider::generateCacheKey();
}
<?php

/**
  It became apparent that generating the JSON files export/download/$study.json
  can be quite expensive, and may fail on our server if memory or time
  consumption grows too high. This is also documented in #237.
  To combat this problem, this script (re-)generates all the JSON files as necessary.
*/
chdir(__DIR__);
require_once '../config.php';
require_once '../query/cacheProvider.php';
chdir('..');
echo "Regenerating Study Cache:\n";
foreach (DataProvider::getStudies() as $study) {
    if (CacheProvider::hasCache($study)) {
        continue;
    }
    echo "{$study}..\n";
    $chunk = DataProvider::getStudyChunk($study);
    CacheProvider::setCache($study, json_encode($chunk));
}
echo "Done.\n";
Example #4
             }
         }
     }
     array_push($qs, 'SET FOREIGN_KEY_CHECKS=1;', 'COMMIT;', 'SET AUTOCOMMIT=1;');
     if (php_sapi_name() !== 'cli') {
         header("Pragma: public");
         header("Expires: 0");
         header("Cache-Control: must-revalidate, post-check=0, pre-check=0");
         header("Content-Type: application/octet-stream; charset=utf-8");
         header("Content-Disposition: attachment;filename=\"dump.sql\"");
         header("Content-Transfer-Encoding: binary");
     }
     echo implode("\n", $qs) . "\n";
     break;
 case 'import':
     CacheProvider::cleanCache('../');
    // Execute the statements one by one, recording the 1-based index of
    // every statement that fails so failures can be reported afterwards:
    $report = array();
    $i = 1;
     $worked = $dbConnection->multi_query($file);
     if (!$worked) {
         array_push($report, $i);
     }
     while ($dbConnection->more_results()) {
         $worked = $dbConnection->next_result();
         $i++;
         if (!$worked) {
             array_push($report, $i);
         }
     }
     ?>
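The import branch records the 1-based index of every failed statement in $report; how that array is surfaced is not shown here, but a minimal sketch could look like the following (the response shape is an assumption):

// Hypothetical reporting step after the import loop above:
if (count($report) > 0) {
    echo json_encode(array('failedStatements' => $report));
} else {
    echo json_encode(array('ok' => true));
}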
Example #5
  1.: We offer a list of studies, and also global data applying to each study.
  2.: Each study can be fetched separately.
  3.: JavaScript will tack a timestamp onto each study,
      so that we can drop older studies from localStorage
      in case we run out of space.
  4.: The data for each study thus consists of the following things:
      - Name and basic data for the study itself
      - A list of Families in the Study
      - A list of Regions per Family
      - A list of Languages per Region
      - A list of Words per Study
      - A list of Transcriptions per pair of Word and Language
      - Defaults for the Study
*/
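/*
  For orientation, a sketch of the array shape that getStudyChunk() plausibly
  returns per point 4 above. The field names here are assumptions for
  illustration, not the verified schema:

  array(
      'Name'           => '...',       // name and basic data for the study
      'Families'       => array(...),  // families in the study
      'Regions'        => array(...),  // regions per family
      'Languages'      => array(...),  // languages per region
      'Words'          => array(...),  // words per study
      'Transcriptions' => array(...),  // per pair of word and language
      'Defaults'       => array(...)   // defaults for the study
  )
*/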
if (array_key_exists('global', $_GET)) {
    echo Config::toJSON(array('studies' => DataProvider::getStudies(), 'global' => DataProvider::getGlobal()));
} elseif (array_key_exists('study', $_GET)) {
    if (CacheProvider::hasCache($_GET['study'])) {
        echo CacheProvider::getCache($_GET['study']);
    } else {
        $ret = DataProvider::getStudyChunk($_GET['study']);
        // Encode once, send it, and cache the encoded payload for next time:
        $data = json_encode($ret);
        echo $data;
        CacheProvider::setCache($_GET['study'], $data);
    }
} else {
    echo json_encode(array(
        'lastUpdate' => DataProvider::getLastImport(),
        'Description' => 'Add a global parameter to fetch global data, '
            . 'and add a study parameter to fetch a study.'
    ));
}
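Putting the three branches together, the endpoint behaves roughly as follows. The file name query.php is an assumption; the parameter names come from the code above:

// GET query.php?global            -> studies list plus global data
// GET query.php?study=<StudyName> -> the study chunk, served from cache when present
// GET query.php                   -> lastUpdate timestamp and a short usage description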