function doJobLoop(){
	global $wgJobTypeConfig, $wahJobDelay, $wahRunOnce, $wahStatusOutput;

	//look for a job (if none is found, sleep for $wahJobDelay seconds and poll again)
	$job = WahJobManager::getNewJob( false, 'Internal' );
	while( !$job && $wahRunOnce == false ){
		if( $wahStatusOutput )
			print "no jobs found, waiting $wahJobDelay seconds\n";
		sleep( $wahJobDelay );
		//poll in a loop rather than recursing (recursion would slowly grow the call stack on an idle wiki)
		$job = WahJobManager::getNewJob( false, 'Internal' );
	}
	if( !$job ){
		//single-run mode and nothing to do:
		if( $wahStatusOutput )
			print "no job found\n";
		return;
	}

	$jobSet = WahJobManager::getJobSetById( $job->job_set_id );
	$jobDetails = FormatJson::decode( $job->job_json );

	//get the title (so we can access the source file)
	$fTitle = Title::newFromText( $job->title, $job->ns );
	$file = wfLocalFile( $fTitle );
	$thumbPath = $file->getThumbPath( $jobSet->set_encodekey );
	//make sure the directory is ready:
	wfMkdirParents( $thumbPath, null, __METHOD__ );

	$destTarget = $thumbPath . '.ogg';
	//issue the encoding command
	if($wahStatusOutput) print "Running Encode Command...\n";
	wahDoEncode($file->getPath(), $destTarget, $jobDetails->encodeSettings );

	//once done with the encode, update the job status:
	WahJobManager::updateJobDone( $job );
	//update the set as done (if this was the only job in the set)
	$wjm = WahJobManager::newFromSet( $jobSet );
	$percDone = $wjm->getDonePerc();
	if( $percDone == 1 ){
		WahJobManager::updateSetDone( $jobSet );
	}else{
		if( $wahStatusOutput )
			print "job set not yet complete (mixed chunkDuration types?)\n";
	}
}
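
/**
 * Illustrative sketch only (not part of the extension): one way a maintenance
 * script might configure and start the internal encode loop above. The wrapper
 * name and default values are assumptions; only doJobLoop() and the
 * $wahJobDelay / $wahRunOnce / $wahStatusOutput globals come from the code above.
 */
function wahExampleRunInternalEncoder( $runOnce = false, $delaySeconds = 5 ) {
	global $wahJobDelay, $wahRunOnce, $wahStatusOutput;
	$wahJobDelay = $delaySeconds;	// seconds to sleep between polls when the queue is empty
	$wahRunOnce = $runOnce;		// true: process at most one job, then exit
	$wahStatusOutput = true;	// print progress messages to the console
	doJobLoop();
}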
	/**
	 * Process a submitted job: validate the job key and the uploaded chunk,
	 * store the chunk, and join the set once all chunks are done.
	 */
	function doProccessJobKey( $job_key ){
		global $wgRequest, $wgUser;
		//check that it is a valid job key ( job_id . '_' . sha1( job_json ) )
		list($job_id, $json_sha1) = explode( '_', $job_key );

		//get the job object
		$job = WahJobManager::getJobById( $job_id );

		if( !$job || sha1($job->job_json) != $json_sha1){
			//die on bad job key
			return $this->dieUsage('Bad Job key', 'badjobkey') ;
		}

		$jobSet =  WahJobManager::getJobSetById( $job->job_set_id );

		//check that it is a valid ogg video file (via ffmpeg2theora --info)
		$uploadedJobFile = $wgRequest->getFileTempname('file');
		$mediaMeta = wahGetMediaJsonMeta( $uploadedJobFile );

		if( !$mediaMeta ){
			//failed basic ffmpeg2theora video validation
			return $this->dieUsage("Not a valid Video file", 'badfile');
		}

		//grab the ogg codec types from OggHandler.php
		global $wgOggVideoTypes, $wgOggAudioTypes;
		//check for theora and vorbis streams in the metadata output of the file:
		if( isset($wgOggVideoTypes) && isset($wgOggAudioTypes) ){
			$isOgg = false;

			foreach ( $mediaMeta->video as $videoStream ) {
				if(in_array( ucfirst( $videoStream->codec ),  $wgOggVideoTypes))
					$isOgg =true;
			}
			foreach ( $mediaMeta->audio as $audioStream ) {
				if(in_array( ucfirst( $audioStream->codec ),  $wgOggAudioTypes))
					$isOgg = true;
			}
			if(!$isOgg){
				return $this->dieUsage( 'Not a valid Ogg file', 'badfile' );
			}
		}

		//all good so far; put it into the derivative temp folder with each piece named after its job_order_id
		//@@todo need to rework this a bit for flattening "sequences"
		$fTitle = Title::newFromText( $jobSet->set_title, $jobSet->set_namespace );
		$file = wfLocalFile( $fTitle );
		$thumbPath = $file->getThumbPath( $jobSet->set_encodekey );

		$destTarget = $thumbPath .'/'. $job->job_order_id . '.ogg';
		if( is_file( $destTarget ) ){
			//someone else beat this user to finishing the job? or an out-of-sync file system?
			//it is tricky to tie the old file to a particular user, so just replace it:
			unlink($destTarget);
			//compare the oshashes? take the later file if they don't match
			/*$metaDest = wahGetMediaJsonMeta( $destTarget );
			if( $mediaMeta->oshash == $metaDest->oshash ){

				return $this->dieUsage( 'The target upload file already exists', 'alreadydone' );
			}else{
				//old expired file? or someone ordered a job override? remove the old chunk and continue processing:
				unlink($destTarget);
			}*/
		}
		//move the current chunk to that path:
		//@@todo use Repo methods (this is failing atm)
		/*$status = RepoGroup::singleton()->getLocalRepo()->store(
			$uploadedJobFile,
			'thumb',
			$destTarget
		);
		if( !$status->isGood() ){
			return $this->dieUsageMsg( array('code'=>'fileerror', 'info'=>'Could Not Move The Uploaded File') );
		}*/
		wfMkdirParents( $thumbPath, null, __METHOD__ );
		if( !move_uploaded_file($uploadedJobFile, $destTarget) ){
			return $this->dieUsage( 'Could Not Move The Uploaded File', 'fileerror' );
		}
		//issue the jobDone to the Manager:
		WahJobManager :: updateJobDone($job, $wgUser->getId());
		$dbw = wfGetDB( DB_MASTER );

		//check if this is the "last" job; if so, shell out the join command
		$wjm = WahJobManager::newFromSet( $jobSet );
		$percDone = $wjm->getDonePerc();
		if($percDone != 1){
			//the stream is not done but success on chunk
			return $this->getResult()->addValue( null, $this->getModuleName(),
					array(
						'chunkaccepted' => true,
						'setdone'		=> false
					)
				);
		}else{
			//all the chunks are marked "done" according to the DB:
			//make sure all the chunk files actually exist in the thumb directory
			$fileList = array();
			for( $i=0; $i < $jobSet->set_jobs_count ; $i++ ){
				//make sure all the files are present:
				if(!is_file( "$thumbPath/{$i}.ogg" )){
					wfDebug( "Missing WikiAtHome chunk $i" );
					//unset the job complete state
					$dbw->update( 'wah_jobqueue',
						array(
							'job_done_time = NULL',
							'job_done_user_id = NULL'
						),
						array(
							'job_set_id' 	=> $jobSet->set_id,
							'job_order_id' 	=> $i
						),
						__METHOD__,
						array(
							'LIMIT' => 1
						)
					);
					//make sure jobset is not tagged done either:
					$dbw->update( 'wah_jobset',
						array(
							'set_done_time = NULL'
						),
						array(
							'set_id' 		=> $jobSet->set_id,
						),
						__METHOD__,
						array(
							'LIMIT' => 1
						)
					);
					//return missing files (maybe something is ~broken~)
					wfDebug("WikiAtHome database out of sync with file system?\nFile: $thumbPath/{$i}.ogg missing, re-adding job");
					return $this->getResult()->addValue( null, $this->getModuleName(),
						array(
							'chunkaccepted' => true,
							'setdone'		=> false
						)
					);
				}
				//else add it to the combine list:
				$fileList[] = "{$thumbPath}/{$i}.ogg";
			}
			$finalDestTarget = "{$thumbPath}.ogg";
			//make sure we have a set of thumbs to merge:
			if( count( $fileList )  > 1 ){
				//do merge request
				//@@todo do this in a background shell task
				//( if the files are very large video could take longer than 30 seconds to concatenate )
				wahDoOggCat( $finalDestTarget, $fileList);
			}else{
				//rename to $finalDestTarget
				$curThumbPath = current( $fileList );
				rename($curThumbPath, $finalDestTarget);
			}
			//if the file got created tag the jobset as done:
			if( is_file( $finalDestTarget )){
				//@@todo do some more checks (e.g. that the length is accurate and the file is valid ogg video)

				//update jobSet done:
				WahJobManager :: updateSetDone( $jobSet );
				//send out stream done
				return $this->getResult()->addValue( null, $this->getModuleName(),
					array(
						'chunkaccepted' => true,
						'setdone'		=> true
					)
				);
			}else{
				wfDebug( "Concatenation Failed. Tag job as failed?");
				//tag the job as failed ( also put in the fail time )
				$dbw->update('wah_jobset',
					array(
						'set_done_time' => time(),
						'set_failed' => 1
					),
					array(
						'set_id' => $jobSet->set_id
					),
					__METHOD__,
					array(
						'LIMIT' => 1
					)
				);
				//send join failed
				return $this->dieUsage("Concatenation Failed: $curThumbPath to $finalDestTarget" . count( $fileList ) . ' ' .print_r( $fileList ), 'catfail');


			}
		}

		//return success

	}
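
	/**
	 * Illustrative sketch only (not part of the extension): how a client is
	 * expected to build the job_key that doProccessJobKey() validates above,
	 * i.e. job_id . '_' . sha1( job_json ). The helper name is an assumption,
	 * as is the job row exposing its id as $job->job_id; the key format itself
	 * comes directly from the validation code above.
	 */
	function exampleMakeJobKey( $job ) {
		// key format checked by doProccessJobKey(): "<job_id>_<sha1 of job_json>"
		return $job->job_id . '_' . sha1( $job->job_json );
	}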
	/**
	 * It is unfortunate that we have to maintain two versions of the Ogg doTransform,
	 * but it proved difficult to integrate them. In the future we should have a concept
	 * of "derivatives" and greatly simplify the media handlers.
	 */
	function doTransform( $file, $dstPath, $dstUrl, $params, $flags = 0 ) {
		global $wgEnabledDerivatives, $wgFFmpegLocation, $wgOut;

		$width = $params['width'];
		$srcWidth = $file->getWidth();
		$srcHeight = $file->getHeight();
		$height = $srcWidth == 0 ? $srcHeight : $width * $srcHeight / $srcWidth;
		$length = $this->getLength( $file );

		//make sure we have all the output classes of oggHandler loaded by the autoLoader:
		$oggHandle =  MediaHandler::getHandler( 'application/ogg' );

		//add the oggHandler js:
		$oggHandle->setHeaders( $wgOut );

		//do some arbitrary derivative selection logic:
		$encodeKey = WikiAtHome::getTargetDerivative($width, $file);
		//see if we have that encoding profile already:

		//get the job manager; check the status and output the current state, or defer to OggHandler_body for output
		$wjm = WahJobManager::newFromFile( $file , $encodeKey );

		//check for the derivative file:
		//$fTitle = Title::newFromText( $wjm->getTitle(), $wjm->getNamespace() );
		//$oggFile = wfLocalFile( $fTitle );
		$thumbPath 	 = $file->getThumbPath( $wjm->getEncodeKey() );
		$oggThumbUrl = $file->getThumbUrl( $wjm->getEncodeKey() . '.ogg');

		//check that we have the requested theora derivative
		if( is_file ( "{$thumbPath}.ogg" )){
			//get the thumb time:
			$thumbTime = false;
			if ( isset( $params['thumbtime'] ) ) {
				$thumbTime = $this->parseTimeString( $params['thumbtime'], $length );
			}
			if ( $thumbTime === false ) {
				# Seek to midpoint by default, it tends to be more interesting than the start
				$thumbTime = $length / 2;
			}
			wfMkdirParents( dirname( $dstPath ), null, __METHOD__ );
			if(!is_file($dstPath)){
				$cmd = wfEscapeShellArg( $wgFFmpegLocation ) .
				' -ss ' . intval( $thumbTime ) . ' ' .
				' -i ' . wfEscapeShellArg( $file->getPath() ) .
				# MJPEG, that's the same as JPEG except it's supported by the windows build of ffmpeg
				# No audio, one frame
				' -f mjpeg -an -vframes 1 ' .
				wfEscapeShellArg( $dstPath ) . ' 2>&1';

				$retval = 0;
				$returnText = wfShellExec( $cmd, $retval );
				//if Bad file return error:
				if ( $this->removeBadFile( $dstPath, $retval ) || $retval ) {
					$lines = explode( "\n", str_replace( "\r\n", "\n", $returnText ) );
					return new MediaTransformError( 'thumbnail_error', $width, $height, implode( "\n", $lines ) );
				}
			}
			return new OggTransformOutput( $file, $oggThumbUrl, $dstUrl, $width, $height, $length,
				$dstPath, false /* noIcon */, 0 /* offset */, 0 );
		}else{
			//output our current progress
			return new MediaQueueTransformOutput($file, null, $width, $height, $wjm->getDonePerc() );
		}
	}
	/**
	 * Returns a new job
	 *
	 * @param $jobset_id Mixed: preferred job set id, or false for any set
	 * @param $reqMode String: request mode ( 'AtHome' or 'Internal' )
	 *
	 * @return Object: the job row object, or false if no jobs are available
	 */
	static function getNewJob( $jobset_id = false , $reqMode = 'AtHome'){
		global $wgNumberOfClientsPerJobSet, $wgJobTimeOut, $wgUser, $wgJobTypeConfig;
		$dbr = wfGetDB( DB_SLAVE );

		//it is always best to assign from the requested job set (since the client already has that set's data)
		if( $jobset_id ){
			$jobSet = WahJobManager::getJobSetById( $jobset_id );
			if(!$jobSet)
				return false; //not a valid job_set key (no jobs for you)

			//check if the jobset is an accepted job type
			if( WahJobManager::validateJobType( $jobSet->set_job_type, $reqMode) ){
				//try to get one from the current jobset
				$job = $dbr->selectRow( 'wah_jobqueue',
					'*',
					array(
						'job_set_id' =>  intval( $jobset_id ),
						'job_done_time IS NULL',
						'job_last_assigned_time < '.  $dbr->addQuotes( time() - $wgJobTimeOut )
					),
					__METHOD__
				);
				if( $job ){
					return WahJobManager::assignJob( $job );
				}
			}
		}

		//check if the user was already given a job that was never completed:
		$job = $dbr->selectRow( 'wah_jobqueue',
			'*',
			array(
				'job_last_assigned_user_id' => $wgUser->getId(),
				'job_done_time is NULL'
			),
		 	__METHOD__
		);
		if($job){
			$jobSet = WahJobManager::getJobSetById( $job->job_set_id );
			//make sure the job is okay to assign:
			if( WahJobManager::validateJobType( $jobSet->set_job_type, $reqMode) ){
				//re-assign the same job (don't update anything so it can timeout if they keep getting the same job)
				return WahJobManager::assignJob( $job , false, false);
			}
		}

		$conditionAry = array(
			'set_done_time IS NULL',
			'set_client_count < '.  $dbr->addQuotes( $wgNumberOfClientsPerJobSet )
		);

		//build a condition restricting the select to job types compatible with this request mode:
		$okyJobOrList = '';
		$or = '';
		foreach( $wgJobTypeConfig as $tKey => $tSet ){
			if( $tSet['assign' . $reqMode] ){
				$okyJobOrList .= $or . ' ( set_job_type = ' . $dbr->addQuotes( $tKey ) . ' )';
				$or = ' OR ';
			}
		}
		//no job types can be assigned in this request mode:
		if( $okyJobOrList == '' ){
			return false;
		}
		//else add the type restriction to the select conditions:
		$conditionAry[] = $okyJobOrList;

		//just do a normal select from jobset
		$jobSet = $dbr->selectRow( 'wah_jobset',
			'*',
			$conditionAry,
			__METHOD__
		);

		if( !$jobSet ){
			//no jobs:
			return false;
		}else{
			//get a job from the selected jobset and increment the set_client_count
			//(an unfinished job for this user would already have been re-assigned above, in case it was lost in transport)
			$job = $dbr->selectRow('wah_jobqueue', '*',
					array(
						'job_set_id' => $jobSet->set_id,
						'job_done_time IS NULL',
						'job_last_assigned_time IS NULL OR job_last_assigned_time < ' .
							 $dbr->addQuotes( time() - $wgJobTimeOut )
					),
					__METHOD__
			);
			if( !$job ){
				//no jobs in this jobset (return nojob)
				//@@todo we could "retry" since we will get here when a set has everything assigned in less than $wgJobTimeOut
				return false;
			}else{
				return WahJobManager::assignJob( $job , $jobSet);
			}
		}
	}
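
	/**
	 * Illustrative sketch only (not part of the extension): a caller requesting
	 * work from getNewJob() above. Passing the job set id the client is already
	 * working on is preferred, since the client already has that set's source
	 * data. The 'AtHome' request mode and the false return on an empty queue are
	 * taken from the code above; the wrapper name is an assumption.
	 */
	function exampleRequestNextJob( $preferredSetId = false ) {
		$job = WahJobManager::getNewJob( $preferredSetId, 'AtHome' );
		if ( !$job ) {
			return false; // no assignable jobs right now; the client should retry later
		}
		return $job;
	}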