public function batchQuery($object, $fields, $startDate = null, $fh = null)
 {
     $this->initSession();
     $myBulkApiConnection = new BulkApiClient($this->session->serverUrl, $this->session->sessionId);
     $myBulkApiConnection->setLoggingEnabled(false);
     $myBulkApiConnection->setCompressionEnabled(true);
     // create in-memory representation of the job
     $job = new JobInfo();
     $job->setObject($object);
     $job->setOpertion('query');
     $job->setContentType('CSV');
     $job->setConcurrencyMode('Parallel');
     $soql = "SELECT " . implode(',', $fields) . " FROM {$object}";
     if ($startDate != null) {
         $soql .= " WHERE LastModifiedDate >= {$startDate}";
     }
     echo 'Creating job...';
     $job = $myBulkApiConnection->createJob($job);
     echo 'ok' . PHP_EOL;
     echo 'Creating batch...';
     $batch = $myBulkApiConnection->createBatch($job, $soql);
     echo 'ok' . PHP_EOL;
     echo 'Closing job...';
     $myBulkApiConnection->updateJobState($job->getId(), 'Closed');
     echo 'ok' . PHP_EOL;
     $sleepTime = 4;
     echo 'Waiting for job to complete...';
     while ($batch->getState() == 'Queued' || $batch->getState() == 'InProgress') {
          // poll Salesforce for the status of the batch, backing off roughly 10% per attempt
          sleep((int) ceil($sleepTime *= 1.1));
          echo ".";
         $batch = $myBulkApiConnection->getBatchInfo($job->getId(), $batch->getId());
     }
     echo 'ok' . PHP_EOL;
     // get status of batches
     echo "Retrieving results...";
     $resultList = $myBulkApiConnection->getBatchResultList($job->getId(), $batch->getId());
     // retrieve queried data
     foreach ($resultList as $resultId) {
         $myBulkApiConnection->getBatchResult($job->getId(), $batch->getId(), $resultId, $fh);
     }
     echo 'ok' . PHP_EOL;
     if (isset($fh)) {
         $preview = stream_get_contents($fh, 32, 0);
         rewind($fh);
          if (strcasecmp($preview, 'Records not found for this query') == 0 || trim($preview) === '') {
             // return false if no records returned
             return false;
         } else {
             return true;
         }
     }
 }
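// A minimal usage sketch for batchQuery() above, assuming the surrounding class is
// instantiated as $exporter; the object, fields, date, and file names are illustrative only.
$fh = fopen('php://temp', 'r+');
$hasRows = $exporter->batchQuery('Contact', array('Id', 'Name', 'Email'), '2020-01-01T00:00:00Z', $fh);
if ($hasRows) {
    // batchQuery() rewinds the handle before returning, so the CSV can be read straight out
    file_put_contents('contacts.csv', stream_get_contents($fh));
}
fclose($fh);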
/**
 * Does the actual work of PUTting
 * for asyncPUT functions and forwarding
 * on to the results page.
 *
 * @param string $apiCall     Bulk API operation, e.g. "insert", "update", or "upsert"
 * @param string $extId       external ID field name, used with upsert
 * @param array  $fieldMap    mapping of Salesforce fields to CSV columns
 * @param array  $csvArray    parsed CSV rows, header row first
 * @param string $zipFile     ZIP payload for binary (ZIP_CSV/ZIP_XML) batches, if any
 * @param string $contentType explicit job content type; defaults to CSV or ZIP_CSV
 */
function putAsync($apiCall, $extId, $fieldMap, $csvArray, $zipFile, $contentType)
{
    $doingZip = isset($zipFile);
    if (!$doingZip && !($fieldMap && $csvArray && WorkbenchContext::get()->getDefaultObject())) {
        displayError("CSV file and field mapping not initialized or object not selected. Upload a new file and map fields.", true, true);
    } else {
        try {
            $job = new JobInfo();
            $job->setObject(WorkbenchContext::get()->getDefaultObject());
            $job->setOpertion($apiCall);
            $job->setContentType(isset($contentType) ? $contentType : ($doingZip ? "ZIP_CSV" : "CSV"));
            $job->setConcurrencyMode(WorkbenchConfig::get()->value("asyncConcurrencyMode"));
            if (WorkbenchConfig::get()->value("assignmentRuleHeader_assignmentRuleId")) {
                $job->setAssignmentRuleId(WorkbenchConfig::get()->value("assignmentRuleHeader_assignmentRuleId"));
            }
            if ($apiCall == "upsert" && isset($extId)) {
                $job->setExternalIdFieldName($extId);
            }
            $job = WorkbenchContext::get()->getAsyncBulkConnection()->createJob($job);
        } catch (Exception $e) {
            displayError($e->getMessage(), true, true);
        }
        if ($job->getId() == null) {
            displayError("No job id found. Aborting Bulk API operation.", true, true);
        }
        if ($doingZip) {
            try {
                WorkbenchContext::get()->getAsyncBulkConnection()->createBatch($job, $zipFile);
            } catch (Exception $e) {
                displayError($e->getMessage(), true, true);
            }
        } else {
            $csvHeader = array_shift($csvArray);
            $results = array();
            while ($csvArray) {
                $sObjects = array();
                $csvArrayBatch = array_splice($csvArray, 0, WorkbenchConfig::get()->value("asyncBatchSize"));
                $asyncCsv = array();
                $asyncCsvHeaderRow = array();
                foreach ($fieldMap as $salesforceField => $fieldMapArray) {
                    if (isset($fieldMapArray['csvField'])) {
                        if (isset($fieldMapArray['relationshipName']) && isset($fieldMapArray['relatedFieldName'])) {
                            $asyncCsvHeaderRow[] = ($fieldMapArray['isPolymorphic'] ? $fieldMapArray['relatedObjectName'] . ":" : "") . $fieldMapArray['relationshipName'] . "." . $fieldMapArray['relatedFieldName'];
                        } else {
                            if (isset($salesforceField)) {
                                $asyncCsvHeaderRow[] = $salesforceField;
                            }
                        }
                    }
                }
                $asyncCsv[] = $asyncCsvHeaderRow;
                for ($row = 0; $row < count($csvArrayBatch); $row++) {
                    //create new row
                    $asyncCsvRow = array();
                    foreach ($fieldMap as $salesforceField => $fieldMapArray) {
                        if (isset($fieldMapArray['csvField'])) {
                            $col = array_search($fieldMapArray['csvField'], $csvHeader);
                            if (isset($salesforceField) && isset($fieldMapArray['csvField'])) {
                                if ($csvArrayBatch[$row][$col] == "" && WorkbenchConfig::get()->value("fieldsToNull")) {
                                    $asyncCsvRow[] = "#N/A";
                                } else {
                                    $asyncCsvRow[] = $csvArrayBatch[$row][$col];
                                }
                            }
                        }
                    }
                    //add row to the array
                    $asyncCsv[] = $asyncCsvRow;
                }
                try {
                    WorkbenchContext::get()->getAsyncBulkConnection()->createBatch($job, convertArrayToCsv($asyncCsv));
                } catch (Exception $e) {
                    displayError($e->getMessage(), true, true);
                }
            }
        }
        try {
            $job = WorkbenchContext::get()->getAsyncBulkConnection()->updateJobState($job->getId(), "Closed");
        } catch (Exception $e) {
            displayError($e->getMessage(), true, true);
        }
        header("Location: asyncStatus.php?jobId=" . $job->getId());
    }
}
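// putAsync() above relies on a convertArrayToCsv() helper that is not shown in this
// listing; a minimal sketch of what such a helper might look like (the real Workbench
// implementation may differ):
function convertArrayToCsv($rows)
{
    // write each row through PHP's CSV encoder into an in-memory stream
    $stream = fopen('php://temp', 'r+');
    foreach ($rows as $row) {
        fputcsv($stream, $row);
    }
    rewind($stream);
    $csv = stream_get_contents($stream);
    fclose($stream);
    return $csv;
}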
 /**
  * Create a new Batch with the given data and associate with the given job
  *
  * @param JobInfo $job
  * @param string $data batch payload: delimited records, a SOQL query string (for query jobs), or raw ZIP bytes
  * @return BatchInfo
  */
 public function createBatch(JobInfo $job, $data)
 {
      if ($job->getContentType() == self::CSV) {
          $contentType = self::CONTENT_TYPE_CSV;
      } elseif ($job->getContentType() == self::XML) {
          $contentType = self::CONTENT_TYPE_XML;
      } elseif ($job->getContentType() == self::ZIP_CSV) {
          $contentType = self::CONTENT_TYPE_ZIP_CSV;
      } elseif ($job->getContentType() == self::ZIP_XML) {
          $contentType = self::CONTENT_TYPE_ZIP_XML;
      } else {
          throw new Exception("Invalid content type specified for batch");
      }
     return new BatchInfo($this->post($this->url(array(self::JOB, $job->getId(), self::BATCH)), $contentType, $data));
 }
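// Example call for the createBatch() method above (a sketch): $connection is an initialized
// BulkApiClient, $job was returned by createJob(), and $csvData holds the batch payload.
try {
    $batch = $connection->createBatch($job, $csvData);
    echo "Created batch " . $batch->getId() . " in state " . $batch->getState() . PHP_EOL;
} catch (Exception $e) {
    // a job whose content type is not CSV, XML, ZIP_CSV, or ZIP_XML ends up here
    echo $e->getMessage() . PHP_EOL;
}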
//
// If these required parameters are not provided, you will be redirected to index.php,
// which has a form to conveniently provide these parameters to any script in this folder.
if (!isset($_REQUEST["partnerApiEndpoint"]) || !isset($_REQUEST["sessionId"])) {
    header("Location: index.php");
    exit;
}
// STEP 2: INITIALIZE THE BULK API CLIENT
require_once '../BulkApiClient.php';
$myBulkApiConnection = new BulkApiClient($_REQUEST["partnerApiEndpoint"], $_REQUEST["sessionId"]);
$myBulkApiConnection->setLoggingEnabled(true);     // optional, but using here for demo purposes
$myBulkApiConnection->setCompressionEnabled(true); // optional, but recommended; defaults to true
// STEP 3: CREATE A NEW JOB
// create in-memory representation of the job
$job = new JobInfo();
$job->setObject("Contact");
$job->setOpertion("insert");
$job->setContentType("XML");
$job->setConcurrencyMode("Parallel");
//can also set to Serial
//$job->setExternalIdFieldName("My_Contact_External_Id");     //used with Upsert operations
//$job->setAssignmentRuleId("01Q60000000EPDU");               //optional for objects that support Assignment Rules
//send the job to the Bulk API and pass back returned JobInfo to the same variable
$job = $myBulkApiConnection->createJob($job);
// STEP 4. CREATE A NEW BATCH
//prep the data. normally this would be loaded from a file,
//but showing in plain text for demo purposes
$xmlData = "<sObjects xmlns=\"http://www.force.com/2009/06/asyncapi/dataload\">\n" . "    <sObject>\n" . "        <FirstName>Tom</FirstName>\n" . "        <LastName>Collins</LastName>\n" . "        <Email>tom@collins.com</Email>\n" . "    </sObject>\n" . "    <sObject>\n" . "        <FirstName>Mary</FirstName>\n" . "       <LastName>Martini</LastName>\n" . "        <Email nil=\"true\"/>\n" . "   </sObject>\n" . "</sObjects>";
$batch = $myBulkApiConnection->createBatch($job, $xmlData);
//add more and more batches.... (here, we will only do one)
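// The remaining steps of this insert sample would typically close the job and poll until
// the batch finishes, mirroring the batchQuery() helper earlier in this listing; a sketch
// using only methods already shown above:
// STEP 5 (sketch): CLOSE THE JOB
$myBulkApiConnection->updateJobState($job->getId(), "Closed");
// STEP 6 (sketch): POLL UNTIL THE BATCH LEAVES THE Queued/InProgress STATES
while ($batch->getState() == "Queued" || $batch->getState() == "InProgress") {
    sleep(5);
    $batch = $myBulkApiConnection->getBatchInfo($job->getId(), $batch->getId());
}
echo "Batch finished in state " . $batch->getState() . PHP_EOL;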
//
// If these required parameters are not provided, you will be redirected to index.php,
// which has a form to conveniently provide these parameters to any script in this folder.
if (!isset($_REQUEST["partnerApiEndpoint"]) || !isset($_REQUEST["sessionId"])) {
    header("Location: index.php");
    exit;
}
// STEP 2: INITIALIZE THE BULK API CLIENT
require_once '../BulkApiClient.php';
$myBulkApiConnection = new BulkApiClient($_REQUEST["partnerApiEndpoint"], $_REQUEST["sessionId"]);
$myBulkApiConnection->setLoggingEnabled(false);    // optional
$myBulkApiConnection->setCompressionEnabled(true); // optional, but recommended; defaults to true
// STEP 3: CREATE A NEW JOB
// create in-memory representation of the job
$job = new JobInfo();
// $job->setObject("Contact");
$job->setObject("Contact");
$job->setOpertion("query");
$job->setContentType("CSV");
$job->setConcurrencyMode("Parallel"); // can also set to Serial
//send the job to the Bulk API and pass back returned JobInfo to the same variable
$job = $myBulkApiConnection->createJob($job);
// STEP 4. CREATE A NEW BATCH
//prep the query and create a batch from it
$soql = "SELECT Id, Name, Email FROM Contact";
$batch = $myBulkApiConnection->createBatch($job, $soql);
//add more and more batches.... (here, we will only do one)
// STEP 5. CLOSE THE JOB
$myBulkApiConnection->updateJobState($job->getId(), "Closed");
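// To actually fetch the query output, the sample would continue by polling the batch and
// streaming each result set to a file, as the batchQuery() helper at the top of this
// listing does; a sketch using only methods already shown (file name is illustrative):
// STEP 6 (sketch): WAIT FOR THE BATCH, THEN STREAM EACH RESULT SET TO A FILE
while ($batch->getState() == "Queued" || $batch->getState() == "InProgress") {
    sleep(5);
    $batch = $myBulkApiConnection->getBatchInfo($job->getId(), $batch->getId());
}
$fh = fopen("contacts.csv", "w");
$resultList = $myBulkApiConnection->getBatchResultList($job->getId(), $batch->getId());
foreach ($resultList as $resultId) {
    $myBulkApiConnection->getBatchResult($job->getId(), $batch->getId(), $resultId, $fh);
}
fclose($fh);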
//
// If these required parameters are not provided, you will be redirected to index.php,
// which has a form to conveniently provide these parameters to any script in this folder.
if (!isset($_REQUEST["partnerApiEndpoint"]) || !isset($_REQUEST["sessionId"])) {
    header("Location: index.php");
    exit;
}
// STEP 2: INITIALIZE THE BULK API CLIENT
require_once '../BulkApiClient.php';
$myBulkApiConnection = new BulkApiClient($_REQUEST["partnerApiEndpoint"], $_REQUEST["sessionId"]);
$myBulkApiConnection->setLoggingEnabled(true);     // optional, but using here for demo purposes
$myBulkApiConnection->setCompressionEnabled(true); // optional, but recommended; defaults to true
// STEP 3: CREATE A NEW JOB
// create in-memory representation of the job
$job = new JobInfo();
$job->setObject("Document");
$job->setOpertion("insert");
$job->setContentType("ZIP_CSV");
$job->setConcurrencyMode("Parallel"); // can also set to Serial
//send the job to the Bulk API and pass back returned JobInfo to the same variable
$job = $myBulkApiConnection->createJob($job);
// STEP 4. CREATE A NEW BATCH
//prep the data. here the zip payload is loaded directly from a prebuilt sample archive
$zipData = file_get_contents("sampleCsvBinary.zip");
$batch = $myBulkApiConnection->createBatch($job, $zipData);
//add more and more batches.... (here, we will only do one)
// STEP 5. CLOSE THE JOB
$myBulkApiConnection->updateJobState($job->getId(), "Closed");
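// Note on sampleCsvBinary.zip (a sketch, not part of the original sample): for binary
// uploads the zip typically bundles a CSV manifest named request.txt whose rows reference
// the other files in the archive with a leading '#'. The sketch below builds such an
// archive with ZipArchive; file names, columns, and the folder ID placeholder are illustrative.
$zip = new ZipArchive();
$zip->open("sampleCsvBinary.zip", ZipArchive::CREATE | ZipArchive::OVERWRITE);
$zip->addFromString("request.txt", "Name,FolderId,Body\nLogo,YOUR_FOLDER_ID,#logo.png\n");
$zip->addFile("logo.png", "logo.png");
$zip->close();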
function queryAsync($queryRequest)
{
    if ($queryRequest->getQueryAction() == "QueryAll") {
        throw new WorkbenchHandledException("Including deleted and archived records not supported by Bulk Queries.");
    }
    $asyncConnection = WorkbenchContext::get()->getAsyncBulkConnection();
    $job = new JobInfo();
    // try to find actual object in FROM clause in case it is different from object set in form
    preg_match("/FROM\\s(\\w+)/i", $queryRequest->getSoqlQuery(), $fromMatches);
    // if we can't find it, go ahead and use the object from the form.
    // it's probably a malformed query anyway, but let SFDC error on it instead of Workbench
    $job->setObject(isset($fromMatches[1]) ? $fromMatches[1] : $queryRequest->getObject());
    $job->setOpertion("query");
    $job->setContentType(substr($queryRequest->getExportTo(), strlen("async_")));
    $job->setConcurrencyMode(WorkbenchConfig::get()->value("asyncConcurrencyMode"));
    try {
        $job = $asyncConnection->createJob($job);
    } catch (Exception $e) {
        if (strpos($e->getMessage(), 'Unable to find object') !== false || strpos($e->getMessage(), 'InvalidEntity') !== false) {
            throw new WorkbenchHandledException($e->getMessage());
        } else {
            throw $e;
        }
    }
    $asyncConnection->createBatch($job, $queryRequest->getSoqlQuery());
    $job = $asyncConnection->updateJobState($job->getId(), "Closed");
    header("Location: asyncStatus.php?jobId=" . $job->getId());
}
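// Illustration of the FROM-clause extraction performed inside queryAsync() above, using a
// hypothetical query string; $fromMatches[1] is what gets set as the job's object.
preg_match("/FROM\\s(\\w+)/i", "SELECT Id, Name FROM Account WHERE Name != null", $fromMatches);
echo $fromMatches[1] . PHP_EOL; // prints "Account"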