function scan_dir_files($logsdir, $wildcard, $subdir_wildcard, $write_dir, $dbhost, $dbport, $dbuser, $dbpass, $processid)
{
    if (!is_dir($logsdir)) {
        echo "Dir: " . $logsdir . " does not exist or is not a directory\n";
        exit(1);
    }
    $dirHandle = opendir($logsdir);
    // List entries in the logs directory, skipping . and .. so the
    // recursive subdirectory scan below cannot recurse into itself
    $file_list = array();
    while (($file = readdir($dirHandle)) !== false) {
        if ($file === '.' || $file === '..') {
            continue;
        }
        $file_list[] = $file;
    }
    closedir($dirHandle);
    $filtered_file_list = preg_grep($wildcard, $file_list);
    if (count($filtered_file_list) > 0) {
        $files_to_retrieve = filter_file_list($filtered_file_list, $processid, $dbhost, $dbport, $dbuser, $dbpass);
        foreach ($files_to_retrieve as $file) {
            echo "Copying " . $file . "\n";
            copy($logsdir . '/' . $file, $write_dir . '/' . $file);
        }
    } else {
        echo "No files to copy from the logs dir(" . $logsdir . ").\n";
    }
    $filtered_subdir_list = preg_grep($subdir_wildcard, $file_list);
    foreach ($filtered_subdir_list as $subdir) {
        if (is_dir($logsdir . "/" . $subdir)) {
            echo "Scanning " . $subdir . "\n";
            scan_dir_files($logsdir . "/" . $subdir, $wildcard, $subdir_wildcard, $write_dir, $dbhost, $dbport, $dbuser, $dbpass, $processid);
        }
    }
}
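
/* Example invocation (hypothetical paths and credentials; note that the
   "wildcard" arguments are PCRE patterns, since scan_dir_files() filters
   with preg_grep(), not shell globs):

   scan_dir_files('/var/log/app', '/\.log$/', '/^batch_/', '/tmp/staging',
       'localhost', 3306, 'loguser', 'logpass', $processid);
*/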
// Provided that the bucket was created successfully...
if ($create_bucket_response->isOK()) {
    /* Since AWS follows an "eventual consistency" model, sleep and poll
       until the bucket is available. */
    $exists = $s3->if_bucket_exists($bucket);
    while (!$exists) {
        // Not yet? Sleep for 1 second, then check again
        sleep(1);
        $exists = $s3->if_bucket_exists($bucket);
    }
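    /* Note: the loop above waits forever if the bucket never appears. A
       bounded variant (a sketch using the same if_bucket_exists() call and
       an arbitrary 30-attempt cap) would look like:

       $attempts = 0;
       while (!$s3->if_bucket_exists($bucket) && $attempts++ < 30) {
           sleep(1);
       }
       if ($attempts >= 30) {
           echo "Bucket never became available\n";
           exit(1);
       }
    */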
    /*
    	Get a list of files to upload. We'll use some helper functions we've
    	defined below. This assumes that you have a directory called "test_files"
    	that actually contains some files you want to upload.
    */
    $list_of_files = filter_file_list(glob('./test_files/*'));
    // Prepare to hold the individual filenames
    $individual_filenames = array();
    // Loop over the list, referring to a single file at a time
    foreach ($list_of_files as $file) {
        // Grab only the filename part of the path. basename() handles both
        // '/' and '\' separators; exploding on DIRECTORY_SEPARATOR breaks on
        // Windows, where glob() still returns forward-slash paths.
        $filename = basename($file);
        // Store the filename for later use
        $individual_filenames[] = $filename;
        /* Prepare to upload the file to our new S3 bucket. Add this
           request to a queue that we won't execute quite yet. */
        $s3->batch()->create_object($bucket, $filename, array('fileUpload' => $file));
    }
    /* Execute our queue of batched requests. This may take a few seconds to a
       few minutes depending on the size of the files and how fast your upload
       speeds are. */
    $upload_responses = $s3->batch()->send();
}

// Connect and log in to the FTP server
$ftpconn = ftp_connect($ftphost);
if (!$ftpconn || !@ftp_login($ftpconn, $ftpuser, $ftppass)) {
    echo "could not login to ftp server " . $ftphost . " with u/p " . $ftpuser . "/" . $ftppass . "\n";
    exit(1);
}
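// Use passive mode so data connections work through NAT and firewalls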
ftp_pasv($ftpconn, true);
if ($remote_dir) {
    ftp_chdir($ftpconn, $remote_dir);
}
// Get a filtered file list from FTP
echo "Getting file list...";
$file_list = ftp_nlist($ftpconn, '.');
echo "Done\n";
if (!$file_list || count($file_list) == 0) {
    echo "No files to download from FTP. FTP directory is empty.\n";
    exit(0);
}
echo "Found " . count($file_list) . " files. Filtering by wildcard: " . $ftpwildcard . "\n";
$filtered_file_list = preg_grep($ftpwildcard, $file_list);
if (count($filtered_file_list) == 0) {
    echo "No files to download from FTP.\n";
    exit(0);
}
echo "Found " . count($filtered_file_list) . " files\n";
$files_to_retrieve = filter_file_list($filtered_file_list, $processid, $dbhost, $dbport, $dbuser, $dbpass);
echo count($files_to_retrieve) . " new files found\nDownloading:\n";
foreach ($files_to_retrieve as $file) {
    echo $prefix . $file . "\n";
    if (!ftp_get($ftpconn, $write_dir . "/" . $prefix . $file, $file, FTP_BINARY)) {
        echo "Failed to download " . $file . "\n";
        exit(1);
    }
}
ftp_close($ftpconn);
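
/* filter_file_list() is called above but not defined in this excerpt; it is
   assumed to drop files that an earlier run already processed. The sketch
   below is a hypothetical illustration of that idea, not the real helper:
   the `filetracker` database and `processed_files` table names are made up,
   and PDO/MySQL is an arbitrary choice. */
function filter_file_list_sketch($file_list, $processid, $dbhost, $dbport, $dbuser, $dbpass)
{
    $pdo = new PDO("mysql:host={$dbhost};port={$dbport};dbname=filetracker", $dbuser, $dbpass);
    $stmt = $pdo->prepare(
        'SELECT COUNT(*) FROM processed_files WHERE process_id = ? AND filename = ?'
    );
    $new_files = array();
    foreach ($file_list as $file) {
        $stmt->execute(array($processid, $file));
        if ($stmt->fetchColumn() == 0) {
            $new_files[] = $file; // not recorded yet, so keep it
        }
    }
    return $new_files;
}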