 // Include Google Cloud dependencies using Composer
|
25 | 25 | require_once __DIR__ . '/../vendor/autoload.php';
|
26 | 26 |
|
27 |
| -if (count($argv) < 6 || count($argv) > 7) { |
28 |
| - return printf("Usage: php %s PROJECT_ID DATASET_ID TABLE_ID BUCKET_NAME OBJECT_NAME [FORMAT]\n", __FILE__); |
| 27 | +if (count($argv) != 5) { |
| 28 | + return printf("Usage: php %s PROJECT_ID DATASET_ID TABLE_ID BUCKET_NAME\n", __FILE__); |
29 | 29 | }
|
30 | 30 |
|
31 |
| -list($_, $projectId, $datasetId, $tableId, $bucketName, $objectName) = $argv; |
32 |
| -$format = isset($argv[6]) ? $argv[6] : 'csv'; |
| 31 | +list($_, $projectId, $datasetId, $tableId, $bucketName) = $argv; |
33 | 32 |
|
34 | 33 | # [START bigquery_extract_table]
|
35 | 34 | use Google\Cloud\BigQuery\BigQueryClient;
|
36 |
| -use Google\Cloud\Storage\StorageClient; |
37 |
| -use Google\Cloud\Core\ExponentialBackoff; |
38 | 35 |
|
39 | 36 | /** Uncomment and populate these variables in your code */
|
40 | 37 | // $projectId = 'The Google project ID';
|
41 | 38 | // $datasetId = 'The BigQuery dataset ID';
|
42 | 39 | // $tableId = 'The BigQuery table ID';
|
43 | 40 | // $bucketName = 'The Cloud Storage bucket Name';
|
44 |
| -// $objectName = 'The Cloud Storage object Name'; |
45 |
| -// $format = 'The extract format, either "csv" or "json"'; |
46 | 41 |
|
47 | 42 | $bigQuery = new BigQueryClient([
|
48 | 43 | 'projectId' => $projectId,
|
49 | 44 | ]);
|
50 | 45 | $dataset = $bigQuery->dataset($datasetId);
|
51 | 46 | $table = $dataset->table($tableId);
|
52 |
| -// load the storage object |
53 |
| -$storage = new StorageClient([ |
54 |
| - 'projectId' => $projectId, |
55 |
| -]); |
56 |
| -$destinationObject = $storage->bucket($bucketName)->object($objectName); |
57 |
| -// create the extract job |
58 |
| -$options = ['destinationFormat' => $format]; |
59 |
| -$extractConfig = $table->extract($destinationObject, $options); |
60 |
| -$job = $table->runJob($extractConfig); |
61 |
| -// poll the job until it is complete |
62 |
| -$backoff = new ExponentialBackoff(10); |
63 |
| -$backoff->execute(function () use ($job) { |
64 |
| - print('Waiting for job to complete' . PHP_EOL); |
65 |
| - $job->reload(); |
66 |
| - if (!$job->isComplete()) { |
67 |
| - throw new Exception('Job has not yet completed', 500); |
68 |
| - } |
69 |
| -}); |
70 |
| -// check if the job has errors |
71 |
| -if (isset($job->info()['status']['errorResult'])) { |
72 |
| - $error = $job->info()['status']['errorResult']['message']; |
73 |
| - printf('Error running job: %s' . PHP_EOL, $error); |
74 |
| -} else { |
75 |
| - print('Data extracted successfully' . PHP_EOL); |
76 |
| -} |
| 47 | +$destinationUri = "gs://{$bucketName}/{$tableId}.json"; |
| 48 | +// Define the format to use. If the format is not specified, 'CSV' will be used. |
| 49 | +$format = 'NEWLINE_DELIMITED_JSON'; |
| 50 | +// Create the extract job |
| 51 | +$extractConfig = $table->extract($destinationUri)->destinationFormat($format); |
| 52 | +// Run the job |
| 53 | +$job = $table->runJob($extractConfig); // Waits for the job to complete |
| 54 | +printf('Exported %s to %s' . PHP_EOL, $table->id(), $destinationUri); |
77 | 55 | # [END bigquery_extract_table]
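
For reference, the updated sample is invoked as php extract_table.php PROJECT_ID DATASET_ID TABLE_ID BUCKET_NAME, matching the new usage string. The same extract-job chain also covers the CSV case mentioned in the comment above ('CSV' is the default when no destinationFormat is set). A minimal sketch under that assumption, reusing $table, $bucketName and $tableId from the sample; $csvUri, $csvConfig and $csvJob are illustrative names, not part of the change:

// Sketch only (not part of this PR): export the same table as CSV.
// Assumes $table, $bucketName and $tableId are set as in the code above.
$csvUri = "gs://{$bucketName}/{$tableId}.csv";
$csvConfig = $table->extract($csvUri)->destinationFormat('CSV'); // 'CSV' is also the default
$csvJob = $table->runJob($csvConfig); // runJob() waits for the extract job to finish
printf('Exported %s to %s' . PHP_EOL, $table->id(), $csvUri);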