Skip to content

Commit 87e6ba7

Browse files
yash30201 and bshaffer authored
fix(GoogleCloudPlatform#1704): Refractored all BigQuery samples as functions (GoogleCloudPlatform#1720)
* refractored all samples * Fixed the scope of Exception class * Update bigquery/api/src/stream_row.php * Removed typo Co-authored-by: Brent Shaffer
1 parent 5a20734 commit 87e6ba7

30 files changed

+1018
-912
lines changed

bigquery/api/src/add_column_load_append.php

Lines changed: 45 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -21,55 +21,59 @@
2121
* @see https://github.com/GoogleCloudPlatform/php-docs-samples/tree/master/bigquery/api/README.md
2222
*/
2323

24-
// Include Google Cloud dependendencies using Composer
25-
require_once __DIR__ . '/../vendor/autoload.php';
26-
27-
if (count($argv) != 4) {
28-
return printf("Usage: php %s PROJECT_ID DATASET_ID TABLE_ID\n", __FILE__);
29-
}
30-
list($_, $projectId, $datasetId, $tableId) = $argv;
24+
namespace Google\Cloud\Samples\BigQuery;
3125

3226
# [START bigquery_add_column_load_append]
3327
use Google\Cloud\BigQuery\BigQueryClient;
3428

35-
/** Uncomment and populate these variables in your code */
36-
// $projectId = 'The Google project ID';
37-
// $datasetId = 'The BigQuery dataset ID';
38-
// $tableId = 'Table ID of the table in dataset';
39-
40-
$bigQuery = new BigQueryClient([
41-
'projectId' => $projectId,
42-
]);
43-
$dataset = $bigQuery->dataset($datasetId);
44-
$table = $dataset->table($tableId);
45-
// In this example, the existing table contains only the 'Name' and 'Title'.
46-
// A new column 'Description' gets added after load job.
29+
/**
30+
* Append a column using a load job.
31+
*
32+
* @param string $projectId The project Id of your Google Cloud Project.
33+
* @param string $datasetId The BigQuery dataset ID.
34+
* @param string $tableId The BigQuery table ID.
35+
*/
36+
function add_column_load_append(
37+
string $projectId,
38+
string $datasetId,
39+
string $tableId
40+
): void {
41+
$bigQuery = new BigQueryClient([
42+
'projectId' => $projectId,
43+
]);
44+
$dataset = $bigQuery->dataset($datasetId);
45+
$table = $dataset->table($tableId);
46+
// In this example, the existing table contains only the 'Name' and 'Title'.
47+
// A new column 'Description' gets added after load job.
4748

48-
$schema = [
49-
'fields' => [
50-
['name' => 'name', 'type' => 'string', 'mode' => 'nullable'],
51-
['name' => 'title', 'type' => 'string', 'mode' => 'nullable'],
52-
['name' => 'description', 'type' => 'string', 'mode' => 'nullable']
53-
]
54-
];
49+
$schema = [
50+
'fields' => [
51+
['name' => 'name', 'type' => 'string', 'mode' => 'nullable'],
52+
['name' => 'title', 'type' => 'string', 'mode' => 'nullable'],
53+
['name' => 'description', 'type' => 'string', 'mode' => 'nullable']
54+
]
55+
];
5556

56-
$source = __DIR__ . '/../test/data/test_data_extra_column.csv';
57+
$source = __DIR__ . '/../test/data/test_data_extra_column.csv';
5758

58-
// Set job configs
59-
$loadConfig = $table->load(fopen($source, 'r'));
60-
$loadConfig->destinationTable($table);
61-
$loadConfig->schema($schema);
62-
$loadConfig->schemaUpdateOptions(['ALLOW_FIELD_ADDITION']);
63-
$loadConfig->sourceFormat('CSV');
64-
$loadConfig->writeDisposition('WRITE_APPEND');
59+
// Set job configs
60+
$loadConfig = $table->load(fopen($source, 'r'));
61+
$loadConfig->destinationTable($table);
62+
$loadConfig->schema($schema);
63+
$loadConfig->schemaUpdateOptions(['ALLOW_FIELD_ADDITION']);
64+
$loadConfig->sourceFormat('CSV');
65+
$loadConfig->writeDisposition('WRITE_APPEND');
6566

66-
// Run the job with load config
67-
$job = $bigQuery->runJob($loadConfig);
67+
// Run the job with load config
68+
$job = $bigQuery->runJob($loadConfig);
6869

69-
// Print all the columns
70-
$columns = $table->info()['schema']['fields'];
71-
printf('The columns in the table are ');
72-
foreach ($columns as $column) {
73-
printf('%s ', $column['name']);
70+
// Print all the columns
71+
$columns = $table->info()['schema']['fields'];
72+
printf('The columns in the table are ');
73+
foreach ($columns as $column) {
74+
printf('%s ', $column['name']);
75+
}
7476
}
7577
# [END bigquery_add_column_load_append]
78+
require_once __DIR__ . '/../../../testing/sample_helpers.php';
79+
\Google\Cloud\Samples\execute_sample(__FILE__, __NAMESPACE__, $argv);

bigquery/api/src/add_column_query_append.php

Lines changed: 37 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -21,47 +21,51 @@
2121
* @see https://github.com/GoogleCloudPlatform/php-docs-samples/tree/master/bigquery/api/README.md
2222
*/
2323

24-
// Include Google Cloud dependendencies using Composer
25-
require_once __DIR__ . '/../vendor/autoload.php';
26-
27-
if (count($argv) != 4) {
28-
return printf("Usage: php %s PROJECT_ID DATASET_ID TABLE_ID\n", __FILE__);
29-
}
30-
list($_, $projectId, $datasetId, $tableId) = $argv;
24+
namespace Google\Cloud\Samples\BigQuery;
3125

3226
# [START bigquery_add_column_query_append]
3327
use Google\Cloud\BigQuery\BigQueryClient;
3428

35-
/** Uncomment and populate these variables in your code */
36-
// $projectId = 'The Google project ID';
37-
// $datasetId = 'The BigQuery dataset ID';
38-
// $tableId = 'Table ID of the table in dataset';
39-
40-
$bigQuery = new BigQueryClient([
41-
'projectId' => $projectId,
42-
]);
43-
$dataset = $bigQuery->dataset($datasetId);
44-
$table = $dataset->table($tableId);
29+
/**
30+
* Append a column using a query job.
31+
*
32+
* @param string $projectId The project Id of your Google Cloud Project.
33+
* @param string $datasetId The BigQuery dataset ID.
34+
* @param string $tableId The BigQuery table ID.
35+
*/
36+
function add_column_query_append(
37+
string $projectId,
38+
string $datasetId,
39+
string $tableId
40+
): void {
41+
$bigQuery = new BigQueryClient([
42+
'projectId' => $projectId,
43+
]);
44+
$dataset = $bigQuery->dataset($datasetId);
45+
$table = $dataset->table($tableId);
4546

46-
// In this example, the existing table contains only the 'Name' and 'Title'.
47-
// A new column 'Description' gets added after the query job.
47+
// In this example, the existing table contains only the 'Name' and 'Title'.
48+
// A new column 'Description' gets added after the query job.
4849

49-
// Define query
50-
$query = sprintf('SELECT "John" as name, "Unknown" as title, "Dummy person" as description;');
50+
// Define query
51+
$query = sprintf('SELECT "John" as name, "Unknown" as title, "Dummy person" as description;');
5152

52-
// Set job configs
53-
$queryJobConfig = $bigQuery->query($query);
54-
$queryJobConfig->destinationTable($table);
55-
$queryJobConfig->schemaUpdateOptions(['ALLOW_FIELD_ADDITION']);
56-
$queryJobConfig->writeDisposition('WRITE_APPEND');
53+
// Set job configs
54+
$queryJobConfig = $bigQuery->query($query);
55+
$queryJobConfig->destinationTable($table);
56+
$queryJobConfig->schemaUpdateOptions(['ALLOW_FIELD_ADDITION']);
57+
$queryJobConfig->writeDisposition('WRITE_APPEND');
5758

58-
// Run query with query job configuration
59-
$bigQuery->runQuery($queryJobConfig);
59+
// Run query with query job configuration
60+
$bigQuery->runQuery($queryJobConfig);
6061

61-
// Print all the columns
62-
$columns = $table->info()['schema']['fields'];
63-
printf('The columns in the table are ');
64-
foreach ($columns as $column) {
65-
printf('%s ', $column['name']);
62+
// Print all the columns
63+
$columns = $table->info()['schema']['fields'];
64+
printf('The columns in the table are ');
65+
foreach ($columns as $column) {
66+
printf('%s ', $column['name']);
67+
}
6668
}
6769
# [END bigquery_add_column_query_append]
70+
require_once __DIR__ . '/../../../testing/sample_helpers.php';
71+
\Google\Cloud\Samples\execute_sample(__FILE__, __NAMESPACE__, $argv);

bigquery/api/src/browse_table.php

Lines changed: 35 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -21,42 +21,46 @@
2121
* @see https://github.com/GoogleCloudPlatform/php-docs-samples/tree/master/bigquery/api/README.md
2222
*/
2323

24-
// Include Google Cloud dependendencies using Composer
25-
require_once __DIR__ . '/../vendor/autoload.php';
26-
27-
if (count($argv) < 4 || count($argv) > 5) {
28-
return printf("Usage: php %s PROJECT_ID DATASET_ID TABLE_ID [START_INDEX]\n", __FILE__);
29-
}
30-
list($_, $projectId, $datasetId, $tableId) = $argv;
31-
$startIndex = isset($argv[4]) ? $argv[4] : 0;
24+
namespace Google\Cloud\Samples\BigQuery;
3225

3326
# [START bigquery_browse_table]
3427
use Google\Cloud\BigQuery\BigQueryClient;
3528

36-
/** Uncomment and populate these variables in your code */
37-
// $projectId = 'The Google project ID';
38-
// $datasetId = 'The BigQuery dataset ID';
39-
// $tableId = 'The BigQuery table ID';
40-
// $startIndex = 0;
41-
42-
$maxResults = 10;
29+
/**
30+
* Browses the given table for data
31+
*
32+
* @param string $projectId The project Id of your Google Cloud Project.
33+
* @param string $datasetId The BigQuery dataset ID.
34+
* @param string $tableId The BigQuery table ID.
35+
* @param int $startIndex Zero-based index of the starting row.
36+
*/
37+
function browse_table(
38+
string $projectId,
39+
string $datasetId,
40+
string $tableId,
41+
int $startIndex = 0
42+
): void {
43+
// Query options
44+
$maxResults = 10;
45+
$options = [
46+
'maxResults' => $maxResults,
47+
'startIndex' => $startIndex
48+
];
4349

44-
$options = [
45-
'maxResults' => $maxResults,
46-
'startIndex' => $startIndex
47-
];
48-
$bigQuery = new BigQueryClient([
49-
'projectId' => $projectId,
50-
]);
51-
$dataset = $bigQuery->dataset($datasetId);
52-
$table = $dataset->table($tableId);
53-
$numRows = 0;
54-
foreach ($table->rows($options) as $row) {
55-
print('---');
56-
foreach ($row as $column => $value) {
57-
printf('%s: %s' . PHP_EOL, $column, $value);
50+
$bigQuery = new BigQueryClient([
51+
'projectId' => $projectId,
52+
]);
53+
$dataset = $bigQuery->dataset($datasetId);
54+
$table = $dataset->table($tableId);
55+
$numRows = 0;
56+
foreach ($table->rows($options) as $row) {
57+
print('---');
58+
foreach ($row as $column => $value) {
59+
printf('%s: %s' . PHP_EOL, $column, $value);
60+
}
61+
$numRows++;
5862
}
59-
$numRows++;
6063
}
6164
# [END bigquery_browse_table]
62-
return $numRows;
65+
require_once __DIR__ . '/../../../testing/sample_helpers.php';
66+
\Google\Cloud\Samples\execute_sample(__FILE__, __NAMESPACE__, $argv);

bigquery/api/src/copy_table.php

Lines changed: 40 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -21,47 +21,52 @@
2121
* @see https://github.com/GoogleCloudPlatform/php-docs-samples/tree/master/bigquery/api/README.md
2222
*/
2323

24-
// Include Google Cloud dependendencies using Composer
25-
require_once __DIR__ . '/../vendor/autoload.php';
26-
27-
if (count($argv) != 5) {
28-
return printf("Usage: php %s PROJECT_ID DATASET_ID SOURCE_TABLE_ID DESTINATION_TABLE_ID\n", __FILE__);
29-
}
30-
list($_, $projectId, $datasetId, $sourceTableId, $destinationTableId) = $argv;
24+
namespace Google\Cloud\Samples\BigQuery;
3125

3226
# [START bigquery_copy_table]
3327
use Google\Cloud\BigQuery\BigQueryClient;
3428
use Google\Cloud\Core\ExponentialBackoff;
3529

36-
/** Uncomment and populate these variables in your code */
37-
// $projectId = 'The Google project ID';
38-
// $datasetId = 'The BigQuery dataset ID';
39-
// $sourceTableId = 'The BigQuery table ID to copy from';
40-
// $destinationTableId = 'The BigQuery table ID to copy to';
41-
42-
$bigQuery = new BigQueryClient([
43-
'projectId' => $projectId,
44-
]);
45-
$dataset = $bigQuery->dataset($datasetId);
46-
$sourceTable = $dataset->table($sourceTableId);
47-
$destinationTable = $dataset->table($destinationTableId);
48-
$copyConfig = $sourceTable->copy($destinationTable);
49-
$job = $sourceTable->runJob($copyConfig);
30+
/**
31+
* Copy the contents of table from source table to destination table.
32+
*
33+
* @param string $projectId The project Id of your Google Cloud Project.
34+
* @param string $datasetId The BigQuery dataset ID.
35+
* @param string $sourceTableId Source tableId in dataset.
36+
* @param string $destinationTableId Destination tableId in dataset.
37+
*/
38+
function copy_table(
39+
string $projectId,
40+
string $datasetId,
41+
string $sourceTableId,
42+
string $destinationTableId
43+
): void {
44+
$bigQuery = new BigQueryClient([
45+
'projectId' => $projectId,
46+
]);
47+
$dataset = $bigQuery->dataset($datasetId);
48+
$sourceTable = $dataset->table($sourceTableId);
49+
$destinationTable = $dataset->table($destinationTableId);
50+
$copyConfig = $sourceTable->copy($destinationTable);
51+
$job = $sourceTable->runJob($copyConfig);
5052

51-
// poll the job until it is complete
52-
$backoff = new ExponentialBackoff(10);
53-
$backoff->execute(function () use ($job) {
54-
print('Waiting for job to complete' . PHP_EOL);
55-
$job->reload();
56-
if (!$job->isComplete()) {
57-
throw new Exception('Job has not yet completed', 500);
53+
// poll the job until it is complete
54+
$backoff = new ExponentialBackoff(10);
55+
$backoff->execute(function () use ($job) {
56+
print('Waiting for job to complete' . PHP_EOL);
57+
$job->reload();
58+
if (!$job->isComplete()) {
59+
throw new \Exception('Job has not yet completed', 500);
60+
}
61+
});
62+
// check if the job has errors
63+
if (isset($job->info()['status']['errorResult'])) {
64+
$error = $job->info()['status']['errorResult']['message'];
65+
printf('Error running job: %s' . PHP_EOL, $error);
66+
} else {
67+
print('Table copied successfully' . PHP_EOL);
5868
}
59-
});
60-
// check if the job has errors
61-
if (isset($job->info()['status']['errorResult'])) {
62-
$error = $job->info()['status']['errorResult']['message'];
63-
printf('Error running job: %s' . PHP_EOL, $error);
64-
} else {
65-
print('Table copied successfully' . PHP_EOL);
6669
}
6770
# [END bigquery_copy_table]
71+
require_once __DIR__ . '/../../../testing/sample_helpers.php';
72+
\Google\Cloud\Samples\execute_sample(__FILE__, __NAMESPACE__, $argv);

bigquery/api/src/create_dataset.php

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -21,25 +21,25 @@
2121
* @see https://github.com/GoogleCloudPlatform/php-docs-samples/tree/master/bigquery/api/README.md
2222
*/
2323

24-
// Include Google Cloud dependendencies using Composer
25-
require_once __DIR__ . '/../vendor/autoload.php';
26-
27-
if (count($argv) != 3) {
28-
return printf("Usage: php %s PROJECT_ID DATASET_ID\n", __FILE__);
29-
}
30-
list($_, $projectId, $datasetId) = $argv;
24+
namespace Google\Cloud\Samples\BigQuery;
3125

3226
# [START bigquery_create_dataset]
3327
use Google\Cloud\BigQuery\BigQueryClient;
3428

35-
/** Uncomment and populate these variables in your code */
36-
// $projectId = 'The Google project ID';
37-
// $datasetId = 'The BigQuery dataset ID';
38-
39-
$bigQuery = new BigQueryClient([
40-
'projectId' => $projectId,
41-
]);
42-
$dataset = $bigQuery->createDataset($datasetId);
43-
printf('Created dataset %s' . PHP_EOL, $datasetId);
29+
/**
30+
* Creates a dataset with the given dataset ID.
31+
*
32+
* @param string $projectId The project Id of your Google Cloud Project.
33+
* @param string $datasetId The BigQuery dataset ID.
34+
*/
35+
function create_dataset(string $projectId, string $datasetId): void
36+
{
37+
$bigQuery = new BigQueryClient([
38+
'projectId' => $projectId,
39+
]);
40+
$dataset = $bigQuery->createDataset($datasetId);
41+
printf('Created dataset %s' . PHP_EOL, $datasetId);
42+
}
4443
# [END bigquery_create_dataset]
45-
return $dataset;
44+
require_once __DIR__ . '/../../../testing/sample_helpers.php';
45+
\Google\Cloud\Samples\execute_sample(__FILE__, __NAMESPACE__, $argv);

0 commit comments

Comments (0)