Skip to content

Commit a1e6bee

Browse files
authored
fix: storage and bigquery nightlies (GoogleCloudPlatform#1222)
1 parent b13373a commit a1e6bee

8 files changed

+44
-38
lines changed

bigquery/api/src/import_from_storage_csv.php

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,25 +23,27 @@
2323

2424
// Include Google Cloud dependencies using Composer
2525
require_once __DIR__ . '/../vendor/autoload.php';
26-
if (count($argv) != 3) {
27-
return printf("Usage: php %s PROJECT_ID DATASET_ID\n", __FILE__);
26+
if (count($argv) < 3 || count($argv) > 4) {
27+
return printf("Usage: php %s PROJECT_ID DATASET_ID [TABLE_ID]\n", __FILE__);
2828
}
2929

3030
list($_, $projectId, $datasetId) = $argv;
31+
$tableId = isset($argv[3]) ? $argv[3] : 'us_states';
3132
# [START bigquery_load_table_gcs_csv]
3233
use Google\Cloud\BigQuery\BigQueryClient;
3334
use Google\Cloud\Core\ExponentialBackoff;
3435

3536
/** Uncomment and populate these variables in your code */
3637
// $projectId = 'The Google project ID';
3738
// $datasetId = 'The BigQuery dataset ID';
39+
// $tableId = 'us_states';
3840

3941
// instantiate the bigquery table service
4042
$bigQuery = new BigQueryClient([
4143
'projectId' => $projectId,
4244
]);
4345
$dataset = $bigQuery->dataset($datasetId);
44-
$table = $dataset->table('us_states');
46+
$table = $dataset->table($tableId);
4547

4648
// create the import job
4749
$gcsUri = 'gs://cloud-samples-data/bigquery/us-states/us-states.csv';

bigquery/api/src/import_from_storage_csv_autodetect.php

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,25 +23,27 @@
2323

2424
// Include Google Cloud dependencies using Composer
2525
require_once __DIR__ . '/../vendor/autoload.php';
26-
if (count($argv) != 3) {
27-
return printf("Usage: php %s PROJECT_ID DATASET_ID\n", __FILE__);
26+
if (count($argv) < 3 || count($argv) > 4) {
27+
return printf("Usage: php %s PROJECT_ID DATASET_ID [TABLE_ID]\n", __FILE__);
2828
}
2929

3030
list($_, $projectId, $datasetId) = $argv;
31+
$tableId = isset($argv[3]) ? $argv[3] : 'us_states';
3132
# [START bigquery_load_table_gcs_csv_autodetect]
3233
use Google\Cloud\BigQuery\BigQueryClient;
3334
use Google\Cloud\Core\ExponentialBackoff;
3435

3536
/** Uncomment and populate these variables in your code */
3637
// $projectId = 'The Google project ID';
3738
// $datasetId = 'The BigQuery dataset ID';
39+
// $tableId = 'us_states';
3840

3941
// instantiate the bigquery table service
4042
$bigQuery = new BigQueryClient([
4143
'projectId' => $projectId,
4244
]);
4345
$dataset = $bigQuery->dataset($datasetId);
44-
$table = $dataset->table('us_states');
46+
$table = $dataset->table($tableId);
4547

4648
// create the import job
4749
$gcsUri = 'gs://cloud-samples-data/bigquery/us-states/us-states.csv';

bigquery/api/src/import_from_storage_json.php

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,25 +23,27 @@
2323

2424
// Include Google Cloud dependencies using Composer
2525
require_once __DIR__ . '/../vendor/autoload.php';
26-
if (count($argv) != 3) {
27-
return printf("Usage: php %s PROJECT_ID DATASET_ID\n", __FILE__);
26+
if (count($argv) < 3 || count($argv) > 4) {
27+
return printf("Usage: php %s PROJECT_ID DATASET_ID [TABLE_ID]\n", __FILE__);
2828
}
2929

3030
list($_, $projectId, $datasetId) = $argv;
31+
$tableId = isset($argv[3]) ? $argv[3] : 'us_states';
3132
# [START bigquery_load_table_gcs_json]
3233
use Google\Cloud\BigQuery\BigQueryClient;
3334
use Google\Cloud\Core\ExponentialBackoff;
3435

3536
/** Uncomment and populate these variables in your code */
3637
// $projectId = 'The Google project ID';
3738
// $datasetId = 'The BigQuery dataset ID';
39+
// $tableId = 'us_states';
3840

3941
// instantiate the bigquery table service
4042
$bigQuery = new BigQueryClient([
4143
'projectId' => $projectId,
4244
]);
4345
$dataset = $bigQuery->dataset($datasetId);
44-
$table = $dataset->table('us_states');
46+
$table = $dataset->table($tableId);
4547

4648
// create the import job
4749
$gcsUri = 'gs://cloud-samples-data/bigquery/us-states/us-states.json';

bigquery/api/src/import_from_storage_json_autodetect.php

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,25 +23,27 @@
2323

2424
// Include Google Cloud dependencies using Composer
2525
require_once __DIR__ . '/../vendor/autoload.php';
26-
if (count($argv) != 3) {
27-
return printf("Usage: php %s PROJECT_ID DATASET_ID\n", __FILE__);
26+
if (count($argv) < 3 || count($argv) > 4) {
27+
return printf("Usage: php %s PROJECT_ID DATASET_ID [TABLE_ID]\n", __FILE__);
2828
}
2929

3030
list($_, $projectId, $datasetId) = $argv;
31+
$tableId = isset($argv[3]) ? $argv[3] : 'us_states';
3132
# [START bigquery_load_table_gcs_json_autodetect]
3233
use Google\Cloud\BigQuery\BigQueryClient;
3334
use Google\Cloud\Core\ExponentialBackoff;
3435

3536
/** Uncomment and populate these variables in your code */
3637
// $projectId = 'The Google project ID';
3738
// $datasetId = 'The BigQuery dataset ID';
39+
// $tableId = 'us_states';
3840

3941
// instantiate the bigquery table service
4042
$bigQuery = new BigQueryClient([
4143
'projectId' => $projectId,
4244
]);
4345
$dataset = $bigQuery->dataset($datasetId);
44-
$table = $dataset->table('us_states');
46+
$table = $dataset->table($tableId);
4547

4648
// create the import job
4749
$gcsUri = 'gs://cloud-samples-data/bigquery/us-states/us-states.json';

bigquery/api/src/import_from_storage_orc.php

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,25 +23,27 @@
2323

2424
// Include Google Cloud dependencies using Composer
2525
require_once __DIR__ . '/../vendor/autoload.php';
26-
if (count($argv) != 3) {
27-
return printf("Usage: php %s PROJECT_ID DATASET_ID\n", __FILE__);
26+
if (count($argv) < 3 || count($argv) > 4) {
27+
return printf("Usage: php %s PROJECT_ID DATASET_ID [TABLE_ID]\n", __FILE__);
2828
}
2929

3030
list($_, $projectId, $datasetId) = $argv;
31+
$tableId = isset($argv[3]) ? $argv[3] : 'us_states';
3132
# [START bigquery_load_table_gcs_orc]
3233
use Google\Cloud\BigQuery\BigQueryClient;
3334
use Google\Cloud\Core\ExponentialBackoff;
3435

3536
/** Uncomment and populate these variables in your code */
3637
// $projectId = 'The Google project ID';
3738
// $datasetId = 'The BigQuery dataset ID';
39+
// $tableId = 'us_states';
3840

3941
// instantiate the bigquery table service
4042
$bigQuery = new BigQueryClient([
4143
'projectId' => $projectId,
4244
]);
4345
$dataset = $bigQuery->dataset($datasetId);
44-
$table = $dataset->table('us_states');
46+
$table = $dataset->table($tableId);
4547

4648
// create the import job
4749
$gcsUri = 'gs://cloud-samples-data/bigquery/us-states/us-states.orc';

bigquery/api/src/import_from_storage_parquet.php

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,25 +23,27 @@
2323

2424
// Include Google Cloud dependencies using Composer
2525
require_once __DIR__ . '/../vendor/autoload.php';
26-
if (count($argv) != 3) {
27-
return printf("Usage: php %s PROJECT_ID DATASET_ID\n", __FILE__);
26+
if (count($argv) < 3 || count($argv) > 4) {
27+
return printf("Usage: php %s PROJECT_ID DATASET_ID [TABLE_ID]\n", __FILE__);
2828
}
2929

3030
list($_, $projectId, $datasetId) = $argv;
31+
$tableId = isset($argv[3]) ? $argv[3] : 'us_states';
3132
# [START bigquery_load_table_gcs_parquet]
3233
use Google\Cloud\BigQuery\BigQueryClient;
3334
use Google\Cloud\Core\ExponentialBackoff;
3435

3536
/** Uncomment and populate these variables in your code */
3637
// $projectId = 'The Google project ID';
3738
// $datasetId = 'The BigQuery dataset ID';
39+
// $tableId = 'us_states';
3840

3941
// instantiate the bigquery table service
4042
$bigQuery = new BigQueryClient([
4143
'projectId' => $projectId,
4244
]);
4345
$dataset = $bigQuery->dataset($datasetId);
44-
$table = $dataset->table('us_states');
46+
$table = $dataset->table($tableId);
4547

4648
// create the import job
4749
$gcsUri = 'gs://cloud-samples-data/bigquery/us-states/us-states.parquet';

bigquery/api/test/bigqueryTest.php

Lines changed: 3 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,6 @@ class FunctionsTest extends TestCase
3333

3434
private static $datasetId;
3535
private static $dataset;
36-
private static $tempTables = [];
3736

3837
public static function setUpBeforeClass()
3938
{
@@ -173,17 +172,18 @@ public function testImportFromFile()
173172
*/
174173
public function testImportFromStorage($snippet, $runTruncateSnippet = false)
175174
{
175+
$tableId = sprintf('%s_%s', $snippet, rand());
176+
176177
// run the import
177178
$output = $this->runSnippet($snippet, [
178179
self::$datasetId,
180+
$tableId,
179181
]);
180182

181183
$this->assertContains('Data imported successfully', $output);
182-
$tableId = 'us_states';
183184

184185
// verify table contents
185186
$table = self::$dataset->table($tableId);
186-
self::$tempTables[] = $table;
187187
$this->verifyTable($table, 'Washington', 50);
188188

189189
if ($runTruncateSnippet) {
@@ -340,15 +340,6 @@ private function verifyTable($table, $expectedValue, $expectedRowCount)
340340
$this->runEventuallyConsistentTest($testFunction);
341341
}
342342

343-
public function tearDown()
344-
{
345-
if (self::$tempTables) {
346-
while ($tempTable = array_pop(self::$tempTables)) {
347-
$tempTable->delete();
348-
}
349-
}
350-
}
351-
352343
public static function tearDownAfterClass()
353344
{
354345
self::$dataset->delete(['deleteContents' => true]);

storage/test/storageTest.php

Lines changed: 11 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818

1919
namespace Google\Cloud\Samples\Storage;
2020

21+
use Google\Auth\CredentialsLoader;
2122
use Google\Cloud\Storage\StorageClient;
2223
use Google\Cloud\TestUtils\TestTrait;
2324
use Google\Cloud\TestUtils\ExecuteCommandTrait;
@@ -64,41 +65,43 @@ public function testBucketAcl()
6465

6566
public function testManageBucketAcl()
6667
{
68+
$jsonKey = CredentialsLoader::fromEnv();
6769
$acl = self::$tempBucket->acl();
70+
$entity = sprintf('user-%s', $jsonKey['client_email']);
6871
$bucketUrl = sprintf('gs://%s', self::$tempBucket->name());
6972

7073
$output = $this->runCommand('bucket-acl', [
7174
'bucket' => self::$tempBucket->name(),
72-
'--entity' => 'allAuthenticatedUsers',
75+
'--entity' => $entity,
7376
'--create' => true,
7477
]);
7578

76-
$expected = "Added allAuthenticatedUsers (READER) to $bucketUrl ACL\n";
79+
$expected = "Added $entity (READER) to $bucketUrl ACL\n";
7780
$this->assertEquals($expected, $output);
7881

79-
$aclInfo = $acl->get(['entity' => 'allAuthenticatedUsers']);
82+
$aclInfo = $acl->get(['entity' => $entity]);
8083
$this->assertArrayHasKey('role', $aclInfo);
8184
$this->assertEquals('READER', $aclInfo['role']);
8285

8386
$output = $this->runCommand('bucket-acl', [
8487
'bucket' => self::$tempBucket->name(),
85-
'--entity' => 'allAuthenticatedUsers',
88+
'--entity' => $entity,
8689
]);
8790

88-
$expected = "allAuthenticatedUsers: READER\n";
91+
$expected = "$entity: READER\n";
8992
$this->assertEquals($expected, $output);
9093

9194
$output = $this->runCommand('bucket-acl', [
9295
'bucket' => self::$tempBucket->name(),
93-
'--entity' => 'allAuthenticatedUsers',
96+
'--entity' => $entity,
9497
'--delete' => true,
9598
]);
9699

97-
$expected = "Deleted allAuthenticatedUsers from $bucketUrl ACL\n";
100+
$expected = "Deleted $entity from $bucketUrl ACL\n";
98101
$this->assertEquals($expected, $output);
99102

100103
try {
101-
$acl->get(['entity' => 'allAuthenticatedUsers']);
104+
$acl->get(['entity' => $entity]);
102105
$this->fail();
103106
} catch (NotFoundException $e) {
104107
$this->assertTrue(true);

0 commit comments

Comments
 (0)