AppProfile |
App profile is a configuration object describing how Cloud Bigtable should treat traffic from a particular end user application.
|
AppProfileArgs |
|
AppProfileArgs.Builder |
|
BigqueryFunctions |
|
BiReservation |
Represents a BI Reservation.
|
BiReservationArgs |
|
BiReservationArgs.Builder |
|
CapacityCommitment |
Capacity commitment is a way to purchase compute capacity for BigQuery jobs (in the form of slots) with some committed period of usage.
|
CapacityCommitmentArgs |
|
CapacityCommitmentArgs.Builder |
|
Connection |
A connection allows BigQuery connections to external data sources.
|
ConnectionArgs |
|
ConnectionArgs.Builder |
|
ConnectionIamBinding |
Three different resources help you manage your IAM policy for BigQuery Connection Connection.
|
ConnectionIamBindingArgs |
|
ConnectionIamBindingArgs.Builder |
|
ConnectionIamMember |
Three different resources help you manage your IAM policy for BigQuery Connection Connection.
|
ConnectionIamMemberArgs |
|
ConnectionIamMemberArgs.Builder |
|
ConnectionIamPolicy |
Three different resources help you manage your IAM policy for BigQuery Connection Connection.
|
ConnectionIamPolicyArgs |
|
ConnectionIamPolicyArgs.Builder |
|
Dataset |
## Example Usage
### Bigquery Dataset Basic
```java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var bqowner = new Account("bqowner", AccountArgs.builder()
.accountId("bqowner")
.build());
var dataset = new Dataset("dataset", DatasetArgs.builder()
.datasetId("example_dataset")
.friendlyName("test")
.description("This is a test description")
.location("EU")
.defaultTableExpirationMs(3600000)
.labels(Map.of("env", "default"))
.accesses(
DatasetAccessArgs.builder()
.role("OWNER")
.userByEmail(bqowner.email())
.build(),
DatasetAccessArgs.builder()
.role("READER")
.domain("hashicorp.com")
.build())
.build());
}
}
```
### Bigquery Dataset Cmek
```java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.kms.KeyRing;
import com.pulumi.gcp.kms.KeyRingArgs;
import com.pulumi.gcp.kms.CryptoKey;
import com.pulumi.gcp.kms.CryptoKeyArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetDefaultEncryptionConfigurationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var keyRing = new KeyRing("keyRing", KeyRingArgs.builder()
.location("us")
.build());
var cryptoKey = new CryptoKey("cryptoKey", CryptoKeyArgs.builder()
.keyRing(keyRing.id())
.build());
var dataset = new Dataset("dataset", DatasetArgs.builder()
.datasetId("example_dataset")
.friendlyName("test")
.description("This is a test description")
.location("US")
.defaultTableExpirationMs(3600000)
.defaultEncryptionConfiguration(DatasetDefaultEncryptionConfigurationArgs.builder()
.kmsKeyName(cryptoKey.id())
.build())
.build());
}
}
```
### Bigquery Dataset Authorized Dataset
```java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessDatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessDatasetDatasetArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var bqowner = new Account("bqowner", AccountArgs.builder()
.accountId("bqowner")
.build());
var public_ = new Dataset("public", DatasetArgs.builder()
.datasetId("public")
.friendlyName("test")
.description("This dataset is public")
.location("EU")
.defaultTableExpirationMs(3600000)
.labels(Map.of("env", "default"))
.accesses(
DatasetAccessArgs.builder()
.role("OWNER")
.userByEmail(bqowner.email())
.build(),
DatasetAccessArgs.builder()
.role("READER")
.domain("hashicorp.com")
.build())
.build());
var dataset = new Dataset("dataset", DatasetArgs.builder()
.datasetId("private")
.friendlyName("test")
.description("This dataset is private")
.location("EU")
.defaultTableExpirationMs(3600000)
.labels(Map.of("env", "default"))
.accesses(
DatasetAccessArgs.builder()
.role("OWNER")
.userByEmail(bqowner.email())
.build(),
DatasetAccessArgs.builder()
.role("READER")
.domain("hashicorp.com")
.build(),
DatasetAccessArgs.builder()
.dataset(DatasetAccessDatasetArgs.builder()
.dataset(DatasetAccessDatasetDatasetArgs.builder()
.projectId(public_.project())
.datasetId(public_.datasetId())
.build())
.targetTypes("VIEWS")
.build())
.build())
.build());
}
}
```
### Bigquery Dataset Authorized Routine
```java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.Routine;
import com.pulumi.gcp.bigquery.RoutineArgs;
import com.pulumi.gcp.bigquery.inputs.RoutineArgumentArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessRoutineArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var publicDataset = new Dataset("publicDataset", DatasetArgs.builder()
.datasetId("public_dataset")
.description("This dataset is public")
.build());
var publicRoutine = new Routine("publicRoutine", RoutineArgs.builder()
.datasetId(publicDataset.datasetId())
.routineId("public_routine")
.routineType("TABLE_VALUED_FUNCTION")
.language("SQL")
.definitionBody("""
SELECT 1 + value AS value
""")
.arguments(RoutineArgumentArgs.builder()
.name("value")
.argumentKind("FIXED_TYPE")
.dataType(serializeJson(
jsonObject(
jsonProperty("typeKind", "INT64")
)))
.build())
.returnTableType(serializeJson(
jsonObject(
jsonProperty("columns", jsonArray(jsonObject(
jsonProperty("name", "value"),
jsonProperty("type", jsonObject(
jsonProperty("typeKind", "INT64")
))
)))
)))
.build());
var private_ = new Dataset("private", DatasetArgs.builder()
.datasetId("private_dataset")
.description("This dataset is private")
.accesses(
DatasetAccessArgs.builder()
.role("OWNER")
.userByEmail(" [email protected]")
.build(),
DatasetAccessArgs.builder()
.routine(DatasetAccessRoutineArgs.builder()
.projectId(publicRoutine.project())
.datasetId(publicRoutine.datasetId())
.routineId(publicRoutine.routineId())
.build())
.build())
.build());
}
}
```
### Bigquery Dataset External Reference Aws Docs
```java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetExternalDatasetReferenceArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var dataset = new Dataset("dataset", DatasetArgs.builder()
.datasetId("example_dataset")
.friendlyName("test")
.description("This is a test description")
.location("aws-us-east-1")
.externalDatasetReference(DatasetExternalDatasetReferenceArgs.builder()
.externalSource("aws-glue://arn:aws:glue:us-east-1:999999999999:database/database")
.connection("projects/project/locations/aws-us-east-1/connections/connection")
.build())
.build(), CustomResourceOptions.builder()
.provider(google_beta)
.build());
}
}
```
## Import
Dataset can be imported using any of these accepted formats: * `projects/{{project}}/datasets/{{dataset_id}}` * `{{project}}/{{dataset_id}}` * `{{dataset_id}}`. When using the `pulumi import` command, Dataset can be imported using one of the formats above.
|
DatasetAccess |
## Example Usage
### Bigquery Dataset Access Basic User
```java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.bigquery.DatasetAccess;
import com.pulumi.gcp.bigquery.DatasetAccessArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var dataset = new Dataset("dataset", DatasetArgs.builder()
.datasetId("example_dataset")
.build());
var bqowner = new Account("bqowner", AccountArgs.builder()
.accountId("bqowner")
.build());
var access = new DatasetAccess("access", DatasetAccessArgs.builder()
.datasetId(dataset.datasetId())
.role("OWNER")
.userByEmail(bqowner.email())
.build());
}
}
```
### Bigquery Dataset Access View
```java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.Table;
import com.pulumi.gcp.bigquery.TableArgs;
import com.pulumi.gcp.bigquery.inputs.TableViewArgs;
import com.pulumi.gcp.bigquery.DatasetAccess;
import com.pulumi.gcp.bigquery.DatasetAccessArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessViewArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var private_ = new Dataset("private", DatasetArgs.builder()
.datasetId("example_dataset")
.build());
var publicDataset = new Dataset("publicDataset", DatasetArgs.builder()
.datasetId("example_dataset2")
.build());
var publicTable = new Table("publicTable", TableArgs.builder()
.deletionProtection(false)
.datasetId(publicDataset.datasetId())
.tableId("example_table")
.view(TableViewArgs.builder()
.query("SELECT state FROM [lookerdata:cdc.project_tycho_reports]")
.useLegacySql(false)
.build())
.build());
var access = new DatasetAccess("access", DatasetAccessArgs.builder()
.datasetId(private_.datasetId())
.view(DatasetAccessViewArgs.builder()
.projectId(publicTable.project())
.datasetId(publicDataset.datasetId())
.tableId(publicTable.tableId())
.build())
.build());
}
}
```
### Bigquery Dataset Access Authorized Dataset
```java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.DatasetAccess;
import com.pulumi.gcp.bigquery.DatasetAccessArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessAuthorizedDatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessAuthorizedDatasetDatasetArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var private_ = new Dataset("private", DatasetArgs.builder()
.datasetId("private")
.build());
var public_ = new Dataset("public", DatasetArgs.builder()
.datasetId("public")
.build());
var access = new DatasetAccess("access", DatasetAccessArgs.builder()
.datasetId(private_.datasetId())
.authorizedDataset(DatasetAccessAuthorizedDatasetArgs.builder()
.dataset(DatasetAccessAuthorizedDatasetDatasetArgs.builder()
.projectId(public_.project())
.datasetId(public_.datasetId())
.build())
.targetTypes("VIEWS")
.build())
.build());
}
}
```
### Bigquery Dataset Access Authorized Routine
```java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.Routine;
import com.pulumi.gcp.bigquery.RoutineArgs;
import com.pulumi.gcp.bigquery.inputs.RoutineArgumentArgs;
import com.pulumi.gcp.bigquery.DatasetAccess;
import com.pulumi.gcp.bigquery.DatasetAccessArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessRoutineArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var publicDataset = new Dataset("publicDataset", DatasetArgs.builder()
.datasetId("public_dataset")
.description("This dataset is public")
.build());
var publicRoutine = new Routine("publicRoutine", RoutineArgs.builder()
.datasetId(publicDataset.datasetId())
.routineId("public_routine")
.routineType("TABLE_VALUED_FUNCTION")
.language("SQL")
.definitionBody("""
SELECT 1 + value AS value
""")
.arguments(RoutineArgumentArgs.builder()
.name("value")
.argumentKind("FIXED_TYPE")
.dataType(serializeJson(
jsonObject(
jsonProperty("typeKind", "INT64")
)))
.build())
.returnTableType(serializeJson(
jsonObject(
jsonProperty("columns", jsonArray(jsonObject(
jsonProperty("name", "value"),
jsonProperty("type", jsonObject(
jsonProperty("typeKind", "INT64")
))
)))
)))
.build());
var private_ = new Dataset("private", DatasetArgs.builder()
.datasetId("private_dataset")
.description("This dataset is private")
.build());
var authorizedRoutine = new DatasetAccess("authorizedRoutine", DatasetAccessArgs.builder()
.datasetId(private_.datasetId())
.routine(DatasetAccessRoutineArgs.builder()
.projectId(publicRoutine.project())
.datasetId(publicRoutine.datasetId())
.routineId(publicRoutine.routineId())
.build())
.build());
}
}
```
## Import
This resource does not support import.
|
DatasetAccessArgs |
|
DatasetAccessArgs.Builder |
|
DatasetArgs |
|
DatasetArgs.Builder |
|
DatasetIamBinding |
Three different resources help you manage your IAM policy for BigQuery dataset.
|
DatasetIamBindingArgs |
|
DatasetIamBindingArgs.Builder |
|
DatasetIamMember |
Three different resources help you manage your IAM policy for BigQuery dataset.
|
DatasetIamMemberArgs |
|
DatasetIamMemberArgs.Builder |
|
DatasetIamPolicy |
Three different resources help you manage your IAM policy for BigQuery dataset.
|
DatasetIamPolicyArgs |
|
DatasetIamPolicyArgs.Builder |
|
DataTransferConfig |
Represents a data transfer configuration.
|
DataTransferConfigArgs |
|
DataTransferConfigArgs.Builder |
|
IamBinding |
Three different resources help you manage your IAM policy for BigQuery Table.
|
IamBindingArgs |
|
IamBindingArgs.Builder |
|
IamMember |
Three different resources help you manage your IAM policy for BigQuery Table.
|
IamMemberArgs |
|
IamMemberArgs.Builder |
|
IamPolicy |
Three different resources help you manage your IAM policy for BigQuery Table.
|
IamPolicyArgs |
|
IamPolicyArgs.Builder |
|
Job |
Jobs are actions that BigQuery runs on your behalf to load data, export data, query data, or copy data.
|
JobArgs |
|
JobArgs.Builder |
|
Reservation |
A reservation is a mechanism used to guarantee BigQuery slots to users.
|
ReservationArgs |
|
ReservationArgs.Builder |
|
ReservationAssignment |
The BigqueryReservation Assignment resource.
## Example Usage
### Basic
```java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Reservation;
import com.pulumi.gcp.bigquery.ReservationArgs;
import com.pulumi.gcp.bigquery.ReservationAssignment;
import com.pulumi.gcp.bigquery.ReservationAssignmentArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var basic = new Reservation("basic", ReservationArgs.builder()
.project("my-project-name")
.location("us-central1")
.slotCapacity(0)
.ignoreIdleSlots(false)
.build());
var primary = new ReservationAssignment("primary", ReservationAssignmentArgs.builder()
.assignee("projects/my-project-name")
.jobType("PIPELINE")
.reservation(basic.id())
.build());
}
}
```
## Import
Assignment can be imported using any of these accepted formats: * `projects/{{project}}/locations/{{location}}/reservations/{{reservation}}/assignments/{{name}}` * `{{project}}/{{location}}/{{reservation}}/{{name}}` * `{{location}}/{{reservation}}/{{name}}`. When using the `pulumi import` command, Assignment can be imported using one of the formats above.
|
ReservationAssignmentArgs |
|
ReservationAssignmentArgs.Builder |
|
Routine |
A user-defined function or a stored procedure that belongs to a Dataset.
To get more information about Routine, see:
* [API documentation](https://cloud.google.com/bigquery/docs/reference/rest/v2/routines)
* How-to Guides
* [Routines Intro](https://cloud.google.com/bigquery/docs/reference/rest/v2/routines)
## Example Usage
### Big Query Routine Basic
```java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.Routine;
import com.pulumi.gcp.bigquery.RoutineArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var test = new Dataset("test", DatasetArgs.builder()
.datasetId("dataset_id")
.build());
var sproc = new Routine("sproc", RoutineArgs.builder()
.datasetId(test.datasetId())
.routineId("routine_id")
.routineType("PROCEDURE")
.language("SQL")
.definitionBody("CREATE FUNCTION Add(x FLOAT64, y FLOAT64) RETURNS FLOAT64 AS (x + y);")
.build());
}
}
```
### Big Query Routine Json
```java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.Routine;
import com.pulumi.gcp.bigquery.RoutineArgs;
import com.pulumi.gcp.bigquery.inputs.RoutineArgumentArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var test = new Dataset("test", DatasetArgs.builder()
.datasetId("dataset_id")
.build());
var sproc = new Routine("sproc", RoutineArgs.builder()
.datasetId(test.datasetId())
.routineId("routine_id")
.routineType("SCALAR_FUNCTION")
.language("JAVASCRIPT")
.definitionBody("CREATE FUNCTION multiplyInputs return x*y;")
.arguments(
RoutineArgumentArgs.builder()
.name("x")
.dataType("{\"typeKind\" : \"FLOAT64\"}")
.build(),
RoutineArgumentArgs.builder()
.name("y")
.dataType("{\"typeKind\" : \"FLOAT64\"}")
.build())
.returnType("{\"typeKind\" : \"FLOAT64\"}")
.build());
}
}
```
### Big Query Routine Tvf
```java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.Routine;
import com.pulumi.gcp.bigquery.RoutineArgs;
import com.pulumi.gcp.bigquery.inputs.RoutineArgumentArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var test = new Dataset("test", DatasetArgs.builder()
.datasetId("dataset_id")
.build());
var sproc = new Routine("sproc", RoutineArgs.builder()
.datasetId(test.datasetId())
.routineId("routine_id")
.routineType("TABLE_VALUED_FUNCTION")
.language("SQL")
.definitionBody("""
SELECT 1 + value AS value
""")
.arguments(RoutineArgumentArgs.builder()
.name("value")
.argumentKind("FIXED_TYPE")
.dataType(serializeJson(
jsonObject(
jsonProperty("typeKind", "INT64")
)))
.build())
.returnTableType(serializeJson(
jsonObject(
jsonProperty("columns", jsonArray(jsonObject(
jsonProperty("name", "value"),
jsonProperty("type", jsonObject(
jsonProperty("typeKind", "INT64")
))
)))
)))
.build());
}
}
```
### Big Query Routine Pyspark
```java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.Connection;
import com.pulumi.gcp.bigquery.ConnectionArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionSparkArgs;
import com.pulumi.gcp.bigquery.Routine;
import com.pulumi.gcp.bigquery.RoutineArgs;
import com.pulumi.gcp.bigquery.inputs.RoutineSparkOptionsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var testDataset = new Dataset("testDataset", DatasetArgs.builder()
.datasetId("dataset_id")
.build());
var testConnection = new Connection("testConnection", ConnectionArgs.builder()
.connectionId("connection_id")
.location("US")
.spark()
.build());
var pyspark = new Routine("pyspark", RoutineArgs.builder()
.datasetId(testDataset.datasetId())
.routineId("routine_id")
.routineType("PROCEDURE")
.language("PYTHON")
.definitionBody("""
from pyspark.sql import SparkSession
spark = SparkSession.builder.appName("spark-bigquery-demo").getOrCreate()
# Load data from BigQuery.
|
RoutineArgs |
|
RoutineArgs.Builder |
|
Table |
Creates a table resource in a dataset for Google BigQuery.
|
TableArgs |
|
TableArgs.Builder |
|