Google Cloud v8.14.0 published on Wednesday, Jan 15, 2025 by Pulumi

gcp.bigquery.Connection


A connection allows BigQuery to connect to external data sources.

To get more information about Connection, see:

* API documentation: https://cloud.google.com/bigquery/docs/reference/bigqueryconnection/rest/v1/projects.locations.connections
* How-to guides: Cloud SQL federated queries (https://cloud.google.com/bigquery/docs/cloud-sql-federated-queries)

Example Usage

Bigquery Connection Cloud Resource

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const connection = new gcp.bigquery.Connection("connection", {
    connectionId: "my-connection",
    location: "US",
    friendlyName: "👋",
    description: "a riveting description",
    cloudResource: {},
});

Python

import pulumi
import pulumi_gcp as gcp

connection = gcp.bigquery.Connection("connection",
    connection_id="my-connection",
    location="US",
    friendly_name="👋",
    description="a riveting description",
    cloud_resource={})

Go

package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := bigquery.NewConnection(ctx, "connection", &bigquery.ConnectionArgs{
			ConnectionId:  pulumi.String("my-connection"),
			Location:      pulumi.String("US"),
			FriendlyName:  pulumi.String("👋"),
			Description:   pulumi.String("a riveting description"),
			CloudResource: &bigquery.ConnectionCloudResourceArgs{},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var connection = new Gcp.BigQuery.Connection("connection", new()
    {
        ConnectionId = "my-connection",
        Location = "US",
        FriendlyName = "👋",
        Description = "a riveting description",
        CloudResource = new Gcp.BigQuery.Inputs.ConnectionCloudResourceArgs(),
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Connection;
import com.pulumi.gcp.bigquery.ConnectionArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionCloudResourceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var connection = new Connection("connection", ConnectionArgs.builder()
            .connectionId("my-connection")
            .location("US")
            .friendlyName("👋")
            .description("a riveting description")
            .cloudResource(ConnectionCloudResourceArgs.builder().build())
            .build());

    }
}

YAML

resources:
  connection:
    type: gcp:bigquery:Connection
    properties:
      connectionId: my-connection
      location: US
      friendlyName: "\U0001F44B"
      description: a riveting description
      cloudResource: {}
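
A cloud resource connection provisions a Google-managed service account, surfaced through the connection's cloudResource.serviceAccountId output. A common follow-up is to grant that account access to the data the connection should reach; the TypeScript sketch below is a minimal example, assuming a hypothetical bucket name, that grants read access for BigLake external tables.

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const connection = new gcp.bigquery.Connection("connection", {
    connectionId: "my-connection",
    location: "US",
    cloudResource: {},
});

// The service account id is only known after creation, so read it via apply.
const connectionSa = connection.cloudResource.apply(cr => cr?.serviceAccountId ?? "");

// Hypothetical bucket: allow the connection's service account to read objects.
new gcp.storage.BucketIAMMember("connection-reader", {
    bucket: "my-biglake-bucket",
    role: "roles/storage.objectViewer",
    member: pulumi.interpolate`serviceAccount:${connectionSa}`,
});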

Bigquery Connection Basic

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
import * as random from "@pulumi/random";

const instance = new gcp.sql.DatabaseInstance("instance", {
    name: "my-database-instance",
    databaseVersion: "POSTGRES_11",
    region: "us-central1",
    settings: {
        tier: "db-f1-micro",
    },
    deletionProtection: true,
});
const db = new gcp.sql.Database("db", {
    instance: instance.name,
    name: "db",
});
const pwd = new random.RandomPassword("pwd", {
    length: 16,
    special: false,
});
const user = new gcp.sql.User("user", {
    name: "user",
    instance: instance.name,
    password: pwd.result,
});
const connection = new gcp.bigquery.Connection("connection", {
    friendlyName: "👋",
    description: "a riveting description",
    location: "US",
    cloudSql: {
        instanceId: instance.connectionName,
        database: db.name,
        type: "POSTGRES",
        credential: {
            username: user.name,
            password: user.password,
        },
    },
});

Python

import pulumi
import pulumi_gcp as gcp
import pulumi_random as random

instance = gcp.sql.DatabaseInstance("instance",
    name="my-database-instance",
    database_version="POSTGRES_11",
    region="us-central1",
    settings={
        "tier": "db-f1-micro",
    },
    deletion_protection=True)
db = gcp.sql.Database("db",
    instance=instance.name,
    name="db")
pwd = random.RandomPassword("pwd",
    length=16,
    special=False)
user = gcp.sql.User("user",
    name="user",
    instance=instance.name,
    password=pwd.result)
connection = gcp.bigquery.Connection("connection",
    friendly_name="👋",
    description="a riveting description",
    location="US",
    cloud_sql={
        "instance_id": instance.connection_name,
        "database": db.name,
        "type": "POSTGRES",
        "credential": {
            "username": user.name,
            "password": user.password,
        },
    })

Go

package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/sql"
	"github.com/pulumi/pulumi-random/sdk/v4/go/random"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		instance, err := sql.NewDatabaseInstance(ctx, "instance", &sql.DatabaseInstanceArgs{
			Name:            pulumi.String("my-database-instance"),
			DatabaseVersion: pulumi.String("POSTGRES_11"),
			Region:          pulumi.String("us-central1"),
			Settings: &sql.DatabaseInstanceSettingsArgs{
				Tier: pulumi.String("db-f1-micro"),
			},
			DeletionProtection: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		db, err := sql.NewDatabase(ctx, "db", &sql.DatabaseArgs{
			Instance: instance.Name,
			Name:     pulumi.String("db"),
		})
		if err != nil {
			return err
		}
		pwd, err := random.NewRandomPassword(ctx, "pwd", &random.RandomPasswordArgs{
			Length:  pulumi.Int(16),
			Special: pulumi.Bool(false),
		})
		if err != nil {
			return err
		}
		user, err := sql.NewUser(ctx, "user", &sql.UserArgs{
			Name:     pulumi.String("user"),
			Instance: instance.Name,
			Password: pwd.Result,
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewConnection(ctx, "connection", &bigquery.ConnectionArgs{
			FriendlyName: pulumi.String("👋"),
			Description:  pulumi.String("a riveting description"),
			Location:     pulumi.String("US"),
			CloudSql: &bigquery.ConnectionCloudSqlArgs{
				InstanceId: instance.ConnectionName,
				Database:   db.Name,
				Type:       pulumi.String("POSTGRES"),
				Credential: &bigquery.ConnectionCloudSqlCredentialArgs{
					Username: user.Name,
					Password: user.Password,
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
using Random = Pulumi.Random;

return await Deployment.RunAsync(() => 
{
    var instance = new Gcp.Sql.DatabaseInstance("instance", new()
    {
        Name = "my-database-instance",
        DatabaseVersion = "POSTGRES_11",
        Region = "us-central1",
        Settings = new Gcp.Sql.Inputs.DatabaseInstanceSettingsArgs
        {
            Tier = "db-f1-micro",
        },
        DeletionProtection = true,
    });

    var db = new Gcp.Sql.Database("db", new()
    {
        Instance = instance.Name,
        Name = "db",
    });

    var pwd = new Random.RandomPassword("pwd", new()
    {
        Length = 16,
        Special = false,
    });

    var user = new Gcp.Sql.User("user", new()
    {
        Name = "user",
        Instance = instance.Name,
        Password = pwd.Result,
    });

    var connection = new Gcp.BigQuery.Connection("connection", new()
    {
        FriendlyName = "👋",
        Description = "a riveting description",
        Location = "US",
        CloudSql = new Gcp.BigQuery.Inputs.ConnectionCloudSqlArgs
        {
            InstanceId = instance.ConnectionName,
            Database = db.Name,
            Type = "POSTGRES",
            Credential = new Gcp.BigQuery.Inputs.ConnectionCloudSqlCredentialArgs
            {
                Username = user.Name,
                Password = user.Password,
            },
        },
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.sql.DatabaseInstance;
import com.pulumi.gcp.sql.DatabaseInstanceArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsArgs;
import com.pulumi.gcp.sql.Database;
import com.pulumi.gcp.sql.DatabaseArgs;
import com.pulumi.random.RandomPassword;
import com.pulumi.random.RandomPasswordArgs;
import com.pulumi.gcp.sql.User;
import com.pulumi.gcp.sql.UserArgs;
import com.pulumi.gcp.bigquery.Connection;
import com.pulumi.gcp.bigquery.ConnectionArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionCloudSqlArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionCloudSqlCredentialArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var instance = new DatabaseInstance("instance", DatabaseInstanceArgs.builder()
            .name("my-database-instance")
            .databaseVersion("POSTGRES_11")
            .region("us-central1")
            .settings(DatabaseInstanceSettingsArgs.builder()
                .tier("db-f1-micro")
                .build())
            .deletionProtection(true)
            .build());

        var db = new Database("db", DatabaseArgs.builder()
            .instance(instance.name())
            .name("db")
            .build());

        var pwd = new RandomPassword("pwd", RandomPasswordArgs.builder()
            .length(16)
            .special(false)
            .build());

        var user = new User("user", UserArgs.builder()
            .name("user")
            .instance(instance.name())
            .password(pwd.result())
            .build());

        var connection = new Connection("connection", ConnectionArgs.builder()
            .friendlyName("👋")
            .description("a riveting description")
            .location("US")
            .cloudSql(ConnectionCloudSqlArgs.builder()
                .instanceId(instance.connectionName())
                .database(db.name())
                .type("POSTGRES")
                .credential(ConnectionCloudSqlCredentialArgs.builder()
                    .username(user.name())
                    .password(user.password())
                    .build())
                .build())
            .build());

    }
}

YAML

resources:
  instance:
    type: gcp:sql:DatabaseInstance
    properties:
      name: my-database-instance
      databaseVersion: POSTGRES_11
      region: us-central1
      settings:
        tier: db-f1-micro
      deletionProtection: true
  db:
    type: gcp:sql:Database
    properties:
      instance: ${instance.name}
      name: db
  pwd:
    type: random:RandomPassword
    properties:
      length: 16
      special: false
  user:
    type: gcp:sql:User
    properties:
      name: user
      instance: ${instance.name}
      password: ${pwd.result}
  connection:
    type: gcp:bigquery:Connection
    properties:
      friendlyName: "\U0001F44B"
      description: a riveting description
      location: US
      cloudSql:
        instanceId: ${instance.connectionName}
        database: ${db.name}
        type: POSTGRES
        credential:
          username: ${user.name}
          password: ${user.password}
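
Because this example omits connectionId, the service generates one. The full resource name, projects/{project}/locations/{location}/connections/{connectionId}, is available afterwards through the connection's name output; a minimal TypeScript sketch, assuming the connection resource from the example above:

// Export the server-assigned resource name of the connection.
export const connectionName = connection.name;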

Bigquery Connection Full

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
import * as random from "@pulumi/random";

const instance = new gcp.sql.DatabaseInstance("instance", {
    name: "my-database-instance",
    databaseVersion: "POSTGRES_11",
    region: "us-central1",
    settings: {
        tier: "db-f1-micro",
    },
    deletionProtection: true,
});
const db = new gcp.sql.Database("db", {
    instance: instance.name,
    name: "db",
});
const pwd = new random.RandomPassword("pwd", {
    length: 16,
    special: false,
});
const user = new gcp.sql.User("user", {
    name: "user",
    instance: instance.name,
    password: pwd.result,
});
const connection = new gcp.bigquery.Connection("connection", {
    connectionId: "my-connection",
    location: "US",
    friendlyName: "👋",
    description: "a riveting description",
    cloudSql: {
        instanceId: instance.connectionName,
        database: db.name,
        type: "POSTGRES",
        credential: {
            username: user.name,
            password: user.password,
        },
    },
});

Python

import pulumi
import pulumi_gcp as gcp
import pulumi_random as random

instance = gcp.sql.DatabaseInstance("instance",
    name="my-database-instance",
    database_version="POSTGRES_11",
    region="us-central1",
    settings={
        "tier": "db-f1-micro",
    },
    deletion_protection=True)
db = gcp.sql.Database("db",
    instance=instance.name,
    name="db")
pwd = random.RandomPassword("pwd",
    length=16,
    special=False)
user = gcp.sql.User("user",
    name="user",
    instance=instance.name,
    password=pwd.result)
connection = gcp.bigquery.Connection("connection",
    connection_id="my-connection",
    location="US",
    friendly_name="👋",
    description="a riveting description",
    cloud_sql={
        "instance_id": instance.connection_name,
        "database": db.name,
        "type": "POSTGRES",
        "credential": {
            "username": user.name,
            "password": user.password,
        },
    })

Go

package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/sql"
	"github.com/pulumi/pulumi-random/sdk/v4/go/random"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		instance, err := sql.NewDatabaseInstance(ctx, "instance", &sql.DatabaseInstanceArgs{
			Name:            pulumi.String("my-database-instance"),
			DatabaseVersion: pulumi.String("POSTGRES_11"),
			Region:          pulumi.String("us-central1"),
			Settings: &sql.DatabaseInstanceSettingsArgs{
				Tier: pulumi.String("db-f1-micro"),
			},
			DeletionProtection: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		db, err := sql.NewDatabase(ctx, "db", &sql.DatabaseArgs{
			Instance: instance.Name,
			Name:     pulumi.String("db"),
		})
		if err != nil {
			return err
		}
		pwd, err := random.NewRandomPassword(ctx, "pwd", &random.RandomPasswordArgs{
			Length:  pulumi.Int(16),
			Special: pulumi.Bool(false),
		})
		if err != nil {
			return err
		}
		user, err := sql.NewUser(ctx, "user", &sql.UserArgs{
			Name:     pulumi.String("user"),
			Instance: instance.Name,
			Password: pwd.Result,
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewConnection(ctx, "connection", &bigquery.ConnectionArgs{
			ConnectionId: pulumi.String("my-connection"),
			Location:     pulumi.String("US"),
			FriendlyName: pulumi.String("👋"),
			Description:  pulumi.String("a riveting description"),
			CloudSql: &bigquery.ConnectionCloudSqlArgs{
				InstanceId: instance.ConnectionName,
				Database:   db.Name,
				Type:       pulumi.String("POSTGRES"),
				Credential: &bigquery.ConnectionCloudSqlCredentialArgs{
					Username: user.Name,
					Password: user.Password,
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
using Random = Pulumi.Random;

return await Deployment.RunAsync(() => 
{
    var instance = new Gcp.Sql.DatabaseInstance("instance", new()
    {
        Name = "my-database-instance",
        DatabaseVersion = "POSTGRES_11",
        Region = "us-central1",
        Settings = new Gcp.Sql.Inputs.DatabaseInstanceSettingsArgs
        {
            Tier = "db-f1-micro",
        },
        DeletionProtection = true,
    });

    var db = new Gcp.Sql.Database("db", new()
    {
        Instance = instance.Name,
        Name = "db",
    });

    var pwd = new Random.RandomPassword("pwd", new()
    {
        Length = 16,
        Special = false,
    });

    var user = new Gcp.Sql.User("user", new()
    {
        Name = "user",
        Instance = instance.Name,
        Password = pwd.Result,
    });

    var connection = new Gcp.BigQuery.Connection("connection", new()
    {
        ConnectionId = "my-connection",
        Location = "US",
        FriendlyName = "👋",
        Description = "a riveting description",
        CloudSql = new Gcp.BigQuery.Inputs.ConnectionCloudSqlArgs
        {
            InstanceId = instance.ConnectionName,
            Database = db.Name,
            Type = "POSTGRES",
            Credential = new Gcp.BigQuery.Inputs.ConnectionCloudSqlCredentialArgs
            {
                Username = user.Name,
                Password = user.Password,
            },
        },
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.sql.DatabaseInstance;
import com.pulumi.gcp.sql.DatabaseInstanceArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsArgs;
import com.pulumi.gcp.sql.Database;
import com.pulumi.gcp.sql.DatabaseArgs;
import com.pulumi.random.RandomPassword;
import com.pulumi.random.RandomPasswordArgs;
import com.pulumi.gcp.sql.User;
import com.pulumi.gcp.sql.UserArgs;
import com.pulumi.gcp.bigquery.Connection;
import com.pulumi.gcp.bigquery.ConnectionArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionCloudSqlArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionCloudSqlCredentialArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var instance = new DatabaseInstance("instance", DatabaseInstanceArgs.builder()
            .name("my-database-instance")
            .databaseVersion("POSTGRES_11")
            .region("us-central1")
            .settings(DatabaseInstanceSettingsArgs.builder()
                .tier("db-f1-micro")
                .build())
            .deletionProtection(true)
            .build());

        var db = new Database("db", DatabaseArgs.builder()
            .instance(instance.name())
            .name("db")
            .build());

        var pwd = new RandomPassword("pwd", RandomPasswordArgs.builder()
            .length(16)
            .special(false)
            .build());

        var user = new User("user", UserArgs.builder()
            .name("user")
            .instance(instance.name())
            .password(pwd.result())
            .build());

        var connection = new Connection("connection", ConnectionArgs.builder()
            .connectionId("my-connection")
            .location("US")
            .friendlyName("👋")
            .description("a riveting description")
            .cloudSql(ConnectionCloudSqlArgs.builder()
                .instanceId(instance.connectionName())
                .database(db.name())
                .type("POSTGRES")
                .credential(ConnectionCloudSqlCredentialArgs.builder()
                    .username(user.name())
                    .password(user.password())
                    .build())
                .build())
            .build());

    }
}

YAML

resources:
  instance:
    type: gcp:sql:DatabaseInstance
    properties:
      name: my-database-instance
      databaseVersion: POSTGRES_11
      region: us-central1
      settings:
        tier: db-f1-micro
      deletionProtection: true
  db:
    type: gcp:sql:Database
    properties:
      instance: ${instance.name}
      name: db
  pwd:
    type: random:RandomPassword
    properties:
      length: 16
      special: false
  user:
    type: gcp:sql:User
    properties:
      name: user
      instance: ${instance.name}
      password: ${pwd.result}
  connection:
    type: gcp:bigquery:Connection
    properties:
      connectionId: my-connection
      location: US
      friendlyName: "\U0001F44B"
      description: a riveting description
      cloudSql:
        instanceId: ${instance.connectionName}
        database: ${db.name}
        type: POSTGRES
        credential:
          username: ${user.name}
          password: ${user.password}

Bigquery Connection Aws

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const connection = new gcp.bigquery.Connection("connection", {
    connectionId: "my-connection",
    location: "aws-us-east-1",
    friendlyName: "👋",
    description: "a riveting description",
    aws: {
        accessRole: {
            iamRoleId: "arn:aws:iam::999999999999:role/omnirole",
        },
    },
});

Python

import pulumi
import pulumi_gcp as gcp

connection = gcp.bigquery.Connection("connection",
    connection_id="my-connection",
    location="aws-us-east-1",
    friendly_name="👋",
    description="a riveting description",
    aws={
        "access_role": {
            "iam_role_id": "arn:aws:iam::999999999999:role/omnirole",
        },
    })

Go

package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := bigquery.NewConnection(ctx, "connection", &bigquery.ConnectionArgs{
			ConnectionId: pulumi.String("my-connection"),
			Location:     pulumi.String("aws-us-east-1"),
			FriendlyName: pulumi.String("👋"),
			Description:  pulumi.String("a riveting description"),
			Aws: &bigquery.ConnectionAwsArgs{
				AccessRole: &bigquery.ConnectionAwsAccessRoleArgs{
					IamRoleId: pulumi.String("arn:aws:iam::999999999999:role/omnirole"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var connection = new Gcp.BigQuery.Connection("connection", new()
    {
        ConnectionId = "my-connection",
        Location = "aws-us-east-1",
        FriendlyName = "👋",
        Description = "a riveting description",
        Aws = new Gcp.BigQuery.Inputs.ConnectionAwsArgs
        {
            AccessRole = new Gcp.BigQuery.Inputs.ConnectionAwsAccessRoleArgs
            {
                IamRoleId = "arn:aws:iam::999999999999:role/omnirole",
            },
        },
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Connection;
import com.pulumi.gcp.bigquery.ConnectionArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionAwsArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionAwsAccessRoleArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var connection = new Connection("connection", ConnectionArgs.builder()
            .connectionId("my-connection")
            .location("aws-us-east-1")
            .friendlyName("👋")
            .description("a riveting description")
            .aws(ConnectionAwsArgs.builder()
                .accessRole(ConnectionAwsAccessRoleArgs.builder()
                    .iamRoleId("arn:aws:iam::999999999999:role/omnirole")
                    .build())
                .build())
            .build());

    }
}

YAML

resources:
  connection:
    type: gcp:bigquery:Connection
    properties:
      connectionId: my-connection
      location: aws-us-east-1
      friendlyName: "\U0001F44B"
      description: a riveting description
      aws:
        accessRole:
          iamRoleId: arn:aws:iam::999999999999:role/omnirole
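
Creating an AWS (BigQuery Omni) connection is only half of the handshake: Google derives an identity for the connection, and that identity must then be added to the IAM role's trust policy on the AWS side. A minimal TypeScript sketch for surfacing it, assuming the connection resource from the example above:

// The Google-generated identity to allow in the AWS IAM role's trust policy.
export const awsIdentity = connection.aws.apply(a => a?.accessRole?.identity);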

Bigquery Connection Azure

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const connection = new gcp.bigquery.Connection("connection", {
    connectionId: "my-connection",
    location: "azure-eastus2",
    friendlyName: "👋",
    description: "a riveting description",
    azure: {
        customerTenantId: "customer-tenant-id",
        federatedApplicationClientId: "b43eeeee-eeee-eeee-eeee-a480155501ce",
    },
});

Python

import pulumi
import pulumi_gcp as gcp

connection = gcp.bigquery.Connection("connection",
    connection_id="my-connection",
    location="azure-eastus2",
    friendly_name="👋",
    description="a riveting description",
    azure={
        "customer_tenant_id": "customer-tenant-id",
        "federated_application_client_id": "b43eeeee-eeee-eeee-eeee-a480155501ce",
    })

Go

package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := bigquery.NewConnection(ctx, "connection", &bigquery.ConnectionArgs{
			ConnectionId: pulumi.String("my-connection"),
			Location:     pulumi.String("azure-eastus2"),
			FriendlyName: pulumi.String("👋"),
			Description:  pulumi.String("a riveting description"),
			Azure: &bigquery.ConnectionAzureArgs{
				CustomerTenantId:             pulumi.String("customer-tenant-id"),
				FederatedApplicationClientId: pulumi.String("b43eeeee-eeee-eeee-eeee-a480155501ce"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var connection = new Gcp.BigQuery.Connection("connection", new()
    {
        ConnectionId = "my-connection",
        Location = "azure-eastus2",
        FriendlyName = "👋",
        Description = "a riveting description",
        Azure = new Gcp.BigQuery.Inputs.ConnectionAzureArgs
        {
            CustomerTenantId = "customer-tenant-id",
            FederatedApplicationClientId = "b43eeeee-eeee-eeee-eeee-a480155501ce",
        },
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Connection;
import com.pulumi.gcp.bigquery.ConnectionArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionAzureArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var connection = new Connection("connection", ConnectionArgs.builder()
            .connectionId("my-connection")
            .location("azure-eastus2")
            .friendlyName("👋")
            .description("a riveting description")
            .azure(ConnectionAzureArgs.builder()
                .customerTenantId("customer-tenant-id")
                .federatedApplicationClientId("b43eeeee-eeee-eeee-eeee-a480155501ce")
                .build())
            .build());

    }
}

YAML

resources:
  connection:
    type: gcp:bigquery:Connection
    properties:
      connectionId: my-connection
      location: azure-eastus2
      friendlyName: "\U0001F44B"
      description: a riveting description
      azure:
        customerTenantId: customer-tenant-id
        federatedApplicationClientId: b43eeeee-eeee-eeee-eeee-a480155501ce
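
An Azure connection works the same way: BigQuery exposes identity details that must be authorized in the customer's tenant before data can be read. A minimal TypeScript sketch, assuming the connection resource from the example above:

// Identity information to grant access in the customer's Azure tenant.
export const azureIdentity = connection.azure.apply(az => az?.identity);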

Bigquery Connection Cloudspanner

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const connection = new gcp.bigquery.Connection("connection", {
    connectionId: "my-connection",
    location: "US",
    friendlyName: "👋",
    description: "a riveting description",
    cloudSpanner: {
        database: "projects/project/instances/instance/databases/database",
        databaseRole: "database_role",
    },
});

Python

import pulumi
import pulumi_gcp as gcp

connection = gcp.bigquery.Connection("connection",
    connection_id="my-connection",
    location="US",
    friendly_name="👋",
    description="a riveting description",
    cloud_spanner={
        "database": "projects/project/instances/instance/databases/database",
        "database_role": "database_role",
    })

Go

package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := bigquery.NewConnection(ctx, "connection", &bigquery.ConnectionArgs{
			ConnectionId: pulumi.String("my-connection"),
			Location:     pulumi.String("US"),
			FriendlyName: pulumi.String("👋"),
			Description:  pulumi.String("a riveting description"),
			CloudSpanner: &bigquery.ConnectionCloudSpannerArgs{
				Database:     pulumi.String("projects/project/instances/instance/databases/database"),
				DatabaseRole: pulumi.String("database_role"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var connection = new Gcp.BigQuery.Connection("connection", new()
    {
        ConnectionId = "my-connection",
        Location = "US",
        FriendlyName = "👋",
        Description = "a riveting description",
        CloudSpanner = new Gcp.BigQuery.Inputs.ConnectionCloudSpannerArgs
        {
            Database = "projects/project/instances/instance/databases/database",
            DatabaseRole = "database_role",
        },
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Connection;
import com.pulumi.gcp.bigquery.ConnectionArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionCloudSpannerArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var connection = new Connection("connection", ConnectionArgs.builder()
            .connectionId("my-connection")
            .location("US")
            .friendlyName("👋")
            .description("a riveting description")
            .cloudSpanner(ConnectionCloudSpannerArgs.builder()
                .database("projects/project/instances/instance/databases/database")
                .databaseRole("database_role")
                .build())
            .build());

    }
}

YAML

resources:
  connection:
    type: gcp:bigquery:Connection
    properties:
      connectionId: my-connection
      location: US
      friendlyName: "\U0001F44B"
      description: a riveting description
      cloudSpanner:
        database: projects/project/instances/instance/databases/database
        databaseRole: database_role

Bigquery Connection Cloudspanner Databoost

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const connection = new gcp.bigquery.Connection("connection", {
    connectionId: "my-connection",
    location: "US",
    friendlyName: "👋",
    description: "a riveting description",
    cloudSpanner: {
        database: "projects/project/instances/instance/databases/database",
        useParallelism: true,
        useDataBoost: true,
        maxParallelism: 100,
    },
});

Python

import pulumi
import pulumi_gcp as gcp

connection = gcp.bigquery.Connection("connection",
    connection_id="my-connection",
    location="US",
    friendly_name="👋",
    description="a riveting description",
    cloud_spanner={
        "database": "projects/project/instances/instance/databases/database",
        "use_parallelism": True,
        "use_data_boost": True,
        "max_parallelism": 100,
    })

Go

package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := bigquery.NewConnection(ctx, "connection", &bigquery.ConnectionArgs{
			ConnectionId: pulumi.String("my-connection"),
			Location:     pulumi.String("US"),
			FriendlyName: pulumi.String("👋"),
			Description:  pulumi.String("a riveting description"),
			CloudSpanner: &bigquery.ConnectionCloudSpannerArgs{
				Database:       pulumi.String("projects/project/instances/instance/databases/database"),
				UseParallelism: pulumi.Bool(true),
				UseDataBoost:   pulumi.Bool(true),
				MaxParallelism: pulumi.Int(100),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var connection = new Gcp.BigQuery.Connection("connection", new()
    {
        ConnectionId = "my-connection",
        Location = "US",
        FriendlyName = "👋",
        Description = "a riveting description",
        CloudSpanner = new Gcp.BigQuery.Inputs.ConnectionCloudSpannerArgs
        {
            Database = "projects/project/instances/instance/databases/database",
            UseParallelism = true,
            UseDataBoost = true,
            MaxParallelism = 100,
        },
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Connection;
import com.pulumi.gcp.bigquery.ConnectionArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionCloudSpannerArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var connection = new Connection("connection", ConnectionArgs.builder()
            .connectionId("my-connection")
            .location("US")
            .friendlyName("👋")
            .description("a riveting description")
            .cloudSpanner(ConnectionCloudSpannerArgs.builder()
                .database("projects/project/instances/instance/databases/database")
                .useParallelism(true)
                .useDataBoost(true)
                .maxParallelism(100)
                .build())
            .build());

    }
}

YAML

resources:
  connection:
    type: gcp:bigquery:Connection
    properties:
      connectionId: my-connection
      location: US
      friendlyName: "\U0001F44B"
      description: a riveting description
      cloudSpanner:
        database: projects/project/instances/instance/databases/database
        useParallelism: true
        useDataBoost: true
        maxParallelism: 100

Bigquery Connection Spark

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const basic = new gcp.dataproc.Cluster("basic", {
    name: "my-connection",
    region: "us-central1",
    clusterConfig: {
        softwareConfig: {
            overrideProperties: {
                "dataproc:dataproc.allow.zero.workers": "true",
            },
        },
        masterConfig: {
            numInstances: 1,
            machineType: "e2-standard-2",
            diskConfig: {
                bootDiskSizeGb: 35,
            },
        },
    },
});
const connection = new gcp.bigquery.Connection("connection", {
    connectionId: "my-connection",
    location: "US",
    friendlyName: "👋",
    description: "a riveting description",
    spark: {
        sparkHistoryServerConfig: {
            dataprocCluster: basic.id,
        },
    },
});

Python

import pulumi
import pulumi_gcp as gcp

basic = gcp.dataproc.Cluster("basic",
    name="my-connection",
    region="us-central1",
    cluster_config={
        "software_config": {
            "override_properties": {
                "dataproc:dataproc.allow.zero.workers": "true",
            },
        },
        "master_config": {
            "num_instances": 1,
            "machine_type": "e2-standard-2",
            "disk_config": {
                "boot_disk_size_gb": 35,
            },
        },
    })
connection = gcp.bigquery.Connection("connection",
    connection_id="my-connection",
    location="US",
    friendly_name="👋",
    description="a riveting description",
    spark={
        "spark_history_server_config": {
            "dataproc_cluster": basic.id,
        },
    })

Go

package main

import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		basic, err := dataproc.NewCluster(ctx, "basic", &dataproc.ClusterArgs{
			Name:   pulumi.String("my-connection"),
			Region: pulumi.String("us-central1"),
			ClusterConfig: &dataproc.ClusterClusterConfigArgs{
				SoftwareConfig: &dataproc.ClusterClusterConfigSoftwareConfigArgs{
					OverrideProperties: pulumi.StringMap{
						"dataproc:dataproc.allow.zero.workers": pulumi.String("true"),
					},
				},
				MasterConfig: &dataproc.ClusterClusterConfigMasterConfigArgs{
					NumInstances: pulumi.Int(1),
					MachineType:  pulumi.String("e2-standard-2"),
					DiskConfig: &dataproc.ClusterClusterConfigMasterConfigDiskConfigArgs{
						BootDiskSizeGb: pulumi.Int(35),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewConnection(ctx, "connection", &bigquery.ConnectionArgs{
			ConnectionId: pulumi.String("my-connection"),
			Location:     pulumi.String("US"),
			FriendlyName: pulumi.String("👋"),
			Description:  pulumi.String("a riveting description"),
			Spark: &bigquery.ConnectionSparkArgs{
				SparkHistoryServerConfig: &bigquery.ConnectionSparkSparkHistoryServerConfigArgs{
					DataprocCluster: basic.ID(),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var basic = new Gcp.Dataproc.Cluster("basic", new()
    {
        Name = "my-connection",
        Region = "us-central1",
        ClusterConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigArgs
        {
            SoftwareConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigSoftwareConfigArgs
            {
                OverrideProperties = 
                {
                    { "dataproc:dataproc.allow.zero.workers", "true" },
                },
            },
            MasterConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigMasterConfigArgs
            {
                NumInstances = 1,
                MachineType = "e2-standard-2",
                DiskConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigMasterConfigDiskConfigArgs
                {
                    BootDiskSizeGb = 35,
                },
            },
        },
    });

    var connection = new Gcp.BigQuery.Connection("connection", new()
    {
        ConnectionId = "my-connection",
        Location = "US",
        FriendlyName = "👋",
        Description = "a riveting description",
        Spark = new Gcp.BigQuery.Inputs.ConnectionSparkArgs
        {
            SparkHistoryServerConfig = new Gcp.BigQuery.Inputs.ConnectionSparkSparkHistoryServerConfigArgs
            {
                DataprocCluster = basic.Id,
            },
        },
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.Cluster;
import com.pulumi.gcp.dataproc.ClusterArgs;
import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigArgs;
import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigSoftwareConfigArgs;
import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigMasterConfigArgs;
import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigMasterConfigDiskConfigArgs;
import com.pulumi.gcp.bigquery.Connection;
import com.pulumi.gcp.bigquery.ConnectionArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionSparkArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionSparkSparkHistoryServerConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var basic = new Cluster("basic", ClusterArgs.builder()
            .name("my-connection")
            .region("us-central1")
            .clusterConfig(ClusterClusterConfigArgs.builder()
                .softwareConfig(ClusterClusterConfigSoftwareConfigArgs.builder()
                    .overrideProperties(Map.of("dataproc:dataproc.allow.zero.workers", "true"))
                    .build())
                .masterConfig(ClusterClusterConfigMasterConfigArgs.builder()
                    .numInstances(1)
                    .machineType("e2-standard-2")
                    .diskConfig(ClusterClusterConfigMasterConfigDiskConfigArgs.builder()
                        .bootDiskSizeGb(35)
                        .build())
                    .build())
                .build())
            .build());

        var connection = new Connection("connection", ConnectionArgs.builder()
            .connectionId("my-connection")
            .location("US")
            .friendlyName("👋")
            .description("a riveting description")
            .spark(ConnectionSparkArgs.builder()
                .sparkHistoryServerConfig(ConnectionSparkSparkHistoryServerConfigArgs.builder()
                    .dataprocCluster(basic.id())
                    .build())
                .build())
            .build());

    }
}

YAML

resources:
  connection:
    type: gcp:bigquery:Connection
    properties:
      connectionId: my-connection
      location: US
      friendlyName: "\U0001F44B"
      description: a riveting description
      spark:
        sparkHistoryServerConfig:
          dataprocCluster: ${basic.id}
  basic:
    type: gcp:dataproc:Cluster
    properties:
      name: my-connection
      region: us-central1
      clusterConfig:
        softwareConfig:
          overrideProperties:
            dataproc:dataproc.allow.zero.workers: 'true'
        masterConfig:
          numInstances: 1
          machineType: e2-standard-2
          diskConfig:
            bootDiskSizeGb: 35
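
The Spark connection likewise gets a Google-managed service account, exposed as spark.serviceAccountId, which needs permissions on whatever the Spark workloads read or write. A minimal TypeScript sketch, assuming the connection resource from the example above:

// Service account the Spark workloads run as; grant it access to your data.
export const sparkSa = connection.spark.apply(s => s?.serviceAccountId);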

Bigquery Connection Sql With Cmek

TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const instance = new gcp.sql.DatabaseInstance("instance", {
    name: "my-database-instance",
    region: "us-central1",
    databaseVersion: "POSTGRES_11",
    settings: {
        tier: "db-f1-micro",
    },
    deletionProtection: true,
});
const db = new gcp.sql.Database("db", {
    instance: instance.name,
    name: "db",
});
const user = new gcp.sql.User("user", {
    name: "user",
    instance: instance.name,
    password: "tf-test-my-password_77884",
});
const bqSa = gcp.bigquery.getDefaultServiceAccount({});
const keySaUser = new gcp.kms.CryptoKeyIAMMember("key_sa_user", {
    cryptoKeyId: "projects/project/locations/us-central1/keyRings/us-central1/cryptoKeys/bq-key",
    role: "roles/cloudkms.cryptoKeyEncrypterDecrypter",
    member: bqSa.then(bqSa => `serviceAccount:${bqSa.email}`),
});
const bq_connection_cmek = new gcp.bigquery.Connection("bq-connection-cmek", {
    friendlyName: "👋",
    description: "a riveting description",
    location: "US",
    kmsKeyName: "projects/project/locations/us-central1/keyRings/us-central1/cryptoKeys/bq-key",
    cloudSql: {
        instanceId: instance.connectionName,
        database: db.name,
        type: "POSTGRES",
        credential: {
            username: user.name,
            password: user.password,
        },
    },
}, {
    dependsOn: [keySaUser],
});

Python

import pulumi
import pulumi_gcp as gcp

instance = gcp.sql.DatabaseInstance("instance",
    name="my-database-instance",
    region="us-central1",
    database_version="POSTGRES_11",
    settings={
        "tier": "db-f1-micro",
    },
    deletion_protection=True)
db = gcp.sql.Database("db",
    instance=instance.name,
    name="db")
user = gcp.sql.User("user",
    name="user",
    instance=instance.name,
    password="tf-test-my-password_77884")
bq_sa = gcp.bigquery.get_default_service_account()
key_sa_user = gcp.kms.CryptoKeyIAMMember("key_sa_user",
    crypto_key_id="projects/project/locations/us-central1/keyRings/us-central1/cryptoKeys/bq-key",
    role="roles/cloudkms.cryptoKeyEncrypterDecrypter",
    member=f"serviceAccount:{bq_sa.email}")
bq_connection_cmek = gcp.bigquery.Connection("bq-connection-cmek",
    friendly_name="👋",
    description="a riveting description",
    location="US",
    kms_key_name="projects/project/locations/us-central1/keyRings/us-central1/cryptoKeys/bq-key",
    cloud_sql={
        "instance_id": instance.connection_name,
        "database": db.name,
        "type": "POSTGRES",
        "credential": {
            "username": user.name,
            "password": user.password,
        },
    },
    opts = pulumi.ResourceOptions(depends_on=[key_sa_user]))

Go

package main

import (
	"fmt"

	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/kms"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/sql"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		instance, err := sql.NewDatabaseInstance(ctx, "instance", &sql.DatabaseInstanceArgs{
			Name:            pulumi.String("my-database-instance"),
			Region:          pulumi.String("us-central1"),
			DatabaseVersion: pulumi.String("POSTGRES_11"),
			Settings: &sql.DatabaseInstanceSettingsArgs{
				Tier: pulumi.String("db-f1-micro"),
			},
			DeletionProtection: pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		db, err := sql.NewDatabase(ctx, "db", &sql.DatabaseArgs{
			Instance: instance.Name,
			Name:     pulumi.String("db"),
		})
		if err != nil {
			return err
		}
		user, err := sql.NewUser(ctx, "user", &sql.UserArgs{
			Name:     pulumi.String("user"),
			Instance: instance.Name,
			Password: pulumi.String("tf-test-my-password_77884"),
		})
		if err != nil {
			return err
		}
		bqSa, err := bigquery.GetDefaultServiceAccount(ctx, &bigquery.GetDefaultServiceAccountArgs{}, nil)
		if err != nil {
			return err
		}
		keySaUser, err := kms.NewCryptoKeyIAMMember(ctx, "key_sa_user", &kms.CryptoKeyIAMMemberArgs{
			CryptoKeyId: pulumi.String("projects/project/locations/us-central1/keyRings/us-central1/cryptoKeys/bq-key"),
			Role:        pulumi.String("roles/cloudkms.cryptoKeyEncrypterDecrypter"),
			Member:      pulumi.Sprintf("serviceAccount:%v", bqSa.Email),
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewConnection(ctx, "bq-connection-cmek", &bigquery.ConnectionArgs{
			FriendlyName: pulumi.String("👋"),
			Description:  pulumi.String("a riveting description"),
			Location:     pulumi.String("US"),
			KmsKeyName:   pulumi.String("projects/project/locations/us-central1/keyRings/us-central1/cryptoKeys/bq-key"),
			CloudSql: &bigquery.ConnectionCloudSqlArgs{
				InstanceId: instance.ConnectionName,
				Database:   db.Name,
				Type:       pulumi.String("POSTGRES"),
				Credential: &bigquery.ConnectionCloudSqlCredentialArgs{
					Username: user.Name,
					Password: user.Password,
				},
			},
		}, pulumi.DependsOn([]pulumi.Resource{
			keySaUser,
		}))
		if err != nil {
			return err
		}
		return nil
	})
}

C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;

return await Deployment.RunAsync(() => 
{
    var instance = new Gcp.Sql.DatabaseInstance("instance", new()
    {
        Name = "my-database-instance",
        Region = "us-central1",
        DatabaseVersion = "POSTGRES_11",
        Settings = new Gcp.Sql.Inputs.DatabaseInstanceSettingsArgs
        {
            Tier = "db-f1-micro",
        },
        DeletionProtection = true,
    });

    var db = new Gcp.Sql.Database("db", new()
    {
        Instance = instance.Name,
        Name = "db",
    });

    var user = new Gcp.Sql.User("user", new()
    {
        Name = "user",
        Instance = instance.Name,
        Password = "tf-test-my-password_77884",
    });

    var bqSa = Gcp.BigQuery.GetDefaultServiceAccount.Invoke();

    var keySaUser = new Gcp.Kms.CryptoKeyIAMMember("key_sa_user", new()
    {
        CryptoKeyId = "projects/project/locations/us-central1/keyRings/us-central1/cryptoKeys/bq-key",
        Role = "roles/cloudkms.cryptoKeyEncrypterDecrypter",
        Member = $"serviceAccount:{bqSa.Apply(getDefaultServiceAccountResult => getDefaultServiceAccountResult.Email)}",
    });

    var bq_connection_cmek = new Gcp.BigQuery.Connection("bq-connection-cmek", new()
    {
        FriendlyName = "👋",
        Description = "a riveting description",
        Location = "US",
        KmsKeyName = "projects/project/locations/us-central1/keyRings/us-central1/cryptoKeys/bq-key",
        CloudSql = new Gcp.BigQuery.Inputs.ConnectionCloudSqlArgs
        {
            InstanceId = instance.ConnectionName,
            Database = db.Name,
            Type = "POSTGRES",
            Credential = new Gcp.BigQuery.Inputs.ConnectionCloudSqlCredentialArgs
            {
                Username = user.Name,
                Password = user.Password,
            },
        },
    }, new CustomResourceOptions
    {
        DependsOn =
        {
            keySaUser,
        },
    });

});

Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.sql.DatabaseInstance;
import com.pulumi.gcp.sql.DatabaseInstanceArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsArgs;
import com.pulumi.gcp.sql.Database;
import com.pulumi.gcp.sql.DatabaseArgs;
import com.pulumi.gcp.sql.User;
import com.pulumi.gcp.sql.UserArgs;
import com.pulumi.gcp.bigquery.BigqueryFunctions;
import com.pulumi.gcp.bigquery.inputs.GetDefaultServiceAccountArgs;
import com.pulumi.gcp.kms.CryptoKeyIAMMember;
import com.pulumi.gcp.kms.CryptoKeyIAMMemberArgs;
import com.pulumi.gcp.bigquery.Connection;
import com.pulumi.gcp.bigquery.ConnectionArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionCloudSqlArgs;
import com.pulumi.gcp.bigquery.inputs.ConnectionCloudSqlCredentialArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var instance = new DatabaseInstance("instance", DatabaseInstanceArgs.builder()
            .name("my-database-instance")
            .region("us-central1")
            .databaseVersion("POSTGRES_11")
            .settings(DatabaseInstanceSettingsArgs.builder()
                .tier("db-f1-micro")
                .build())
            .deletionProtection(true)
            .build());

        var db = new Database("db", DatabaseArgs.builder()
            .instance(instance.name())
            .name("db")
            .build());

        var user = new User("user", UserArgs.builder()
            .name("user")
            .instance(instance.name())
            .password("tf-test-my-password_77884")
            .build());

        final var bqSa = BigqueryFunctions.getDefaultServiceAccount();

        var keySaUser = new CryptoKeyIAMMember("keySaUser", CryptoKeyIAMMemberArgs.builder()
            .cryptoKeyId("projects/project/locations/us-central1/keyRings/us-central1/cryptoKeys/bq-key")
            .role("roles/cloudkms.cryptoKeyEncrypterDecrypter")
            .member(bqSa.applyValue(getDefaultServiceAccountResult -> String.format("serviceAccount:%s", getDefaultServiceAccountResult.email())))
            .build());

        var bq_connection_cmek = new Connection("bq-connection-cmek", ConnectionArgs.builder()
            .friendlyName("👋")
            .description("a riveting description")
            .location("US")
            .kmsKeyName("projects/project/locations/us-central1/keyRings/us-central1/cryptoKeys/bq-key")
            .cloudSql(ConnectionCloudSqlArgs.builder()
                .instanceId(instance.connectionName())
                .database(db.name())
                .type("POSTGRES")
                .credential(ConnectionCloudSqlCredentialArgs.builder()
                    .username(user.name())
                    .password(user.password())
                    .build())
                .build())
            .build(), CustomResourceOptions.builder()
                .dependsOn(keySaUser)
                .build());

    }
}

YAML

resources:
  instance:
    type: gcp:sql:DatabaseInstance
    properties:
      name: my-database-instance
      region: us-central1
      databaseVersion: POSTGRES_11
      settings:
        tier: db-f1-micro
      deletionProtection: true
  db:
    type: gcp:sql:Database
    properties:
      instance: ${instance.name}
      name: db
  user:
    type: gcp:sql:User
    properties:
      name: user
      instance: ${instance.name}
      password: tf-test-my-password_77884
  keySaUser:
    type: gcp:kms:CryptoKeyIAMMember
    name: key_sa_user
    properties:
      cryptoKeyId: projects/project/locations/us-central1/keyRings/us-central1/cryptoKeys/bq-key
      role: roles/cloudkms.cryptoKeyEncrypterDecrypter
      member: serviceAccount:${bqSa.email}
  bq-connection-cmek:
    type: gcp:bigquery:Connection
    properties:
      friendlyName: "\U0001F44B"
      description: a riveting description
      location: US
      kmsKeyName: projects/project/locations/us-central1/keyRings/us-central1/cryptoKeys/bq-key
      cloudSql:
        instanceId: ${instance.connectionName}
        database: ${db.name}
        type: POSTGRES
        credential:
          username: ${user.name}
          password: ${user.password}
    options:
      dependsOn:
        - ${keySaUser}
variables:
  bqSa:
    fn::invoke:
      function: gcp:bigquery:getDefaultServiceAccount
      arguments: {}
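
One caveat with this example: the Cloud SQL password is a plaintext literal in the program. A safer pattern, sketched below in TypeScript with a hypothetical dbPassword config key, is to read it from stack configuration as a secret.

import * as pulumi from "@pulumi/pulumi";

// Hypothetical config key, set with: pulumi config set --secret dbPassword <value>
const cfg = new pulumi.Config();
const dbPassword = cfg.requireSecret("dbPassword");
// Pass dbPassword to gcp.sql.User and the connection credential instead of a literal.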

Create Connection Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

TypeScript

new Connection(name: string, args?: ConnectionArgs, opts?: CustomResourceOptions);

Python

@overload
def Connection(resource_name: str,
               args: Optional[ConnectionArgs] = None,
               opts: Optional[ResourceOptions] = None)

@overload
def Connection(resource_name: str,
               opts: Optional[ResourceOptions] = None,
               aws: Optional[ConnectionAwsArgs] = None,
               azure: Optional[ConnectionAzureArgs] = None,
               cloud_resource: Optional[ConnectionCloudResourceArgs] = None,
               cloud_spanner: Optional[ConnectionCloudSpannerArgs] = None,
               cloud_sql: Optional[ConnectionCloudSqlArgs] = None,
               connection_id: Optional[str] = None,
               description: Optional[str] = None,
               friendly_name: Optional[str] = None,
               kms_key_name: Optional[str] = None,
               location: Optional[str] = None,
               project: Optional[str] = None,
               spark: Optional[ConnectionSparkArgs] = None)

Go

func NewConnection(ctx *Context, name string, args *ConnectionArgs, opts ...ResourceOption) (*Connection, error)

C#

public Connection(string name, ConnectionArgs? args = null, CustomResourceOptions? opts = null)

Java

public Connection(String name, ConnectionArgs args)
public Connection(String name, ConnectionArgs args, CustomResourceOptions options)

YAML

type: gcp:bigquery:Connection
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

TypeScript
name (required): string
The unique name of the resource.
args: ConnectionArgs
The arguments to resource properties.
opts: CustomResourceOptions
Bag of options to control resource's behavior.

Python
resource_name (required): str
The unique name of the resource.
args: ConnectionArgs
The arguments to resource properties.
opts: ResourceOptions
Bag of options to control resource's behavior.

Go
ctx: Context
Context object for the current deployment.
name (required): string
The unique name of the resource.
args: ConnectionArgs
The arguments to resource properties.
opts: ResourceOption
Bag of options to control resource's behavior.

C#
name (required): string
The unique name of the resource.
args: ConnectionArgs
The arguments to resource properties.
opts: CustomResourceOptions
Bag of options to control resource's behavior.

Java
name (required): String
The unique name of the resource.
args (required): ConnectionArgs
The arguments to resource properties.
options: CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

var connectionResource = new Gcp.BigQuery.Connection("connectionResource", new()
{
    Aws = new Gcp.BigQuery.Inputs.ConnectionAwsArgs
    {
        AccessRole = new Gcp.BigQuery.Inputs.ConnectionAwsAccessRoleArgs
        {
            IamRoleId = "string",
            Identity = "string",
        },
    },
    Azure = new Gcp.BigQuery.Inputs.ConnectionAzureArgs
    {
        CustomerTenantId = "string",
        Application = "string",
        ClientId = "string",
        FederatedApplicationClientId = "string",
        Identity = "string",
        ObjectId = "string",
        RedirectUri = "string",
    },
    CloudResource = new Gcp.BigQuery.Inputs.ConnectionCloudResourceArgs
    {
        ServiceAccountId = "string",
    },
    CloudSpanner = new Gcp.BigQuery.Inputs.ConnectionCloudSpannerArgs
    {
        Database = "string",
        DatabaseRole = "string",
        MaxParallelism = 0,
        UseDataBoost = false,
        UseParallelism = false,
    },
    CloudSql = new Gcp.BigQuery.Inputs.ConnectionCloudSqlArgs
    {
        Credential = new Gcp.BigQuery.Inputs.ConnectionCloudSqlCredentialArgs
        {
            Password = "string",
            Username = "string",
        },
        Database = "string",
        InstanceId = "string",
        Type = "string",
        ServiceAccountId = "string",
    },
    ConnectionId = "string",
    Description = "string",
    FriendlyName = "string",
    KmsKeyName = "string",
    Location = "string",
    Project = "string",
    Spark = new Gcp.BigQuery.Inputs.ConnectionSparkArgs
    {
        MetastoreServiceConfig = new Gcp.BigQuery.Inputs.ConnectionSparkMetastoreServiceConfigArgs
        {
            MetastoreService = "string",
        },
        ServiceAccountId = "string",
        SparkHistoryServerConfig = new Gcp.BigQuery.Inputs.ConnectionSparkSparkHistoryServerConfigArgs
        {
            DataprocCluster = "string",
        },
    },
});
example, err := bigquery.NewConnection(ctx, "connectionResource", &bigquery.ConnectionArgs{
	Aws: &bigquery.ConnectionAwsArgs{
		AccessRole: &bigquery.ConnectionAwsAccessRoleArgs{
			IamRoleId: pulumi.String("string"),
			Identity:  pulumi.String("string"),
		},
	},
	Azure: &bigquery.ConnectionAzureArgs{
		CustomerTenantId:             pulumi.String("string"),
		Application:                  pulumi.String("string"),
		ClientId:                     pulumi.String("string"),
		FederatedApplicationClientId: pulumi.String("string"),
		Identity:                     pulumi.String("string"),
		ObjectId:                     pulumi.String("string"),
		RedirectUri:                  pulumi.String("string"),
	},
	CloudResource: &bigquery.ConnectionCloudResourceArgs{
		ServiceAccountId: pulumi.String("string"),
	},
	CloudSpanner: &bigquery.ConnectionCloudSpannerArgs{
		Database:       pulumi.String("string"),
		DatabaseRole:   pulumi.String("string"),
		MaxParallelism: pulumi.Int(0),
		UseDataBoost:   pulumi.Bool(false),
		UseParallelism: pulumi.Bool(false),
	},
	CloudSql: &bigquery.ConnectionCloudSqlArgs{
		Credential: &bigquery.ConnectionCloudSqlCredentialArgs{
			Password: pulumi.String("string"),
			Username: pulumi.String("string"),
		},
		Database:         pulumi.String("string"),
		InstanceId:       pulumi.String("string"),
		Type:             pulumi.String("string"),
		ServiceAccountId: pulumi.String("string"),
	},
	ConnectionId: pulumi.String("string"),
	Description:  pulumi.String("string"),
	FriendlyName: pulumi.String("string"),
	KmsKeyName:   pulumi.String("string"),
	Location:     pulumi.String("string"),
	Project:      pulumi.String("string"),
	Spark: &bigquery.ConnectionSparkArgs{
		MetastoreServiceConfig: &bigquery.ConnectionSparkMetastoreServiceConfigArgs{
			MetastoreService: pulumi.String("string"),
		},
		ServiceAccountId: pulumi.String("string"),
		SparkHistoryServerConfig: &bigquery.ConnectionSparkSparkHistoryServerConfigArgs{
			DataprocCluster: pulumi.String("string"),
		},
	},
})
var connectionResource = new Connection("connectionResource", ConnectionArgs.builder()
    .aws(ConnectionAwsArgs.builder()
        .accessRole(ConnectionAwsAccessRoleArgs.builder()
            .iamRoleId("string")
            .identity("string")
            .build())
        .build())
    .azure(ConnectionAzureArgs.builder()
        .customerTenantId("string")
        .application("string")
        .clientId("string")
        .federatedApplicationClientId("string")
        .identity("string")
        .objectId("string")
        .redirectUri("string")
        .build())
    .cloudResource(ConnectionCloudResourceArgs.builder()
        .serviceAccountId("string")
        .build())
    .cloudSpanner(ConnectionCloudSpannerArgs.builder()
        .database("string")
        .databaseRole("string")
        .maxParallelism(0)
        .useDataBoost(false)
        .useParallelism(false)
        .build())
    .cloudSql(ConnectionCloudSqlArgs.builder()
        .credential(ConnectionCloudSqlCredentialArgs.builder()
            .password("string")
            .username("string")
            .build())
        .database("string")
        .instanceId("string")
        .type("string")
        .serviceAccountId("string")
        .build())
    .connectionId("string")
    .description("string")
    .friendlyName("string")
    .kmsKeyName("string")
    .location("string")
    .project("string")
    .spark(ConnectionSparkArgs.builder()
        .metastoreServiceConfig(ConnectionSparkMetastoreServiceConfigArgs.builder()
            .metastoreService("string")
            .build())
        .serviceAccountId("string")
        .sparkHistoryServerConfig(ConnectionSparkSparkHistoryServerConfigArgs.builder()
            .dataprocCluster("string")
            .build())
        .build())
    .build());
connection_resource = gcp.bigquery.Connection("connectionResource",
    aws={
        "access_role": {
            "iam_role_id": "string",
            "identity": "string",
        },
    },
    azure={
        "customer_tenant_id": "string",
        "application": "string",
        "client_id": "string",
        "federated_application_client_id": "string",
        "identity": "string",
        "object_id": "string",
        "redirect_uri": "string",
    },
    cloud_resource={
        "service_account_id": "string",
    },
    cloud_spanner={
        "database": "string",
        "database_role": "string",
        "max_parallelism": 0,
        "use_data_boost": False,
        "use_parallelism": False,
    },
    cloud_sql={
        "credential": {
            "password": "string",
            "username": "string",
        },
        "database": "string",
        "instance_id": "string",
        "type": "string",
        "service_account_id": "string",
    },
    connection_id="string",
    description="string",
    friendly_name="string",
    kms_key_name="string",
    location="string",
    project="string",
    spark={
        "metastore_service_config": {
            "metastore_service": "string",
        },
        "service_account_id": "string",
        "spark_history_server_config": {
            "dataproc_cluster": "string",
        },
    })
const connectionResource = new gcp.bigquery.Connection("connectionResource", {
    aws: {
        accessRole: {
            iamRoleId: "string",
            identity: "string",
        },
    },
    azure: {
        customerTenantId: "string",
        application: "string",
        clientId: "string",
        federatedApplicationClientId: "string",
        identity: "string",
        objectId: "string",
        redirectUri: "string",
    },
    cloudResource: {
        serviceAccountId: "string",
    },
    cloudSpanner: {
        database: "string",
        databaseRole: "string",
        maxParallelism: 0,
        useDataBoost: false,
        useParallelism: false,
    },
    cloudSql: {
        credential: {
            password: "string",
            username: "string",
        },
        database: "string",
        instanceId: "string",
        type: "string",
        serviceAccountId: "string",
    },
    connectionId: "string",
    description: "string",
    friendlyName: "string",
    kmsKeyName: "string",
    location: "string",
    project: "string",
    spark: {
        metastoreServiceConfig: {
            metastoreService: "string",
        },
        serviceAccountId: "string",
        sparkHistoryServerConfig: {
            dataprocCluster: "string",
        },
    },
});
type: gcp:bigquery:Connection
properties:
    aws:
        accessRole:
            iamRoleId: string
            identity: string
    azure:
        application: string
        clientId: string
        customerTenantId: string
        federatedApplicationClientId: string
        identity: string
        objectId: string
        redirectUri: string
    cloudResource:
        serviceAccountId: string
    cloudSpanner:
        database: string
        databaseRole: string
        maxParallelism: 0
        useDataBoost: false
        useParallelism: false
    cloudSql:
        credential:
            password: string
            username: string
        database: string
        instanceId: string
        serviceAccountId: string
        type: string
    connectionId: string
    description: string
    friendlyName: string
    kmsKeyName: string
    location: string
    project: string
    spark:
        metastoreServiceConfig:
            metastoreService: string
        serviceAccountId: string
        sparkHistoryServerConfig:
            dataprocCluster: string

Connection Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
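For example, the following two declarations are equivalent (the resource names here are illustrative):

import pulumi_gcp as gcp

# Inputs passed as a typed argument class...
conn_a = gcp.bigquery.Connection("conn-a",
    connection_id="conn-a",
    location="US",
    cloud_resource=gcp.bigquery.ConnectionCloudResourceArgs())

# ...or as a plain dictionary literal.
conn_b = gcp.bigquery.Connection("conn-b",
    connection_id="conn-b",
    location="US",
    cloud_resource={})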

The Connection resource accepts the following input properties:

Aws ConnectionAws
Connection properties specific to Amazon Web Services. Structure is documented below.
Azure ConnectionAzure
Container for connection properties specific to Azure. Structure is documented below.
CloudResource ConnectionCloudResource
Container for connection properties for delegation of access to GCP resources. Structure is documented below.
CloudSpanner ConnectionCloudSpanner
Connection properties specific to Cloud Spanner. Structure is documented below.
CloudSql ConnectionCloudSql
Connection properties specific to Cloud SQL. Structure is documented below.
ConnectionId Changes to this property will trigger replacement. string
Optional connection ID that should be assigned to the created connection.
Description string
A description for the connection.
FriendlyName string
A descriptive name for the connection.
KmsKeyName string
Optional. The Cloud KMS key that is used for encryption. Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]
Location Changes to this property will trigger replacement. string
The geographic location where the connection should reside. The Cloud SQL instance must be in the same location as the connection, with the following exceptions: Cloud SQL us-central1 maps to BigQuery US, and Cloud SQL europe-west1 maps to BigQuery EU. Examples: US, EU, asia-northeast1, us-central1, europe-west1. For Spanner connections, use the same region as the Spanner instance. The allowed AWS region is aws-us-east-1; the allowed Azure region is azure-eastus2.
Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
Spark ConnectionSpark
Container for connection properties to execute stored procedures for Apache Spark. Structure is documented below.
Aws ConnectionAwsArgs
Connection properties specific to Amazon Web Services. Structure is documented below.
Azure ConnectionAzureArgs
Container for connection properties specific to Azure. Structure is documented below.
CloudResource ConnectionCloudResourceArgs
Container for connection properties for delegation of access to GCP resources. Structure is documented below.
CloudSpanner ConnectionCloudSpannerArgs
Connection properties specific to Cloud Spanner. Structure is documented below.
CloudSql ConnectionCloudSqlArgs
Connection properties specific to Cloud SQL. Structure is documented below.
ConnectionId Changes to this property will trigger replacement. string
Optional connection ID that should be assigned to the created connection.
Description string
A description for the connection.
FriendlyName string
A descriptive name for the connection.
KmsKeyName string
Optional. The Cloud KMS key that is used for encryption. Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]
Location Changes to this property will trigger replacement. string
The geographic location where the connection should reside. The Cloud SQL instance must be in the same location as the connection, with the following exceptions: Cloud SQL us-central1 maps to BigQuery US, and Cloud SQL europe-west1 maps to BigQuery EU. Examples: US, EU, asia-northeast1, us-central1, europe-west1. For Spanner connections, use the same region as the Spanner instance. The allowed AWS region is aws-us-east-1; the allowed Azure region is azure-eastus2.
Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
Spark ConnectionSparkArgs
Container for connection properties to execute stored procedures for Apache Spark. Structure is documented below.
aws ConnectionAws
Connection properties specific to Amazon Web Services. Structure is documented below.
azure ConnectionAzure
Container for connection properties specific to Azure. Structure is documented below.
cloudResource ConnectionCloudResource
Container for connection properties for delegation of access to GCP resources. Structure is documented below.
cloudSpanner ConnectionCloudSpanner
Connection properties specific to Cloud Spanner. Structure is documented below.
cloudSql ConnectionCloudSql
Connection properties specific to Cloud SQL. Structure is documented below.
connectionId Changes to this property will trigger replacement. String
Optional connection ID that should be assigned to the created connection.
description String
A description for the connection.
friendlyName String
A descriptive name for the connection.
kmsKeyName String
Optional. The Cloud KMS key that is used for encryption. Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]
location Changes to this property will trigger replacement. String
The geographic location where the connection should reside. The Cloud SQL instance must be in the same location as the connection, with the following exceptions: Cloud SQL us-central1 maps to BigQuery US, and Cloud SQL europe-west1 maps to BigQuery EU. Examples: US, EU, asia-northeast1, us-central1, europe-west1. For Spanner connections, use the same region as the Spanner instance. The allowed AWS region is aws-us-east-1; the allowed Azure region is azure-eastus2.
project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
spark ConnectionSpark
Container for connection properties to execute stored procedures for Apache Spark. Structure is documented below.
aws ConnectionAws
Connection properties specific to Amazon Web Services. Structure is documented below.
azure ConnectionAzure
Container for connection properties specific to Azure. Structure is documented below.
cloudResource ConnectionCloudResource
Container for connection properties for delegation of access to GCP resources. Structure is documented below.
cloudSpanner ConnectionCloudSpanner
Connection properties specific to Cloud Spanner. Structure is documented below.
cloudSql ConnectionCloudSql
Connection properties specific to Cloud SQL. Structure is documented below.
connectionId Changes to this property will trigger replacement. string
Optional connection ID that should be assigned to the created connection.
description string
A description for the connection.
friendlyName string
A descriptive name for the connection.
kmsKeyName string
Optional. The Cloud KMS key that is used for encryption. Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]
location Changes to this property will trigger replacement. string
The geographic location where the connection should reside. The Cloud SQL instance must be in the same location as the connection, with the following exceptions: Cloud SQL us-central1 maps to BigQuery US, and Cloud SQL europe-west1 maps to BigQuery EU. Examples: US, EU, asia-northeast1, us-central1, europe-west1. For Spanner connections, use the same region as the Spanner instance. The allowed AWS region is aws-us-east-1; the allowed Azure region is azure-eastus2.
project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
spark ConnectionSpark
Container for connection properties to execute stored procedures for Apache Spark. Structure is documented below.
aws ConnectionAwsArgs
Connection properties specific to Amazon Web Services. Structure is documented below.
azure ConnectionAzureArgs
Container for connection properties specific to Azure. Structure is documented below.
cloud_resource ConnectionCloudResourceArgs
Container for connection properties for delegation of access to GCP resources. Structure is documented below.
cloud_spanner ConnectionCloudSpannerArgs
Connection properties specific to Cloud Spanner. Structure is documented below.
cloud_sql ConnectionCloudSqlArgs
Connection properties specific to Cloud SQL. Structure is documented below.
connection_id Changes to this property will trigger replacement. str
Optional connection ID that should be assigned to the created connection.
description str
A description for the connection.
friendly_name str
A descriptive name for the connection.
kms_key_name str
Optional. The Cloud KMS key that is used for encryption. Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]
location Changes to this property will trigger replacement. str
The geographic location where the connection should reside. The Cloud SQL instance must be in the same location as the connection, with the following exceptions: Cloud SQL us-central1 maps to BigQuery US, and Cloud SQL europe-west1 maps to BigQuery EU. Examples: US, EU, asia-northeast1, us-central1, europe-west1. For Spanner connections, use the same region as the Spanner instance. The allowed AWS region is aws-us-east-1; the allowed Azure region is azure-eastus2.
project Changes to this property will trigger replacement. str
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
spark ConnectionSparkArgs
Container for connection properties to execute stored procedures for Apache Spark. Structure is documented below.
aws Property Map
Connection properties specific to Amazon Web Services. Structure is documented below.
azure Property Map
Container for connection properties specific to Azure. Structure is documented below.
cloudResource Property Map
Container for connection properties for delegation of access to GCP resources. Structure is documented below.
cloudSpanner Property Map
Connection properties specific to Cloud Spanner. Structure is documented below.
cloudSql Property Map
Connection properties specific to Cloud SQL. Structure is documented below.
connectionId Changes to this property will trigger replacement. String
Optional connection ID that should be assigned to the created connection.
description String
A description for the connection.
friendlyName String
A descriptive name for the connection.
kmsKeyName String
Optional. The Cloud KMS key that is used for encryption. Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]
location Changes to this property will trigger replacement. String
The geographic location where the connection should reside. The Cloud SQL instance must be in the same location as the connection, with the following exceptions: Cloud SQL us-central1 maps to BigQuery US, and Cloud SQL europe-west1 maps to BigQuery EU. Examples: US, EU, asia-northeast1, us-central1, europe-west1. For Spanner connections, use the same region as the Spanner instance. The allowed AWS region is aws-us-east-1; the allowed Azure region is azure-eastus2.
project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
spark Property Map
Container for connection properties to execute stored procedures for Apache Spark. Structure is documented below.

Outputs

All input properties are implicitly available as output properties. Additionally, the Connection resource produces the following output properties:

HasCredential bool
True if the connection has a credential assigned.
Id string
The provider-assigned unique ID for this managed resource.
Name string
The resource name of the connection in the form of: "projects/{project_id}/locations/{location_id}/connections/{connectionId}"
HasCredential bool
True if the connection has a credential assigned.
Id string
The provider-assigned unique ID for this managed resource.
Name string
The resource name of the connection in the form of: "projects/{project_id}/locations/{location_id}/connections/{connectionId}"
hasCredential Boolean
True if the connection has a credential assigned.
id String
The provider-assigned unique ID for this managed resource.
name String
The resource name of the connection in the form of: "projects/{project_id}/locations/{location_id}/connections/{connectionId}"
hasCredential boolean
True if the connection has a credential assigned.
id string
The provider-assigned unique ID for this managed resource.
name string
The resource name of the connection in the form of: "projects/{project_id}/locations/{location_id}/connections/{connectionId}"
has_credential bool
True if the connection has a credential assigned.
id str
The provider-assigned unique ID for this managed resource.
name str
The resource name of the connection in the form of: "projects/{project_id}/locations/{location_id}/connections/{connectionId}"
hasCredential Boolean
True if the connection has a credential assigned.
id String
The provider-assigned unique ID for this managed resource.
name String
The resource name of the connection in the form of: "projects/{project_id}/locations/{location_id}/connections/{connectionId}"
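As a minimal Python sketch, the computed outputs above can be exported from a stack program (reusing the cloud-resource example from earlier in this page):

import pulumi
import pulumi_gcp as gcp

connection = gcp.bigquery.Connection("connection",
    connection_id="my-connection",
    location="US",
    cloud_resource={})

# Computed outputs such as name and has_credential are available on the resource.
pulumi.export("connectionName", connection.name)
pulumi.export("hasCredential", connection.has_credential)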

Look up Existing Connection Resource

Get an existing Connection resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: ConnectionState, opts?: CustomResourceOptions): Connection
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        aws: Optional[ConnectionAwsArgs] = None,
        azure: Optional[ConnectionAzureArgs] = None,
        cloud_resource: Optional[ConnectionCloudResourceArgs] = None,
        cloud_spanner: Optional[ConnectionCloudSpannerArgs] = None,
        cloud_sql: Optional[ConnectionCloudSqlArgs] = None,
        connection_id: Optional[str] = None,
        description: Optional[str] = None,
        friendly_name: Optional[str] = None,
        has_credential: Optional[bool] = None,
        kms_key_name: Optional[str] = None,
        location: Optional[str] = None,
        name: Optional[str] = None,
        project: Optional[str] = None,
        spark: Optional[ConnectionSparkArgs] = None) -> Connection
func GetConnection(ctx *Context, name string, id IDInput, state *ConnectionState, opts ...ResourceOption) (*Connection, error)
public static Connection Get(string name, Input<string> id, ConnectionState? state, CustomResourceOptions? opts = null)
public static Connection get(String name, Output<String> id, ConnectionState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
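For example, in Python (the connection path below is a placeholder):

import pulumi_gcp as gcp

# Adopt an existing connection into the program by its provider ID.
existing = gcp.bigquery.Connection.get("existing-connection",
    "projects/my-project/locations/US/connections/my-connection")
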
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
Aws ConnectionAws
Connection properties specific to Amazon Web Services. Structure is documented below.
Azure ConnectionAzure
Container for connection properties specific to Azure. Structure is documented below.
CloudResource ConnectionCloudResource
Container for connection properties for delegation of access to GCP resources. Structure is documented below.
CloudSpanner ConnectionCloudSpanner
Connection properties specific to Cloud Spanner. Structure is documented below.
CloudSql ConnectionCloudSql
Connection properties specific to Cloud SQL. Structure is documented below.
ConnectionId Changes to this property will trigger replacement. string
Optional connection ID that should be assigned to the created connection.
Description string
A description for the connection.
FriendlyName string
A descriptive name for the connection.
HasCredential bool
True if the connection has a credential assigned.
KmsKeyName string
Optional. The Cloud KMS key that is used for encryption. Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]
Location Changes to this property will trigger replacement. string
The geographic location where the connection should reside. The Cloud SQL instance must be in the same location as the connection, with the following exceptions: Cloud SQL us-central1 maps to BigQuery US, and Cloud SQL europe-west1 maps to BigQuery EU. Examples: US, EU, asia-northeast1, us-central1, europe-west1. For Spanner connections, use the same region as the Spanner instance. The allowed AWS region is aws-us-east-1; the allowed Azure region is azure-eastus2.
Name string
The resource name of the connection in the form of: "projects/{project_id}/locations/{location_id}/connections/{connectionId}"
Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
Spark ConnectionSpark
Container for connection properties to execute stored procedures for Apache Spark. Structure is documented below.
Aws ConnectionAwsArgs
Connection properties specific to Amazon Web Services. Structure is documented below.
Azure ConnectionAzureArgs
Container for connection properties specific to Azure. Structure is documented below.
CloudResource ConnectionCloudResourceArgs
Container for connection properties for delegation of access to GCP resources. Structure is documented below.
CloudSpanner ConnectionCloudSpannerArgs
Connection properties specific to Cloud Spanner. Structure is documented below.
CloudSql ConnectionCloudSqlArgs
Connection properties specific to Cloud SQL. Structure is documented below.
ConnectionId Changes to this property will trigger replacement. string
Optional connection ID that should be assigned to the created connection.
Description string
A description for the connection.
FriendlyName string
A descriptive name for the connection.
HasCredential bool
True if the connection has a credential assigned.
KmsKeyName string
Optional. The Cloud KMS key that is used for encryption. Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]
Location Changes to this property will trigger replacement. string
The geographic location where the connection should reside. The Cloud SQL instance must be in the same location as the connection, with the following exceptions: Cloud SQL us-central1 maps to BigQuery US, and Cloud SQL europe-west1 maps to BigQuery EU. Examples: US, EU, asia-northeast1, us-central1, europe-west1. For Spanner connections, use the same region as the Spanner instance. The allowed AWS region is aws-us-east-1; the allowed Azure region is azure-eastus2.
Name string
The resource name of the connection in the form of: "projects/{project_id}/locations/{location_id}/connections/{connectionId}"
Project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
Spark ConnectionSparkArgs
Container for connection properties to execute stored procedures for Apache Spark. Structure is documented below.
aws ConnectionAws
Connection properties specific to Amazon Web Services. Structure is documented below.
azure ConnectionAzure
Container for connection properties specific to Azure. Structure is documented below.
cloudResource ConnectionCloudResource
Container for connection properties for delegation of access to GCP resources. Structure is documented below.
cloudSpanner ConnectionCloudSpanner
Connection properties specific to Cloud Spanner. Structure is documented below.
cloudSql ConnectionCloudSql
Connection properties specific to Cloud SQL. Structure is documented below.
connectionId Changes to this property will trigger replacement. String
Optional connection ID that should be assigned to the created connection.
description String
A description for the connection.
friendlyName String
A descriptive name for the connection.
hasCredential Boolean
True if the connection has a credential assigned.
kmsKeyName String
Optional. The Cloud KMS key that is used for encryption. Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]
location Changes to this property will trigger replacement. String
The geographic location where the connection should reside. The Cloud SQL instance must be in the same location as the connection, with the following exceptions: Cloud SQL us-central1 maps to BigQuery US, and Cloud SQL europe-west1 maps to BigQuery EU. Examples: US, EU, asia-northeast1, us-central1, europe-west1. For Spanner connections, use the same region as the Spanner instance. The allowed AWS region is aws-us-east-1; the allowed Azure region is azure-eastus2.
name String
The resource name of the connection in the form of: "projects/{project_id}/locations/{location_id}/connections/{connectionId}"
project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
spark ConnectionSpark
Container for connection properties to execute stored procedures for Apache Spark. Structure is documented below.
aws ConnectionAws
Connection properties specific to Amazon Web Services. Structure is documented below.
azure ConnectionAzure
Container for connection properties specific to Azure. Structure is documented below.
cloudResource ConnectionCloudResource
Container for connection properties for delegation of access to GCP resources. Structure is documented below.
cloudSpanner ConnectionCloudSpanner
Connection properties specific to Cloud Spanner. Structure is documented below.
cloudSql ConnectionCloudSql
Connection properties specific to Cloud SQL. Structure is documented below.
connectionId Changes to this property will trigger replacement. string
Optional connection ID that should be assigned to the created connection.
description string
A description for the connection.
friendlyName string
A descriptive name for the connection.
hasCredential boolean
True if the connection has a credential assigned.
kmsKeyName string
Optional. The Cloud KMS key that is used for encryption. Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]
location Changes to this property will trigger replacement. string
The geographic location where the connection should reside. The Cloud SQL instance must be in the same location as the connection, with the following exceptions: Cloud SQL us-central1 maps to BigQuery US, and Cloud SQL europe-west1 maps to BigQuery EU. Examples: US, EU, asia-northeast1, us-central1, europe-west1. For Spanner connections, use the same region as the Spanner instance. The allowed AWS region is aws-us-east-1; the allowed Azure region is azure-eastus2.
name string
The resource name of the connection in the form of: "projects/{project_id}/locations/{location_id}/connections/{connectionId}"
project Changes to this property will trigger replacement. string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
spark ConnectionSpark
Container for connection properties to execute stored procedures for Apache Spark. Structure is documented below.
aws ConnectionAwsArgs
Connection properties specific to Amazon Web Services. Structure is documented below.
azure ConnectionAzureArgs
Container for connection properties specific to Azure. Structure is documented below.
cloud_resource ConnectionCloudResourceArgs
Container for connection properties for delegation of access to GCP resources. Structure is documented below.
cloud_spanner ConnectionCloudSpannerArgs
Connection properties specific to Cloud Spanner. Structure is documented below.
cloud_sql ConnectionCloudSqlArgs
Connection properties specific to Cloud SQL. Structure is documented below.
connection_id Changes to this property will trigger replacement. str
Optional connection ID that should be assigned to the created connection.
description str
A description for the connection.
friendly_name str
A descriptive name for the connection.
has_credential bool
True if the connection has a credential assigned.
kms_key_name str
Optional. The Cloud KMS key that is used for encryption. Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]
location Changes to this property will trigger replacement. str
The geographic location where the connection should reside. The Cloud SQL instance must be in the same location as the connection, with the following exceptions: Cloud SQL us-central1 maps to BigQuery US, and Cloud SQL europe-west1 maps to BigQuery EU. Examples: US, EU, asia-northeast1, us-central1, europe-west1. For Spanner connections, use the same region as the Spanner instance. The allowed AWS region is aws-us-east-1; the allowed Azure region is azure-eastus2.
name str
The resource name of the connection in the form of: "projects/{project_id}/locations/{location_id}/connections/{connectionId}"
project Changes to this property will trigger replacement. str
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
spark ConnectionSparkArgs
Container for connection properties to execute stored procedures for Apache Spark. Structure is documented below.
aws Property Map
Connection properties specific to Amazon Web Services. Structure is documented below.
azure Property Map
Container for connection properties specific to Azure. Structure is documented below.
cloudResource Property Map
Container for connection properties for delegation of access to GCP resources. Structure is documented below.
cloudSpanner Property Map
Connection properties specific to Cloud Spanner. Structure is documented below.
cloudSql Property Map
Connection properties specific to Cloud SQL. Structure is documented below.
connectionId Changes to this property will trigger replacement. String
Optional connection ID that should be assigned to the created connection.
description String
A description for the connection.
friendlyName String
A descriptive name for the connection.
hasCredential Boolean
True if the connection has a credential assigned.
kmsKeyName String
Optional. The Cloud KMS key that is used for encryption. Example: projects/[kms_project_id]/locations/[region]/keyRings/[key_region]/cryptoKeys/[key]
location Changes to this property will trigger replacement. String
The geographic location where the connection should reside. The Cloud SQL instance must be in the same location as the connection, with the following exceptions: Cloud SQL us-central1 maps to BigQuery US, and Cloud SQL europe-west1 maps to BigQuery EU. Examples: US, EU, asia-northeast1, us-central1, europe-west1. For Spanner connections, use the same region as the Spanner instance. The allowed AWS region is aws-us-east-1; the allowed Azure region is azure-eastus2.
name String
The resource name of the connection in the form of: "projects/{project_id}/locations/{location_id}/connections/{connectionId}"
project Changes to this property will trigger replacement. String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
spark Property Map
Container for connection properties to execute stored procedures for Apache Spark. Structure is documented below.

Supporting Types

ConnectionAws, ConnectionAwsArgs

AccessRole This property is required. ConnectionAwsAccessRole
Authentication using a Google-owned service account to assume into the customer's AWS IAM role. Structure is documented below.
AccessRole This property is required. ConnectionAwsAccessRole
Authentication using a Google-owned service account to assume into the customer's AWS IAM role. Structure is documented below.
accessRole This property is required. ConnectionAwsAccessRole
Authentication using a Google-owned service account to assume into the customer's AWS IAM role. Structure is documented below.
accessRole This property is required. ConnectionAwsAccessRole
Authentication using a Google-owned service account to assume into the customer's AWS IAM role. Structure is documented below.
access_role This property is required. ConnectionAwsAccessRole
Authentication using a Google-owned service account to assume into the customer's AWS IAM role. Structure is documented below.
accessRole This property is required. Property Map
Authentication using a Google-owned service account to assume into the customer's AWS IAM role. Structure is documented below.
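As a minimal Python sketch, an AWS connection can be declared as follows; the role ARN is a placeholder, and the identity field is returned as an output:

import pulumi_gcp as gcp

aws_connection = gcp.bigquery.Connection("aws-connection",
    connection_id="my-aws-connection",
    location="aws-us-east-1",  # the allowed AWS region
    aws={
        "access_role": {
            # Placeholder ARN of the AWS IAM role that trusts the Google-owned identity.
            "iam_role_id": "arn:aws:iam::999999999999:role/my-bigquery-role",
        },
    })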

ConnectionAwsAccessRole, ConnectionAwsAccessRoleArgs

IamRoleId This property is required. string
The user's AWS IAM role that trusts the Google-owned AWS IAM user for this connection.
Identity string
(Output) A unique Google-owned and Google-generated identity for the Connection. This identity will be used to access the user's AWS IAM Role.
IamRoleId This property is required. string
The user's AWS IAM role that trusts the Google-owned AWS IAM user for this connection.
Identity string
(Output) A unique Google-owned and Google-generated identity for the Connection. This identity will be used to access the user's AWS IAM Role.
iamRoleId This property is required. String
The user's AWS IAM role that trusts the Google-owned AWS IAM user for this connection.
identity String
(Output) A unique Google-owned and Google-generated identity for the Connection. This identity will be used to access the user's AWS IAM Role.
iamRoleId This property is required. string
The user's AWS IAM role that trusts the Google-owned AWS IAM user for this connection.
identity string
(Output) A unique Google-owned and Google-generated identity for the Connection. This identity will be used to access the user's AWS IAM Role.
iam_role_id This property is required. str
The user's AWS IAM role that trusts the Google-owned AWS IAM user for this connection.
identity str
(Output) A unique Google-owned and Google-generated identity for the Connection. This identity will be used to access the user's AWS IAM Role.
iamRoleId This property is required. String
The user's AWS IAM role that trusts the Google-owned AWS IAM user for this connection.
identity String
(Output) A unique Google-owned and Google-generated identity for the Connection. This identity will be used to access the user's AWS IAM Role.

ConnectionAzure, ConnectionAzureArgs

CustomerTenantId This property is required. string
The ID of the customer's directory that hosts the data.
Application string
(Output) The name of the Azure Active Directory Application.
ClientId string
(Output) The client id of the Azure Active Directory Application.
FederatedApplicationClientId string
The Azure Application (client) ID where the federated credentials will be hosted.
Identity string
(Output) A unique Google-owned and Google-generated identity for the Connection. This identity will be used to access the user's Azure Active Directory Application.
ObjectId string
(Output) The object id of the Azure Active Directory Application.
RedirectUri string
(Output) The URL the user will be redirected to after granting consent during connection setup.
CustomerTenantId This property is required. string
The ID of the customer's directory that hosts the data.
Application string
(Output) The name of the Azure Active Directory Application.
ClientId string
(Output) The client id of the Azure Active Directory Application.
FederatedApplicationClientId string
The Azure Application (client) ID where the federated credentials will be hosted.
Identity string
(Output) A unique Google-owned and Google-generated identity for the Connection. This identity will be used to access the user's Azure Active Directory Application.
ObjectId string
(Output) The object id of the Azure Active Directory Application.
RedirectUri string
(Output) The URL the user will be redirected to after granting consent during connection setup.
customerTenantId This property is required. String
The ID of the customer's directory that hosts the data.
application String
(Output) The name of the Azure Active Directory Application.
clientId String
(Output) The client id of the Azure Active Directory Application.
federatedApplicationClientId String
The Azure Application (client) ID where the federated credentials will be hosted.
identity String
(Output) A unique Google-owned and Google-generated identity for the Connection. This identity will be used to access the user's Azure Active Directory Application.
objectId String
(Output) The object id of the Azure Active Directory Application.
redirectUri String
(Output) The URL the user will be redirected to after granting consent during connection setup.
customerTenantId This property is required. string
The ID of the customer's directory that hosts the data.
application string
(Output) The name of the Azure Active Directory Application.
clientId string
(Output) The client id of the Azure Active Directory Application.
federatedApplicationClientId string
The Azure Application (client) ID where the federated credentials will be hosted.
identity string
(Output) A unique Google-owned and Google-generated identity for the Connection. This identity will be used to access the user's Azure Active Directory Application.
objectId string
(Output) The object id of the Azure Active Directory Application.
redirectUri string
(Output) The URL the user will be redirected to after granting consent during connection setup.
customer_tenant_id This property is required. str
The ID of the customer's directory that hosts the data.
application str
(Output) The name of the Azure Active Directory Application.
client_id str
(Output) The client id of the Azure Active Directory Application.
federated_application_client_id str
The Azure Application (client) ID where the federated credentials will be hosted.
identity str
(Output) A unique Google-owned and Google-generated identity for the Connection. This identity will be used to access the user's Azure Active Directory Application.
object_id str
(Output) The object id of the Azure Active Directory Application.
redirect_uri str
(Output) The URL the user will be redirected to after granting consent during connection setup.
customerTenantId This property is required. String
The ID of the customer's directory that hosts the data.
application String
(Output) The name of the Azure Active Directory Application.
clientId String
(Output) The client id of the Azure Active Directory Application.
federatedApplicationClientId String
The Azure Application (client) ID where the federated credentials will be hosted.
identity String
(Output) A unique Google-owned and Google-generated identity for the Connection. This identity will be used to access the user's Azure Active Directory Application.
objectId String
(Output) The object id of the Azure Active Directory Application.
redirectUri String
(Output) The URL the user will be redirected to after granting consent during connection setup.
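As a minimal Python sketch (both IDs below are placeholders; the remaining Azure fields are returned as outputs):

import pulumi_gcp as gcp

azure_connection = gcp.bigquery.Connection("azure-connection",
    connection_id="my-azure-connection",
    location="azure-eastus2",  # the allowed Azure region
    azure={
        "customer_tenant_id": "00000000-0000-0000-0000-000000000000",
        "federated_application_client_id": "11111111-1111-1111-1111-111111111111",
    })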

ConnectionCloudResource, ConnectionCloudResourceArgs

ServiceAccountId string
(Output) The account ID of the service account created for the purpose of this connection.
ServiceAccountId string
(Output) The account ID of the service account created for the purpose of this connection.
serviceAccountId String
(Output) The account ID of the service account created for the purpose of this connection.
serviceAccountId string
(Output) The account ID of the service account created for the purpose of this connection.
service_account_id str
(Output) The account ID of the service account created for the purpose of this connection.
serviceAccountId String
(Output) The account ID of the service account created for the purpose of this connection.

ConnectionCloudSpanner, ConnectionCloudSpannerArgs

Database This property is required. string
Cloud Spanner database in the form `project/instance/database`.
DatabaseRole string
Cloud Spanner database role for fine-grained access control. The Cloud Spanner admin should have provisioned the database role with appropriate permissions, such as SELECT and INSERT. Other users should only use roles provided by their Cloud Spanner admins. The database role name must start with a letter, and can only contain letters, numbers, and underscores. For more details, see https://cloud.google.com/spanner/docs/fgac-about.
MaxParallelism int
Allows setting max parallelism per query when executing on Spanner independent compute resources. If unspecified, default values of parallelism are chosen that are dependent on the Cloud Spanner instance configuration. useParallelism and useDataBoost must be set when setting max parallelism.
UseDataBoost bool
If set, the request will be executed via Spanner independent compute resources. use_parallelism must be set when using data boost.
UseParallelism bool
Whether parallelism should be used when reading from Cloud Spanner.
UseServerlessAnalytics bool

(Optional, Deprecated) Whether the serverless analytics service should be used to read data from Cloud Spanner. useParallelism must be set when using serverless analytics.

Warning: useServerlessAnalytics is deprecated and will be removed in a future major release. Use useDataBoost instead.

Deprecated: useServerlessAnalytics is deprecated and will be removed in a future major release. Use useDataBoost instead.

Database This property is required. string
Cloud Spanner database in the form `project/instance/database`.
DatabaseRole string
Cloud Spanner database role for fine-grained access control. The Cloud Spanner admin should have provisioned the database role with appropriate permissions, such as SELECT and INSERT. Other users should only use roles provided by their Cloud Spanner admins. The database role name must start with a letter, and can only contain letters, numbers, and underscores. For more details, see https://cloud.google.com/spanner/docs/fgac-about.
MaxParallelism int
Allows setting max parallelism per query when executing on Spanner independent compute resources. If unspecified, default values of parallelism are chosen that are dependent on the Cloud Spanner instance configuration. useParallelism and useDataBoost must be set when setting max parallelism.
UseDataBoost bool
If set, the request will be executed via Spanner independent compute resources. use_parallelism must be set when using data boost.
UseParallelism bool
Whether parallelism should be used when reading from Cloud Spanner.
UseServerlessAnalytics bool

(Optional, Deprecated) Whether the serverless analytics service should be used to read data from Cloud Spanner. useParallelism must be set when using serverless analytics.

Warning: useServerlessAnalytics is deprecated and will be removed in a future major release. Use useDataBoost instead.

Deprecated: useServerlessAnalytics is deprecated and will be removed in a future major release. Use useDataBoost instead.

database This property is required. String
Cloud Spanner database in the form `project/instance/database`.
databaseRole String
Cloud Spanner database role for fine-grained access control. The Cloud Spanner admin should have provisioned the database role with appropriate permissions, such as SELECT and INSERT. Other users should only use roles provided by their Cloud Spanner admins. The database role name must start with a letter, and can only contain letters, numbers, and underscores. For more details, see https://cloud.google.com/spanner/docs/fgac-about.
maxParallelism Integer
Allows setting max parallelism per query when executing on Spanner independent compute resources. If unspecified, default values of parallelism are chosen that are dependent on the Cloud Spanner instance configuration. useParallelism and useDataBoost must be set when setting max parallelism.
useDataBoost Boolean
If set, the request will be executed via Spanner independent compute resources. use_parallelism must be set when using data boost.
useParallelism Boolean
Whether parallelism should be used when reading from Cloud Spanner.
useServerlessAnalytics Boolean

(Optional, Deprecated) Whether the serverless analytics service should be used to read data from Cloud Spanner. useParallelism must be set when using serverless analytics.

Warning: useServerlessAnalytics is deprecated and will be removed in a future major release. Use useDataBoost instead.

Deprecated: useServerlessAnalytics is deprecated and will be removed in a future major release. Use useDataBoost instead.

database This property is required. string
Cloud Spanner database in the form `project/instance/database`.
databaseRole string
Cloud Spanner database role for fine-grained access control. The Cloud Spanner admin should have provisioned the database role with appropriate permissions, such as SELECT and INSERT. Other users should only use roles provided by their Cloud Spanner admins. The database role name must start with a letter, and can only contain letters, numbers, and underscores. For more details, see https://cloud.google.com/spanner/docs/fgac-about.
maxParallelism number
Allows setting max parallelism per query when executing on Spanner independent compute resources. If unspecified, default values of parallelism are chosen that are dependent on the Cloud Spanner instance configuration. useParallelism and useDataBoost must be set when setting max parallelism.
useDataBoost boolean
If set, the request will be executed via Spanner independent compute resources. use_parallelism must be set when using data boost.
useParallelism boolean
Whether parallelism should be used when reading from Cloud Spanner.
useServerlessAnalytics boolean

(Optional, Deprecated) Whether the serverless analytics service should be used to read data from Cloud Spanner. useParallelism must be set when using serverless analytics.

Warning: useServerlessAnalytics is deprecated and will be removed in a future major release. Use useDataBoost instead.

Deprecated: useServerlessAnalytics is deprecated and will be removed in a future major release. Use useDataBoost instead.

database This property is required. str
Cloud Spanner database in the form `project/instance/database`.
database_role str
Cloud Spanner database role for fine-grained access control. The Cloud Spanner admin should have provisioned the database role with appropriate permissions, such as SELECT and INSERT. Other users should only use roles provided by their Cloud Spanner admins. The database role name must start with a letter, and can only contain letters, numbers, and underscores. For more details, see https://cloud.google.com/spanner/docs/fgac-about.
max_parallelism int
Allows setting max parallelism per query when executing on Spanner independent compute resources. If unspecified, default values of parallelism are chosen that are dependent on the Cloud Spanner instance configuration. useParallelism and useDataBoost must be set when setting max parallelism.
use_data_boost bool
If set, the request will be executed via Spanner independent compute resources. use_parallelism must be set when using data boost.
use_parallelism bool
Whether parallelism should be used when reading from Cloud Spanner.
use_serverless_analytics bool

(Optional, Deprecated) Whether the serverless analytics service should be used to read data from Cloud Spanner. useParallelism must be set when using serverless analytics.

Warning: useServerlessAnalytics is deprecated and will be removed in a future major release. Use useDataBoost instead.

Deprecated: useServerlessAnalytics is deprecated and will be removed in a future major release. Use useDataBoost instead.

database This property is required. String
Cloud Spanner database in the form `project/instance/database`.
databaseRole String
Cloud Spanner database role for fine-grained access control. The Cloud Spanner admin should have provisioned the database role with appropriate permissions, such as SELECT and INSERT. Other users should only use roles provided by their Cloud Spanner admins. The database role name must start with a letter, and can only contain letters, numbers, and underscores. For more details, see https://cloud.google.com/spanner/docs/fgac-about.
maxParallelism Number
Allows setting max parallelism per query when executing on Spanner independent compute resources. If unspecified, default values of parallelism are chosen that are dependent on the Cloud Spanner instance configuration. useParallelism and useDataBoost must be set when setting max parallelism.
useDataBoost Boolean
If set, the request will be executed via Spanner independent compute resources. use_parallelism must be set when using data boost.
useParallelism Boolean
Whether parallelism should be used when reading from Cloud Spanner.
useServerlessAnalytics Boolean

(Optional, Deprecated) Whether the serverless analytics service should be used to read data from Cloud Spanner. useParallelism must be set when using serverless analytics.

Warning: useServerlessAnalytics is deprecated and will be removed in a future major release. Use useDataBoost instead.

Deprecated: useServerlessAnalytics is deprecated and will be removed in a future major release. Use useDataBoost instead.
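As a minimal Python sketch of the constraints above (all names are placeholders), note that setting max_parallelism requires both use_parallelism and use_data_boost:

import pulumi_gcp as gcp

spanner_connection = gcp.bigquery.Connection("spanner-connection",
    connection_id="my-spanner-connection",
    location="us-central1",  # same region as the Spanner instance
    cloud_spanner={
        "database": "my-project/my-instance/my-db",  # placeholder, in the form project/instance/database
        "use_parallelism": True,   # required when use_data_boost is set
        "use_data_boost": True,
        "max_parallelism": 10,     # requires use_parallelism and use_data_boost
    })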

ConnectionCloudSql, ConnectionCloudSqlArgs

Credential This property is required. ConnectionCloudSqlCredential
Cloud SQL properties. Structure is documented below.
Database This property is required. string
Database name.
InstanceId This property is required. string
Cloud SQL instance ID in the form project:location:instance.
Type This property is required. string
Type of the Cloud SQL database. Possible values are: DATABASE_TYPE_UNSPECIFIED, POSTGRES, MYSQL.
ServiceAccountId string
(Output) When the connection is used in the context of an operation in BigQuery, this service account will serve as the identity used to connect to the Cloud SQL instance specified in this connection.
Credential This property is required. ConnectionCloudSqlCredential
Cloud SQL properties. Structure is documented below.
Database This property is required. string
Database name.
InstanceId This property is required. string
Cloud SQL instance ID in the form project:location:instance.
Type This property is required. string
Type of the Cloud SQL database. Possible values are: DATABASE_TYPE_UNSPECIFIED, POSTGRES, MYSQL.
ServiceAccountId string
(Output) When the connection is used in the context of an operation in BigQuery, this service account serves as the identity used to connect to the Cloud SQL instance specified in this connection.
credential This property is required. ConnectionCloudSqlCredential
Cloud SQL properties. Structure is documented below.
database This property is required. String
Database name.
instanceId This property is required. String
Cloud SQL instance ID in the form project:location:instance.
type This property is required. String
Type of the Cloud SQL database. Possible values are: DATABASE_TYPE_UNSPECIFIED, POSTGRES, MYSQL.
serviceAccountId String
(Output) When the connection is used in the context of an operation in BigQuery, this service account serves as the identity used to connect to the Cloud SQL instance specified in this connection.
credential This property is required. ConnectionCloudSqlCredential
Cloud SQL properties. Structure is documented below.
database This property is required. string
Database name.
instanceId This property is required. string
Cloud SQL instance ID in the form project:location:instance.
type This property is required. string
Type of the Cloud SQL database. Possible values are: DATABASE_TYPE_UNSPECIFIED, POSTGRES, MYSQL.
serviceAccountId string
(Output) When the connection is used in the context of an operation in BigQuery, this service account serves as the identity used to connect to the Cloud SQL instance specified in this connection.
credential This property is required. ConnectionCloudSqlCredential
Cloud SQL properties. Structure is documented below.
database This property is required. str
Database name.
instance_id This property is required. str
Cloud SQL instance ID in the form project:location:instance.
type This property is required. str
Type of the Cloud SQL database. Possible values are: DATABASE_TYPE_UNSPECIFIED, POSTGRES, MYSQL.
service_account_id str
(Output) When the connection is used in the context of an operation in BigQuery, this service account serves as the identity used to connect to the Cloud SQL instance specified in this connection.
credential This property is required. Property Map
Cloud SQL properties. Structure is documented below.
database This property is required. String
Database name.
instanceId This property is required. String
Cloud SQL instance ID in the form project:location:instance.
type This property is required. String
Type of the Cloud SQL database. Possible values are: DATABASE_TYPE_UNSPECIFIED, POSTGRES, MYSQL.
serviceAccountId String
(Output) When the connection is used in the context of an operation in BigQuery, this service account serves as the identity used to connect to the Cloud SQL instance specified in this connection.

ConnectionCloudSqlCredential, ConnectionCloudSqlCredentialArgs

Password This property is required. string
Password for database. Note: This property is sensitive and will not be displayed in the plan.
Username This property is required. string
Username for database.
Password This property is required. string
Password for database. Note: This property is sensitive and will not be displayed in the plan.
Username This property is required. string
Username for database.
password This property is required. String
Password for database. Note: This property is sensitive and will not be displayed in the plan.
username This property is required. String
Username for database.
password This property is required. string
Password for database. Note: This property is sensitive and will not be displayed in the plan.
username This property is required. string
Username for database.
password This property is required. str
Password for database. Note: This property is sensitive and will not be displayed in the plan.
username This property is required. str
Username for database.
password This property is required. String
Password for database. Note: This property is sensitive and will not be displayed in the plan.
username This property is required. String
Username for database.
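
For orientation, here is a minimal TypeScript sketch of a Cloud SQL connection that uses both structures above. The instance ID, database, and username are placeholders, and the password is read from stack configuration as a secret (since the property is sensitive) rather than hard-coded.

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// Set with: pulumi config set --secret dbPassword <value>
const config = new pulumi.Config();
const dbPassword = config.requireSecret("dbPassword");

const sqlConnection = new gcp.bigquery.Connection("sql-connection", {
    connectionId: "my-sql-connection",
    location: "US",
    cloudSql: {
        // Placeholder instance ID in the form project:location:instance.
        instanceId: "my-project:us-central1:my-instance",
        database: "my-database",
        type: "POSTGRES",
        credential: {
            username: "my-user",
            password: dbPassword,
        },
    },
});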

ConnectionSpark, ConnectionSparkArgs

MetastoreServiceConfig ConnectionSparkMetastoreServiceConfig
Dataproc Metastore Service configuration for the connection. Structure is documented below.
ServiceAccountId string
(Output) The account ID of the service account created for the purpose of this connection.
SparkHistoryServerConfig ConnectionSparkSparkHistoryServerConfig
Spark History Server configuration for the connection. Structure is documented below.
MetastoreServiceConfig ConnectionSparkMetastoreServiceConfig
Dataproc Metastore Service configuration for the connection. Structure is documented below.
ServiceAccountId string
(Output) The account ID of the service account created for the purpose of this connection.
SparkHistoryServerConfig ConnectionSparkSparkHistoryServerConfig
Spark History Server configuration for the connection. Structure is documented below.
metastoreServiceConfig ConnectionSparkMetastoreServiceConfig
Dataproc Metastore Service configuration for the connection. Structure is documented below.
serviceAccountId String
(Output) The account ID of the service account created for the purpose of this connection.
sparkHistoryServerConfig ConnectionSparkSparkHistoryServerConfig
Spark History Server configuration for the connection. Structure is documented below.
metastoreServiceConfig ConnectionSparkMetastoreServiceConfig
Dataproc Metastore Service configuration for the connection. Structure is documented below.
serviceAccountId string
(Output) The account ID of the service account created for the purpose of this connection.
sparkHistoryServerConfig ConnectionSparkSparkHistoryServerConfig
Spark History Server configuration for the connection. Structure is documented below.
metastore_service_config ConnectionSparkMetastoreServiceConfig
Dataproc Metastore Service configuration for the connection. Structure is documented below.
service_account_id str
(Output) The account ID of the service account created for the purpose of this connection.
spark_history_server_config ConnectionSparkSparkHistoryServerConfig
Spark History Server configuration for the connection. Structure is documented below.
metastoreServiceConfig Property Map
Dataproc Metastore Service configuration for the connection. Structure is documented below.
serviceAccountId String
(Output) The account ID of the service account created for the purpose of this connection.
sparkHistoryServerConfig Property Map
Spark History Server configuration for the connection. Structure is documented below.

ConnectionSparkMetastoreServiceConfig, ConnectionSparkMetastoreServiceConfigArgs

MetastoreService string
Resource name of an existing Dataproc Metastore service in the form of projects/[projectId]/locations/[region]/services/[serviceId].
MetastoreService string
Resource name of an existing Dataproc Metastore service in the form of projects/[projectId]/locations/[region]/services/[serviceId].
metastoreService String
Resource name of an existing Dataproc Metastore service in the form of projects/[projectId]/locations/[region]/services/[serviceId].
metastoreService string
Resource name of an existing Dataproc Metastore service in the form of projects/[projectId]/locations/[region]/services/[serviceId].
metastore_service str
Resource name of an existing Dataproc Metastore service in the form of projects/[projectId]/locations/[region]/services/[serviceId].
metastoreService String
Resource name of an existing Dataproc Metastore service in the form of projects/[projectId]/locations/[region]/services/[serviceId].

ConnectionSparkSparkHistoryServerConfig, ConnectionSparkSparkHistoryServerConfigArgs

DataprocCluster string
Resource name of an existing Dataproc Cluster to act as a Spark History Server for the connection, in the form of projects/[projectId]/regions/[region]/clusters/[cluster_name].
DataprocCluster string
Resource name of an existing Dataproc Cluster to act as a Spark History Server for the connection, in the form of projects/[projectId]/regions/[region]/clusters/[cluster_name].
dataprocCluster String
Resource name of an existing Dataproc Cluster to act as a Spark History Server for the connection, in the form of projects/[projectId]/regions/[region]/clusters/[cluster_name].
dataprocCluster string
Resource name of an existing Dataproc Cluster to act as a Spark History Server for the connection, in the form of projects/[projectId]/regions/[region]/clusters/[cluster_name].
dataproc_cluster str
Resource name of an existing Dataproc Cluster to act as a Spark History Server for the connection, in the form of projects/[projectId]/regions/[region]/clusters/[cluster_name].
dataprocCluster String
Resource name of an existing Dataproc Cluster to act as a Spark History Server for the connection, in the form of projects/[projectId]/regions/[region]/clusters/[cluster_name].

Import

Connection can be imported using any of these accepted formats:

  • projects/{{project}}/locations/{{location}}/connections/{{connection_id}}

  • {{project}}/{{location}}/{{connection_id}}

  • {{location}}/{{connection_id}}

When using the pulumi import command, Connection can be imported using one of the formats above. For example:

$ pulumi import gcp:bigquery/connection:Connection default projects/{{project}}/locations/{{location}}/connections/{{connection_id}}
$ pulumi import gcp:bigquery/connection:Connection default {{project}}/{{location}}/{{connection_id}}
$ pulumi import gcp:bigquery/connection:Connection default {{location}}/{{connection_id}}

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
Google Cloud (GCP) Classic pulumi/pulumi-gcp
License
Apache-2.0
Notes
This Pulumi package is based on the google-beta Terraform Provider.