Confluent v2.23.0 published on Tuesday, Apr 1, 2025 by Pulumi

confluentcloud.FlinkStatement


Example Usage

Option #1: Manage multiple Flink Compute Pools in the same Pulumi Stack

import * as pulumi from "@pulumi/pulumi";
import * as confluentcloud from "@pulumi/confluentcloud";

const randomIntTable = new confluentcloud.FlinkStatement("random_int_table", {
    organization: {
        id: main.id,
    },
    environment: {
        id: staging.id,
    },
    computePool: {
        id: example.id,
    },
    principal: {
        id: app_manager_flink.id,
    },
    statement: "CREATE TABLE random_int_table(ts TIMESTAMP_LTZ(3), random_value INT);",
    properties: {
        "sql.current-catalog": exampleConfluentEnvironment.displayName,
        "sql.current-database": exampleConfluentKafkaCluster.displayName,
    },
    restEndpoint: mainConfluentFlinkRegion.restEndpoint,
    credentials: {
        key: env_admin_flink_api_key.id,
        secret: env_admin_flink_api_key.secret,
    },
});
import pulumi
import pulumi_confluentcloud as confluentcloud

random_int_table = confluentcloud.FlinkStatement("random_int_table",
    organization={
        "id": main["id"],
    },
    environment={
        "id": staging["id"],
    },
    compute_pool={
        "id": example["id"],
    },
    principal={
        "id": app_manager_flink["id"],
    },
    statement="CREATE TABLE random_int_table(ts TIMESTAMP_LTZ(3), random_value INT);",
    properties={
        "sql.current-catalog": example_confluent_environment["displayName"],
        "sql.current-database": example_confluent_kafka_cluster["displayName"],
    },
    rest_endpoint=main_confluent_flink_region["restEndpoint"],
    credentials={
        "key": env_admin_flink_api_key["id"],
        "secret": env_admin_flink_api_key["secret"],
    })
package main

import (
	"github.com/pulumi/pulumi-confluentcloud/sdk/v2/go/confluentcloud"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := confluentcloud.NewFlinkStatement(ctx, "random_int_table", &confluentcloud.FlinkStatementArgs{
			Organization: &confluentcloud.FlinkStatementOrganizationArgs{
				Id: pulumi.Any(main.Id),
			},
			Environment: &confluentcloud.FlinkStatementEnvironmentArgs{
				Id: pulumi.Any(staging.Id),
			},
			ComputePool: &confluentcloud.FlinkStatementComputePoolArgs{
				Id: pulumi.Any(example.Id),
			},
			Principal: &confluentcloud.FlinkStatementPrincipalArgs{
				Id: pulumi.Any(app_manager_flink.Id),
			},
			Statement: pulumi.String("CREATE TABLE random_int_table(ts TIMESTAMP_LTZ(3), random_value INT);"),
			Properties: pulumi.StringMap{
				"sql.current-catalog":  pulumi.Any(exampleConfluentEnvironment.DisplayName),
				"sql.current-database": pulumi.Any(exampleConfluentKafkaCluster.DisplayName),
			},
			RestEndpoint: pulumi.Any(mainConfluentFlinkRegion.RestEndpoint),
			Credentials: &confluentcloud.FlinkStatementCredentialsArgs{
				Key:    pulumi.Any(env_admin_flink_api_key.Id),
				Secret: pulumi.Any(env_admin_flink_api_key.Secret),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using ConfluentCloud = Pulumi.ConfluentCloud;

return await Deployment.RunAsync(() => 
{
    var randomIntTable = new ConfluentCloud.FlinkStatement("random_int_table", new()
    {
        Organization = new ConfluentCloud.Inputs.FlinkStatementOrganizationArgs
        {
            Id = main.Id,
        },
        Environment = new ConfluentCloud.Inputs.FlinkStatementEnvironmentArgs
        {
            Id = staging.Id,
        },
        ComputePool = new ConfluentCloud.Inputs.FlinkStatementComputePoolArgs
        {
            Id = example.Id,
        },
        Principal = new ConfluentCloud.Inputs.FlinkStatementPrincipalArgs
        {
            Id = app_manager_flink.Id,
        },
        Statement = "CREATE TABLE random_int_table(ts TIMESTAMP_LTZ(3), random_value INT);",
        Properties = 
        {
            { "sql.current-catalog", exampleConfluentEnvironment.DisplayName },
            { "sql.current-database", exampleConfluentKafkaCluster.DisplayName },
        },
        RestEndpoint = mainConfluentFlinkRegion.RestEndpoint,
        Credentials = new ConfluentCloud.Inputs.FlinkStatementCredentialsArgs
        {
            Key = env_admin_flink_api_key.Id,
            Secret = env_admin_flink_api_key.Secret,
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.confluentcloud.FlinkStatement;
import com.pulumi.confluentcloud.FlinkStatementArgs;
import com.pulumi.confluentcloud.inputs.FlinkStatementOrganizationArgs;
import com.pulumi.confluentcloud.inputs.FlinkStatementEnvironmentArgs;
import com.pulumi.confluentcloud.inputs.FlinkStatementComputePoolArgs;
import com.pulumi.confluentcloud.inputs.FlinkStatementPrincipalArgs;
import com.pulumi.confluentcloud.inputs.FlinkStatementCredentialsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var randomIntTable = new FlinkStatement("randomIntTable", FlinkStatementArgs.builder()
            .organization(FlinkStatementOrganizationArgs.builder()
                .id(main.id())
                .build())
            .environment(FlinkStatementEnvironmentArgs.builder()
                .id(staging.id())
                .build())
            .computePool(FlinkStatementComputePoolArgs.builder()
                .id(example.id())
                .build())
            .principal(FlinkStatementPrincipalArgs.builder()
                .id(app_manager_flink.id())
                .build())
            .statement("CREATE TABLE random_int_table(ts TIMESTAMP_LTZ(3), random_value INT);")
            .properties(Map.ofEntries(
                Map.entry("sql.current-catalog", exampleConfluentEnvironment.displayName()),
                Map.entry("sql.current-database", exampleConfluentKafkaCluster.displayName())
            ))
            .restEndpoint(mainConfluentFlinkRegion.restEndpoint())
            .credentials(FlinkStatementCredentialsArgs.builder()
                .key(env_admin_flink_api_key.id())
                .secret(env_admin_flink_api_key.secret())
                .build())
            .build());

    }
}
resources:
  randomIntTable:
    type: confluentcloud:FlinkStatement
    name: random_int_table
    properties:
      organization:
        id: ${main.id}
      environment:
        id: ${staging.id}
      computePool:
        id: ${example.id}
      principal:
        id: ${app-manager-flink.id}
      statement: CREATE TABLE random_int_table(ts TIMESTAMP_LTZ(3), random_value INT);
      properties:
        sql.current-catalog: ${exampleConfluentEnvironment.displayName}
        sql.current-database: ${exampleConfluentKafkaCluster.displayName}
      restEndpoint: ${mainConfluentFlinkRegion.restEndpoint}
      credentials:
        key: ${env-admin-flink-api-key.id}
        secret: ${env-admin-flink-api-key.secret}

Option #2: Manage a single Flink Compute Pool in the same Pulumi Stack

import * as pulumi from "@pulumi/pulumi";
import * as confluentcloud from "@pulumi/confluentcloud";

const example = new confluentcloud.FlinkStatement("example", {
    statement: "CREATE TABLE random_int_table(ts TIMESTAMP_LTZ(3), random_value INT);",
    properties: {
        "sql.current-catalog": confluentEnvironmentDisplayName,
        "sql.current-database": confluentKafkaClusterDisplayName,
    },
});
import pulumi
import pulumi_confluentcloud as confluentcloud

example = confluentcloud.FlinkStatement("example",
    statement="CREATE TABLE random_int_table(ts TIMESTAMP_LTZ(3), random_value INT);",
    properties={
        "sql.current-catalog": confluent_environment_display_name,
        "sql.current-database": confluent_kafka_cluster_display_name,
    })
package main

import (
	"github.com/pulumi/pulumi-confluentcloud/sdk/v2/go/confluentcloud"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := confluentcloud.NewFlinkStatement(ctx, "example", &confluentcloud.FlinkStatementArgs{
			Statement: pulumi.String("CREATE TABLE random_int_table(ts TIMESTAMP_LTZ(3), random_value INT);"),
			Properties: pulumi.StringMap{
				"sql.current-catalog":  pulumi.Any(confluentEnvironmentDisplayName),
				"sql.current-database": pulumi.Any(confluentKafkaClusterDisplayName),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using ConfluentCloud = Pulumi.ConfluentCloud;

return await Deployment.RunAsync(() => 
{
    var example = new ConfluentCloud.FlinkStatement("example", new()
    {
        Statement = "CREATE TABLE random_int_table(ts TIMESTAMP_LTZ(3), random_value INT);",
        Properties = 
        {
            { "sql.current-catalog", confluentEnvironmentDisplayName },
            { "sql.current-database", confluentKafkaClusterDisplayName },
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.confluentcloud.FlinkStatement;
import com.pulumi.confluentcloud.FlinkStatementArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var example = new FlinkStatement("example", FlinkStatementArgs.builder()
            .statement("CREATE TABLE random_int_table(ts TIMESTAMP_LTZ(3), random_value INT);")
            .properties(Map.ofEntries(
                Map.entry("sql.current-catalog", confluentEnvironmentDisplayName),
                Map.entry("sql.current-database", confluentKafkaClusterDisplayName)
            ))
            .build());

    }
}
resources:
  example:
    type: confluentcloud:FlinkStatement
    properties:
      statement: CREATE TABLE random_int_table(ts TIMESTAMP_LTZ(3), random_value INT);
      properties:
        sql.current-catalog: ${confluentEnvironmentDisplayName}
        sql.current-database: ${confluentKafkaClusterDisplayName}

Example of confluentcloud.FlinkStatement that creates a model:

import * as pulumi from "@pulumi/pulumi";
import * as confluentcloud from "@pulumi/confluentcloud";

const example = new confluentcloud.FlinkStatement("example", {
    statement: "CREATE MODEL `vector_encoding` INPUT (input STRING) OUTPUT (vector ARRAY<FLOAT>) WITH( 'TASK' = 'classification','PROVIDER' = 'OPENAI','OPENAI.ENDPOINT' = 'https://api.openai.com/v1/embeddings','OPENAI.API_KEY' = '{{sessionconfig/sql.secrets.openaikey}}');",
    properties: {
        "sql.current-catalog": confluentEnvironmentDisplayName,
        "sql.current-database": confluentKafkaClusterDisplayName,
    },
    propertiesSensitive: {
        "sql.secrets.openaikey": "***REDACTED***",
    },
});
import pulumi
import pulumi_confluentcloud as confluentcloud

example = confluentcloud.FlinkStatement("example",
    statement="CREATE MODEL `vector_encoding` INPUT (input STRING) OUTPUT (vector ARRAY<FLOAT>) WITH( 'TASK' = 'classification','PROVIDER' = 'OPENAI','OPENAI.ENDPOINT' = 'https://api.openai.com/v1/embeddings','OPENAI.API_KEY' = '{{sessionconfig/sql.secrets.openaikey}}');",
    properties={
        "sql.current-catalog": confluent_environment_display_name,
        "sql.current-database": confluent_kafka_cluster_display_name,
    },
    properties_sensitive={
        "sql.secrets.openaikey": "***REDACTED***",
    })
package main

import (
	"github.com/pulumi/pulumi-confluentcloud/sdk/v2/go/confluentcloud"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := confluentcloud.NewFlinkStatement(ctx, "example", &confluentcloud.FlinkStatementArgs{
			Statement: pulumi.String("CREATE MODEL `vector_encoding` INPUT (input STRING) OUTPUT (vector ARRAY<FLOAT>) WITH( 'TASK' = 'classification','PROVIDER' = 'OPENAI','OPENAI.ENDPOINT' = 'https://api.openai.com/v1/embeddings','OPENAI.API_KEY' = '{{sessionconfig/sql.secrets.openaikey}}');"),
			Properties: pulumi.StringMap{
				"sql.current-catalog":  pulumi.Any(confluentEnvironmentDisplayName),
				"sql.current-database": pulumi.Any(confluentKafkaClusterDisplayName),
			},
			PropertiesSensitive: pulumi.StringMap{
				"sql.secrets.openaikey": pulumi.String("***REDACTED***"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using ConfluentCloud = Pulumi.ConfluentCloud;

return await Deployment.RunAsync(() => 
{
    var example = new ConfluentCloud.FlinkStatement("example", new()
    {
        Statement = "CREATE MODEL `vector_encoding` INPUT (input STRING) OUTPUT (vector ARRAY<FLOAT>) WITH( 'TASK' = 'classification','PROVIDER' = 'OPENAI','OPENAI.ENDPOINT' = 'https://api.openai.com/v1/embeddings','OPENAI.API_KEY' = '{{sessionconfig/sql.secrets.openaikey}}');",
        Properties = 
        {
            { "sql.current-catalog", confluentEnvironmentDisplayName },
            { "sql.current-database", confluentKafkaClusterDisplayName },
        },
        PropertiesSensitive = 
        {
            { "sql.secrets.openaikey", "***REDACTED***" },
        },
    });

});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.confluentcloud.FlinkStatement;
import com.pulumi.confluentcloud.FlinkStatementArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var example = new FlinkStatement("example", FlinkStatementArgs.builder()
            .statement("CREATE MODEL `vector_encoding` INPUT (input STRING) OUTPUT (vector ARRAY<FLOAT>) WITH( 'TASK' = 'classification','PROVIDER' = 'OPENAI','OPENAI.ENDPOINT' = 'https://api.openai.com/v1/embeddings','OPENAI.API_KEY' = '{{sessionconfig/sql.secrets.openaikey}}');")
            .properties(Map.ofEntries(
                Map.entry("sql.current-catalog", confluentEnvironmentDisplayName),
                Map.entry("sql.current-database", confluentKafkaClusterDisplayName)
            ))
            .propertiesSensitive(Map.of("sql.secrets.openaikey", "***REDACTED***"))
            .build());

    }
}
resources:
  example:
    type: confluentcloud:FlinkStatement
    properties:
      statement: CREATE MODEL `vector_encoding` INPUT (input STRING) OUTPUT (vector ARRAY<FLOAT>) WITH( 'TASK' = 'classification','PROVIDER' = 'OPENAI','OPENAI.ENDPOINT' = 'https://api.openai.com/v1/embeddings','OPENAI.API_KEY' = '{{sessionconfig/sql.secrets.openaikey}}');
      properties:
        sql.current-catalog: ${confluentEnvironmentDisplayName}
        sql.current-database: ${confluentKafkaClusterDisplayName}
      propertiesSensitive:
        sql.secrets.openaikey: '***REDACTED***'

Getting Started

The following end-to-end examples might help you get started with Flink Statements:

  • flink-quickstart
  • flink-carry-over-offset-between-statements

Create FlinkStatement Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new FlinkStatement(name: string, args: FlinkStatementArgs, opts?: CustomResourceOptions);
@overload
def FlinkStatement(resource_name: str,
                   args: FlinkStatementArgs,
                   opts: Optional[ResourceOptions] = None)

@overload
def FlinkStatement(resource_name: str,
                   opts: Optional[ResourceOptions] = None,
                   statement: Optional[str] = None,
                   compute_pool: Optional[FlinkStatementComputePoolArgs] = None,
                   credentials: Optional[FlinkStatementCredentialsArgs] = None,
                   environment: Optional[FlinkStatementEnvironmentArgs] = None,
                   organization: Optional[FlinkStatementOrganizationArgs] = None,
                   principal: Optional[FlinkStatementPrincipalArgs] = None,
                   properties: Optional[Mapping[str, str]] = None,
                   properties_sensitive: Optional[Mapping[str, str]] = None,
                   rest_endpoint: Optional[str] = None,
                   statement_name: Optional[str] = None,
                   stopped: Optional[bool] = None)
func NewFlinkStatement(ctx *Context, name string, args FlinkStatementArgs, opts ...ResourceOption) (*FlinkStatement, error)
public FlinkStatement(string name, FlinkStatementArgs args, CustomResourceOptions? opts = null)
public FlinkStatement(String name, FlinkStatementArgs args)
public FlinkStatement(String name, FlinkStatementArgs args, CustomResourceOptions options)
type: confluentcloud:FlinkStatement
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. FlinkStatementArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. FlinkStatementArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. FlinkStatementArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. FlinkStatementArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. FlinkStatementArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

var flinkStatementResource = new ConfluentCloud.FlinkStatement("flinkStatementResource", new()
{
    Statement = "string",
    ComputePool = new ConfluentCloud.Inputs.FlinkStatementComputePoolArgs
    {
        Id = "string",
    },
    Credentials = new ConfluentCloud.Inputs.FlinkStatementCredentialsArgs
    {
        Key = "string",
        Secret = "string",
    },
    Environment = new ConfluentCloud.Inputs.FlinkStatementEnvironmentArgs
    {
        Id = "string",
    },
    Organization = new ConfluentCloud.Inputs.FlinkStatementOrganizationArgs
    {
        Id = "string",
    },
    Principal = new ConfluentCloud.Inputs.FlinkStatementPrincipalArgs
    {
        Id = "string",
    },
    Properties = 
    {
        { "string", "string" },
    },
    PropertiesSensitive = 
    {
        { "string", "string" },
    },
    RestEndpoint = "string",
    StatementName = "string",
    Stopped = false,
});
example, err := confluentcloud.NewFlinkStatement(ctx, "flinkStatementResource", &confluentcloud.FlinkStatementArgs{
	Statement: pulumi.String("string"),
	ComputePool: &confluentcloud.FlinkStatementComputePoolArgs{
		Id: pulumi.String("string"),
	},
	Credentials: &confluentcloud.FlinkStatementCredentialsArgs{
		Key:    pulumi.String("string"),
		Secret: pulumi.String("string"),
	},
	Environment: &confluentcloud.FlinkStatementEnvironmentArgs{
		Id: pulumi.String("string"),
	},
	Organization: &confluentcloud.FlinkStatementOrganizationArgs{
		Id: pulumi.String("string"),
	},
	Principal: &confluentcloud.FlinkStatementPrincipalArgs{
		Id: pulumi.String("string"),
	},
	Properties: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	PropertiesSensitive: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	RestEndpoint:  pulumi.String("string"),
	StatementName: pulumi.String("string"),
	Stopped:       pulumi.Bool(false),
})
var flinkStatementResource = new FlinkStatement("flinkStatementResource", FlinkStatementArgs.builder()
    .statement("string")
    .computePool(FlinkStatementComputePoolArgs.builder()
        .id("string")
        .build())
    .credentials(FlinkStatementCredentialsArgs.builder()
        .key("string")
        .secret("string")
        .build())
    .environment(FlinkStatementEnvironmentArgs.builder()
        .id("string")
        .build())
    .organization(FlinkStatementOrganizationArgs.builder()
        .id("string")
        .build())
    .principal(FlinkStatementPrincipalArgs.builder()
        .id("string")
        .build())
    .properties(Map.of("string", "string"))
    .propertiesSensitive(Map.of("string", "string"))
    .restEndpoint("string")
    .statementName("string")
    .stopped(false)
    .build());
flink_statement_resource = confluentcloud.FlinkStatement("flinkStatementResource",
    statement="string",
    compute_pool={
        "id": "string",
    },
    credentials={
        "key": "string",
        "secret": "string",
    },
    environment={
        "id": "string",
    },
    organization={
        "id": "string",
    },
    principal={
        "id": "string",
    },
    properties={
        "string": "string",
    },
    properties_sensitive={
        "string": "string",
    },
    rest_endpoint="string",
    statement_name="string",
    stopped=False)
const flinkStatementResource = new confluentcloud.FlinkStatement("flinkStatementResource", {
    statement: "string",
    computePool: {
        id: "string",
    },
    credentials: {
        key: "string",
        secret: "string",
    },
    environment: {
        id: "string",
    },
    organization: {
        id: "string",
    },
    principal: {
        id: "string",
    },
    properties: {
        string: "string",
    },
    propertiesSensitive: {
        string: "string",
    },
    restEndpoint: "string",
    statementName: "string",
    stopped: false,
});
type: confluentcloud:FlinkStatement
properties:
    computePool:
        id: string
    credentials:
        key: string
        secret: string
    environment:
        id: string
    organization:
        id: string
    principal:
        id: string
    properties:
        string: string
    propertiesSensitive:
        string: string
    restEndpoint: string
    statement: string
    statementName: string
    stopped: false

FlinkStatement Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
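For example, the following two declarations of an environment input are equivalent (a minimal sketch; the remaining required arguments are omitted):

import pulumi_confluentcloud as confluentcloud

# As an argument class:
environment = confluentcloud.FlinkStatementEnvironmentArgs(id="env-abc123")

# As an equivalent dictionary literal:
environment = {"id": "env-abc123"}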

The FlinkStatement resource accepts the following input properties:

Statement
This property is required.
Changes to this property will trigger replacement.
string
The raw SQL text statement, for example, SELECT CURRENT_TIMESTAMP;.
ComputePool Pulumi.ConfluentCloud.Inputs.FlinkStatementComputePool
Credentials Pulumi.ConfluentCloud.Inputs.FlinkStatementCredentials
The Cluster API Credentials.
Environment Pulumi.ConfluentCloud.Inputs.FlinkStatementEnvironment
Organization Pulumi.ConfluentCloud.Inputs.FlinkStatementOrganization
Principal Pulumi.ConfluentCloud.Inputs.FlinkStatementPrincipal
Properties Dictionary<string, string>
The custom statement properties to set, for example, sql.current-catalog.
PropertiesSensitive Dictionary<string, string>
Block for sensitive statement properties.
RestEndpoint Changes to this property will trigger replacement. string
The REST endpoint of the Flink region, for example, https://flink.us-east-1.aws.confluent.cloud.
StatementName string
The ID of the Flink Statement, for example, cfeab4fe-b62c-49bd-9e99-51cc98c77a67.
Stopped bool
Indicates whether the statement should be stopped.
Statement
This property is required.
Changes to this property will trigger replacement.
string
The raw SQL text statement, for example, SELECT CURRENT_TIMESTAMP;.
ComputePool FlinkStatementComputePoolArgs
Credentials FlinkStatementCredentialsArgs
The Cluster API Credentials.
Environment FlinkStatementEnvironmentArgs
Organization FlinkStatementOrganizationArgs
Principal FlinkStatementPrincipalArgs
Properties map[string]string
The custom statement properties to set, for example, sql.current-catalog.
PropertiesSensitive map[string]string
Block for sensitive statement properties.
RestEndpoint Changes to this property will trigger replacement. string
The REST endpoint of the Flink region, for example, https://flink.us-east-1.aws.confluent.cloud.
StatementName string
The ID of the Flink Statement, for example, cfeab4fe-b62c-49bd-9e99-51cc98c77a67.
Stopped bool
Indicates whether the statement should be stopped.
statement
This property is required.
Changes to this property will trigger replacement.
String
The raw SQL text statement, for example, SELECT CURRENT_TIMESTAMP;.
computePool FlinkStatementComputePool
credentials FlinkStatementCredentials
The Cluster API Credentials.
environment FlinkStatementEnvironment
organization FlinkStatementOrganization
principal FlinkStatementPrincipal
properties Map<String,String>
The custom statement properties to set, for example, sql.current-catalog.
propertiesSensitive Map<String,String>
Block for sensitive statement properties.
restEndpoint Changes to this property will trigger replacement. String
The REST endpoint of the Flink region, for example, https://flink.us-east-1.aws.confluent.cloud.
statementName String
The ID of the Flink Statement, for example, cfeab4fe-b62c-49bd-9e99-51cc98c77a67.
stopped Boolean
Indicates whether the statement should be stopped.
statement
This property is required.
Changes to this property will trigger replacement.
string
The raw SQL text statement, for example, SELECT CURRENT_TIMESTAMP;.
computePool FlinkStatementComputePool
credentials FlinkStatementCredentials
The Cluster API Credentials.
environment FlinkStatementEnvironment
organization FlinkStatementOrganization
principal FlinkStatementPrincipal
properties {[key: string]: string}
The custom statement properties to set, for example, sql.current-catalog.
propertiesSensitive {[key: string]: string}
Block for sensitive statement properties.
restEndpoint Changes to this property will trigger replacement. string
The REST endpoint of the Flink region, for example, https://flink.us-east-1.aws.confluent.cloud.
statementName string
The ID of the Flink Statement, for example, cfeab4fe-b62c-49bd-9e99-51cc98c77a67.
stopped boolean
Indicates whether the statement should be stopped.
statement
This property is required.
Changes to this property will trigger replacement.
str
The raw SQL text statement, for example, SELECT CURRENT_TIMESTAMP;.
compute_pool FlinkStatementComputePoolArgs
credentials FlinkStatementCredentialsArgs
The Cluster API Credentials.
environment FlinkStatementEnvironmentArgs
organization FlinkStatementOrganizationArgs
principal FlinkStatementPrincipalArgs
properties Mapping[str, str]
The custom statement properties to set, for example, sql.current-catalog.
properties_sensitive Mapping[str, str]
Block for sensitive statement properties.
rest_endpoint Changes to this property will trigger replacement. str
The REST endpoint of the Flink region, for example, https://flink.us-east-1.aws.confluent.cloud.
statement_name str
The ID of the Flink Statement, for example, cfeab4fe-b62c-49bd-9e99-51cc98c77a67.
stopped bool
Indicates whether the statement should be stopped.
statement
This property is required.
Changes to this property will trigger replacement.
String
The raw SQL text statement, for example, SELECT CURRENT_TIMESTAMP;.
computePool Property Map
credentials Property Map
The Cluster API Credentials.
environment Property Map
organization Property Map
principal Property Map
properties Map<String>
The custom statement properties to set, for example, sql.current-catalog.
propertiesSensitive Map<String>
Block for sensitive statement properties.
restEndpoint Changes to this property will trigger replacement. String
The REST endpoint of the Flink region, for example, https://flink.us-east-1.aws.confluent.cloud.
statementName String
The ID of the Flink Statement, for example, cfeab4fe-b62c-49bd-9e99-51cc98c77a67.
stopped Boolean
Indicates whether the statement should be stopped.
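As a sketch of the optional inputs above, the example below sets statement_name and stopped explicitly; every value is a placeholder rather than a real resource:

import pulumi_confluentcloud as confluentcloud

# A minimal sketch: requests that a statement be stopped by setting `stopped=True`.
# The statement name and catalog/database values are placeholders.
stopped_statement = confluentcloud.FlinkStatement("stopped-example",
    statement="SELECT CURRENT_TIMESTAMP;",
    statement_name="example-statement-name",
    stopped=True,
    properties={
        "sql.current-catalog": "my-environment",
        "sql.current-database": "my-kafka-cluster",
    })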

Outputs

All input properties are implicitly available as output properties. Additionally, the FlinkStatement resource produces the following output properties:

Id string
The provider-assigned unique ID for this managed resource.
LatestOffsets Dictionary<string, string>
(Optional Map) The last Kafka offsets that a statement has processed. Represented by a mapping from Kafka topic to a string representation of partitions mapped to offsets. For example,

"latest_offsets": {
  "topic-1": "partition:0,offset:100;partition:1,offset:200",
  "topic-2": "partition:0,offset:50"
}
LatestOffsetsTimestamp string
(Optional String) The date and time at which the Kafka topic offsets were added to the statement status. It is represented in RFC3339 format and is in UTC. For example, 2023-03-31T00:00:00-00:00.
Id string
The provider-assigned unique ID for this managed resource.
LatestOffsets map[string]string
(Optional Map) The last Kafka offsets that a statement has processed. Represented by a mapping from Kafka topic to a string representation of partitions mapped to offsets. For example,

"latest_offsets": {
  "topic-1": "partition:0,offset:100;partition:1,offset:200",
  "topic-2": "partition:0,offset:50"
}
LatestOffsetsTimestamp string
(Optional String) The date and time at which the Kafka topic offsets were added to the statement status. It is represented in RFC3339 format and is in UTC. For example, 2023-03-31T00:00:00-00:00.
id String
The provider-assigned unique ID for this managed resource.
latestOffsets Map<String,String>
(Optional Map) The last Kafka offsets that a statement has processed. Represented by a mapping from Kafka topic to a string representation of partitions mapped to offsets. For example,

"latest_offsets": {
  "topic-1": "partition:0,offset:100;partition:1,offset:200",
  "topic-2": "partition:0,offset:50"
}
latestOffsetsTimestamp String
(Optional String) The date and time at which the Kafka topic offsets were added to the statement status. It is represented in RFC3339 format and is in UTC. For example, 2023-03-31T00:00:00-00:00.
id string
The provider-assigned unique ID for this managed resource.
latestOffsets {[key: string]: string}
(Optional Map) The last Kafka offsets that a statement has processed. Represented by a mapping from Kafka topic to a string representation of partitions mapped to offsets. For example,

"latest_offsets": {
  "topic-1": "partition:0,offset:100;partition:1,offset:200",
  "topic-2": "partition:0,offset:50"
}
latestOffsetsTimestamp string
(Optional String) The date and time at which the Kafka topic offsets were added to the statement status. It is represented in RFC3339 format and is in UTC. For example, 2023-03-31T00:00:00-00:00.
id str
The provider-assigned unique ID for this managed resource.
latest_offsets Mapping[str, str]
(Optional Map) The last Kafka offsets that a statement has processed. Represented by a mapping from Kafka topic to a string representation of partitions mapped to offsets. For example,

"latest_offsets": {
  "topic-1": "partition:0,offset:100;partition:1,offset:200",
  "topic-2": "partition:0,offset:50"
}
latest_offsets_timestamp str
(Optional String) The date and time at which the Kafka topic offsets were added to the statement status. It is represented in RFC3339 format and is in UTC. For example, 2023-03-31T00:00:00-00:00.
id String
The provider-assigned unique ID for this managed resource.
latestOffsets Map<String>
(Optional Map) The last Kafka offsets that a statement has processed. Represented by a mapping from Kafka topic to a string representation of partitions mapped to offsets. For example,

"latest_offsets": {
  "topic-1": "partition:0,offset:100;partition:1,offset:200",
  "topic-2": "partition:0,offset:50"
}
latestOffsetsTimestamp String
(Optional String) The date and time at which the Kafka topic offsets were added to the statement status. It is represented in RFC3339 format and is in UTC. For example, 2023-03-31T00:00:00-00:00.
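For instance, a program can export these outputs for inspection (a sketch that assumes the example FlinkStatement resource declared under Example Usage):

import pulumi

# `example` is assumed to be the FlinkStatement resource declared earlier.
pulumi.export("latest_offsets", example.latest_offsets)
pulumi.export("latest_offsets_timestamp", example.latest_offsets_timestamp)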

Look up Existing FlinkStatement Resource

Get an existing FlinkStatement resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: FlinkStatementState, opts?: CustomResourceOptions): FlinkStatement
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        compute_pool: Optional[FlinkStatementComputePoolArgs] = None,
        credentials: Optional[FlinkStatementCredentialsArgs] = None,
        environment: Optional[FlinkStatementEnvironmentArgs] = None,
        latest_offsets: Optional[Mapping[str, str]] = None,
        latest_offsets_timestamp: Optional[str] = None,
        organization: Optional[FlinkStatementOrganizationArgs] = None,
        principal: Optional[FlinkStatementPrincipalArgs] = None,
        properties: Optional[Mapping[str, str]] = None,
        properties_sensitive: Optional[Mapping[str, str]] = None,
        rest_endpoint: Optional[str] = None,
        statement: Optional[str] = None,
        statement_name: Optional[str] = None,
        stopped: Optional[bool] = None) -> FlinkStatement
func GetFlinkStatement(ctx *Context, name string, id IDInput, state *FlinkStatementState, opts ...ResourceOption) (*FlinkStatement, error)
public static FlinkStatement Get(string name, Input<string> id, FlinkStatementState? state, CustomResourceOptions? opts = null)
public static FlinkStatement get(String name, Output<String> id, FlinkStatementState state, CustomResourceOptions options)
resources:
  _:
    type: confluentcloud:FlinkStatement
    get:
      id: ${id}
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name This property is required.
The unique name of the resulting resource.
id This property is required.
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
ComputePool Pulumi.ConfluentCloud.Inputs.FlinkStatementComputePool
Credentials Pulumi.ConfluentCloud.Inputs.FlinkStatementCredentials
The Cluster API Credentials.
Environment Pulumi.ConfluentCloud.Inputs.FlinkStatementEnvironment
LatestOffsets Dictionary<string, string>
(Optional Map) The last Kafka offsets that a statement has processed. Represented by a mapping from Kafka topic to a string representation of partitions mapped to offsets. For example,

"latest_offsets": {
  "topic-1": "partition:0,offset:100;partition:1,offset:200",
  "topic-2": "partition:0,offset:50"
}
LatestOffsetsTimestamp string
(Optional String) The date and time at which the Kafka topic offsets were added to the statement status. It is represented in RFC3339 format and is in UTC. For example, 2023-03-31T00:00:00-00:00.
Organization Pulumi.ConfluentCloud.Inputs.FlinkStatementOrganization
Principal Pulumi.ConfluentCloud.Inputs.FlinkStatementPrincipal
Properties Dictionary<string, string>
The custom statement properties to set, for example, sql.current-catalog.
PropertiesSensitive Dictionary<string, string>
Block for sensitive statement properties.
RestEndpoint Changes to this property will trigger replacement. string
The REST endpoint of the Flink region, for example, https://flink.us-east-1.aws.confluent.cloud.
Statement Changes to this property will trigger replacement. string
The raw SQL text statement, for example, SELECT CURRENT_TIMESTAMP;.
StatementName string
The ID of the Flink Statement, for example, cfeab4fe-b62c-49bd-9e99-51cc98c77a67.
Stopped bool
Indicates whether the statement should be stopped.
ComputePool FlinkStatementComputePoolArgs
Credentials FlinkStatementCredentialsArgs
The Cluster API Credentials.
Environment FlinkStatementEnvironmentArgs
LatestOffsets map[string]string
(Optional Map) The last Kafka offsets that a statement has processed. Represented by a mapping from Kafka topic to a string representation of partitions mapped to offsets. For example,

"latest_offsets": {
  "topic-1": "partition:0,offset:100;partition:1,offset:200",
  "topic-2": "partition:0,offset:50"
}
LatestOffsetsTimestamp string
(Optional String) The date and time at which the Kafka topic offsets were added to the statement status. It is represented in RFC3339 format and is in UTC. For example, 2023-03-31T00:00:00-00:00.
Organization FlinkStatementOrganizationArgs
Principal FlinkStatementPrincipalArgs
Properties map[string]string
The custom statement properties to set, for example, sql.current-catalog.
PropertiesSensitive map[string]string
Block for sensitive statement properties.
RestEndpoint Changes to this property will trigger replacement. string
The REST endpoint of the Flink region, for example, https://flink.us-east-1.aws.confluent.cloud.
Statement Changes to this property will trigger replacement. string
The raw SQL text statement, for example, SELECT CURRENT_TIMESTAMP;.
StatementName string
The ID of the Flink Statement, for example, cfeab4fe-b62c-49bd-9e99-51cc98c77a67.
Stopped bool
Indicates whether the statement should be stopped.
computePool FlinkStatementComputePool
credentials FlinkStatementCredentials
The Cluster API Credentials.
environment FlinkStatementEnvironment
latestOffsets Map<String,String>
(Optional Map) The last Kafka offsets that a statement has processed. Represented by a mapping from Kafka topic to a string representation of partitions mapped to offsets. For example,

"latest_offsets": {
  "topic-1": "partition:0,offset:100;partition:1,offset:200",
  "topic-2": "partition:0,offset:50"
}
latestOffsetsTimestamp String
(Optional String) The date and time at which the Kafka topic offsets were added to the statement status. It is represented in RFC3339 format and is in UTC. For example, 2023-03-31T00:00:00-00:00.
organization FlinkStatementOrganization
principal FlinkStatementPrincipal
properties Map<String,String>
The custom statement properties to set, for example, sql.current-catalog.
propertiesSensitive Map<String,String>
Block for sensitive statement properties.
restEndpoint Changes to this property will trigger replacement. String
The REST endpoint of the Flink region, for example, https://flink.us-east-1.aws.confluent.cloud.
statement Changes to this property will trigger replacement. String
The raw SQL text statement, for example, SELECT CURRENT_TIMESTAMP;.
statementName String
The ID of the Flink Statement, for example, cfeab4fe-b62c-49bd-9e99-51cc98c77a67.
stopped Boolean
Indicates whether the statement should be stopped.
computePool FlinkStatementComputePool
credentials FlinkStatementCredentials
The Cluster API Credentials.
environment FlinkStatementEnvironment
latestOffsets {[key: string]: string}
(Optional Map) The last Kafka offsets that a statement has processed. Represented by a mapping from Kafka topic to a string representation of partitions mapped to offsets. For example,

"latest_offsets": {
  "topic-1": "partition:0,offset:100;partition:1,offset:200",
  "topic-2": "partition:0,offset:50"
}
latestOffsetsTimestamp string
(Optional String) The date and time at which the Kafka topic offsets were added to the statement status. It is represented in RFC3339 format and is in UTC. For example, 2023-03-31T00:00:00-00:00.
organization FlinkStatementOrganization
principal FlinkStatementPrincipal
properties {[key: string]: string}
The custom statement properties to set, for example, sql.current-catalog.
propertiesSensitive {[key: string]: string}
Block for sensitive statement properties.
restEndpoint Changes to this property will trigger replacement. string
The REST endpoint of the Flink region, for example, https://flink.us-east-1.aws.confluent.cloud.
statement Changes to this property will trigger replacement. string
The raw SQL text statement, for example, SELECT CURRENT_TIMESTAMP;.
statementName string
The ID of the Flink Statement, for example, cfeab4fe-b62c-49bd-9e99-51cc98c77a67.
stopped boolean
Indicates whether the statement should be stopped.
compute_pool FlinkStatementComputePoolArgs
credentials FlinkStatementCredentialsArgs
The Cluster API Credentials.
environment FlinkStatementEnvironmentArgs
latest_offsets Mapping[str, str]
(Optional Map) The last Kafka offsets that a statement has processed. Represented by a mapping from Kafka topic to a string representation of partitions mapped to offsets. For example,

"latest_offsets": {
  "topic-1": "partition:0,offset:100;partition:1,offset:200",
  "topic-2": "partition:0,offset:50"
}
latest_offsets_timestamp str
(Optional String) The date and time at which the Kafka topic offsets were added to the statement status. It is represented in RFC3339 format and is in UTC. For example, 2023-03-31T00:00:00-00:00.
organization FlinkStatementOrganizationArgs
principal FlinkStatementPrincipalArgs
properties Mapping[str, str]
The custom statement properties to set, for example, sql.current-catalog.
properties_sensitive Mapping[str, str]
Block for sensitive statement properties.
rest_endpoint Changes to this property will trigger replacement. str
The REST endpoint of the Flink region, for example, https://flink.us-east-1.aws.confluent.cloud.
statement Changes to this property will trigger replacement. str
The raw SQL text statement, for example, SELECT CURRENT_TIMESTAMP;.
statement_name str
The ID of the Flink Statement, for example, cfeab4fe-b62c-49bd-9e99-51cc98c77a67.
stopped bool
Indicates whether the statement should be stopped.
computePool Property Map
credentials Property Map
The Cluster API Credentials.
environment Property Map
latestOffsets Map<String>
(Optional Map) The last Kafka offsets that a statement has processed. Represented by a mapping from Kafka topic to a string representation of partitions mapped to offsets. For example,

"latest_offsets": {
  "topic-1": "partition:0,offset:100;partition:1,offset:200",
  "topic-2": "partition:0,offset:50"
}
latestOffsetsTimestamp String
(Optional String) The date and time at which the Kafka topic offsets were added to the statement status. It is represented in RFC3339 format and is in UTC. For example, 2023-03-31T00:00:00-00:00.
organization Property Map
principal Property Map
properties Map<String>
The custom statement properties to set, for example, sql.current-catalog.
propertiesSensitive Map<String>
Block for sensitive statement properties.
restEndpoint Changes to this property will trigger replacement. String
The REST endpoint of the Flink region, for example, https://flink.us-east-1.aws.confluent.cloud.
statement Changes to this property will trigger replacement. String
The raw SQL text statement, for example, SELECT CURRENT_TIMESTAMP;.
statementName String
The ID of the Flink Statement, for example, cfeab4fe-b62c-49bd-9e99-51cc98c77a67.
stopped Boolean
Indicates whether the statement should be stopped.
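For example, in Python the lookup might look like this (a sketch; the ID below is the placeholder statement name used elsewhere on this page):

import pulumi_confluentcloud as confluentcloud

# Look up an existing Flink Statement by resource name and provider ID.
existing = confluentcloud.FlinkStatement.get("existing",
    "cfeab4fe-b62c-49bd-9e99-51cc98c77a67")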

Supporting Types

FlinkStatementComputePool
, FlinkStatementComputePoolArgs

Id This property is required. string
The ID of the Flink Compute Pool, for example, lfcp-abc123.
Id This property is required. string
The ID of the Flink Compute Pool, for example, lfcp-abc123.
id This property is required. String
The ID of the Flink Compute Pool, for example, lfcp-abc123.
id This property is required. string
The ID of the Flink Compute Pool, for example, lfcp-abc123.
id This property is required. str
The ID of the Flink Compute Pool, for example, lfcp-abc123.
id This property is required. String
The ID of the Flink Compute Pool, for example, lfcp-abc123.

FlinkStatementCredentials
, FlinkStatementCredentialsArgs

Key This property is required. string
The Flink API Key.
Secret This property is required. string

The Flink API Secret.

Note: A Flink API key consists of a key and a secret. Flink API keys are required to interact with Flink Statements in Confluent Cloud. Each Flink API key is valid for one specific Flink Region.

Note: Use Option #2 to simplify the key rotation process. When using Option #1, to rotate a Flink API key, create a new Flink API key, update the credentials block in all configuration files to use the new Flink API key, run a targeted update with pulumi up --target <statement URN>, and remove the old Flink API key. Alternatively, in case the old Flink API key was deleted already, you might need to run pulumi up --target <statement URN> --refresh=false instead.

Key This property is required. string
The Flink API Key.
Secret This property is required. string

The Flink API Secret.

Note: A Flink API key consists of a key and a secret. Flink API keys are required to interact with Flink Statements in Confluent Cloud. Each Flink API key is valid for one specific Flink Region.

Note: Use Option #2 to simplify the key rotation process. When using Option #1, to rotate a Flink API key, create a new Flink API key, update the credentials block in all configuration files to use the new Flink API key, run a targeted update with pulumi up --target <statement URN>, and remove the old Flink API key. Alternatively, in case the old Flink API key was deleted already, you might need to run pulumi up --target <statement URN> --refresh=false instead.

key This property is required. String
The Flink API Key.
secret This property is required. String

The Flink API Secret.

Note: A Flink API key consists of a key and a secret. Flink API keys are required to interact with Flink Statements in Confluent Cloud. Each Flink API key is valid for one specific Flink Region.

Note: Use Option #2 to simplify the key rotation process. When using Option #1, to rotate a Flink API key, create a new Flink API key, update the credentials block in all configuration files to use the new Flink API key, run a targeted update with pulumi up --target <statement URN>, and remove the old Flink API key. Alternatively, in case the old Flink API key was deleted already, you might need to run pulumi up --target <statement URN> --refresh=false instead.

key This property is required. string
The Flink API Key.
secret This property is required. string

The Flink API Secret.

Note: A Flink API key consists of a key and a secret. Flink API keys are required to interact with Flink Statements in Confluent Cloud. Each Flink API key is valid for one specific Flink Region.

Note: Use Option #2 to simplify the key rotation process. When using Option #1, to rotate a Flink API key, create a new Flink API key, update the credentials block in all configuration files to use the new Flink API key, run a targeted update with pulumi up --target <statement URN>, and remove the old Flink API key. Alternatively, in case the old Flink API key was deleted already, you might need to run pulumi up --target <statement URN> --refresh=false instead.

key This property is required. str
The Flink API Key.
secret This property is required. str

The Flink API Secret.

Note: A Flink API key consists of a key and a secret. Flink API keys are required to interact with Flink Statements in Confluent Cloud. Each Flink API key is valid for one specific Flink Region.

Note: Use Option #2 to simplify the key rotation process. When using Option #1, to rotate a Flink API key, create a new Flink API key, update the credentials block in all configuration files to use the new Flink API key, run a targeted update with pulumi up --target <statement URN>, and remove the old Flink API key. Alternatively, in case the old Flink API key was deleted already, you might need to run pulumi up --target <statement URN> --refresh=false instead.

key This property is required. String
The Flink API Key.
secret This property is required. String

The Flink API Secret.

Note: A Flink API key consists of a key and a secret. Flink API keys are required to interact with Flink Statements in Confluent Cloud. Each Flink API key is valid for one specific Flink Region.

Note: Use Option #2 to simplify the key rotation process. When using Option #1, to rotate a Flink API key, create a new Flink API key, update the credentials block in all configuration files to use the new Flink API key, run a targeted update with pulumi up --target <statement URN>, and remove the old Flink API key. Alternatively, in case the old Flink API key was deleted already, you might need to run pulumi up --target <statement URN> --refresh=false instead.
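
To keep the secret out of source control, one possible pattern is to read the credentials from Pulumi config secrets (a sketch; the config keys flinkApiKey and flinkApiSecret are assumptions, not names required by the provider):

import pulumi
import pulumi_confluentcloud as confluentcloud

config = pulumi.Config()

example = confluentcloud.FlinkStatement("example",
    statement="SELECT CURRENT_TIMESTAMP;",
    credentials={
        # Hypothetical config keys; set the secret with
        # `pulumi config set --secret flinkApiSecret <value>`.
        "key": config.require("flinkApiKey"),
        "secret": config.require_secret("flinkApiSecret"),
    })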

FlinkStatementEnvironment
, FlinkStatementEnvironmentArgs

Id
This property is required.
Changes to this property will trigger replacement.
string
The ID of the Environment, for example, env-abc123.
Id
This property is required.
Changes to this property will trigger replacement.
string
The ID of the Environment, for example, env-abc123.
id
This property is required.
Changes to this property will trigger replacement.
String
The ID of the Environment, for example, env-abc123.
id
This property is required.
Changes to this property will trigger replacement.
string
The ID of the Environment, for example, env-abc123.
id
This property is required.
Changes to this property will trigger replacement.
str
The ID of the Environment, for example, env-abc123.
id
This property is required.
Changes to this property will trigger replacement.
String
The ID of the Environment, for example, env-abc123.

FlinkStatementOrganization
, FlinkStatementOrganizationArgs

Id
This property is required.
Changes to this property will trigger replacement.
string
The ID of the Organization, for example, 1111aaaa-11aa-11aa-11aa-111111aaaaaa.
Id
This property is required.
Changes to this property will trigger replacement.
string
The ID of the Organization, for example, 1111aaaa-11aa-11aa-11aa-111111aaaaaa.
id
This property is required.
Changes to this property will trigger replacement.
String
The ID of the Organization, for example, 1111aaaa-11aa-11aa-11aa-111111aaaaaa.
id
This property is required.
Changes to this property will trigger replacement.
string
The ID of the Organization, for example, 1111aaaa-11aa-11aa-11aa-111111aaaaaa.
id
This property is required.
Changes to this property will trigger replacement.
str
The ID of the Organization, for example, 1111aaaa-11aa-11aa-11aa-111111aaaaaa.
id
This property is required.
Changes to this property will trigger replacement.
String
The ID of the Organization, for example, 1111aaaa-11aa-11aa-11aa-111111aaaaaa.

FlinkStatementPrincipal
, FlinkStatementPrincipalArgs

Id This property is required. string
The ID of the Principal the Flink Statement runs as, for example, sa-abc123.
Id This property is required. string
The ID of the Principal the Flink Statement runs as, for example, sa-abc123.
id This property is required. String
The ID of the Principal the Flink Statement runs as, for example, sa-abc123.
id This property is required. string
The ID of the Principal the Flink Statement runs as, for example, sa-abc123.
id This property is required. str
The ID of the Principal the Flink Statement runs as, for example, sa-abc123.
id This property is required. String
The ID of the Principal the Flink Statement runs as, for example, sa-abc123.

Import

You can import a Flink statement by using the Flink Statement name, for example:

Option #1: Manage multiple Flink Compute Pools in the same Pulumi Stack

$ export IMPORT_CONFLUENT_ORGANIZATION_ID="<organization_id>"

$ export IMPORT_CONFLUENT_ENVIRONMENT_ID="<environment_id>"

$ export IMPORT_FLINK_COMPUTE_POOL_ID="<flink_compute_pool_id>"

$ export IMPORT_FLINK_API_KEY="<flink_api_key>"

$ export IMPORT_FLINK_API_SECRET="<flink_api_secret>"

$ export IMPORT_FLINK_REST_ENDPOINT="<flink_rest_endpoint>"

$ export IMPORT_FLINK_PRINCIPAL_ID="<flink_principal_id>"

$ pulumi import confluentcloud:index/flinkStatement:FlinkStatement example cfeab4fe-b62c-49bd-9e99-51cc98c77a67

Option #2: Manage a single Flink Compute Pool in the same Pulumi Stack

$ pulumi import confluentcloud:index/flinkStatement:FlinkStatement example cfeab4fe-b62c-49bd-9e99-51cc98c77a67

Warning: Do not forget to delete your terminal command history afterwards for security purposes.

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository: Confluent Cloud pulumi/pulumi-confluentcloud
License: Apache-2.0
Notes: This Pulumi package is based on the confluent Terraform Provider.