1. Packages
  2. Azure Native
  3. API Docs
  4. synapse
  5. BigDataPool
This is the latest version of Azure Native. Use the Azure Native v2 docs if using the v2 version of this package.
Azure Native v3.1.0 published on Tuesday, Apr 8, 2025 by Pulumi

azure-native.synapse.BigDataPool

Explore with Pulumi AI

This is the latest version of Azure Native. Use the Azure Native v2 docs if using the v2 version of this package.
Azure Native v3.1.0 published on Tuesday, Apr 8, 2025 by Pulumi

A Big Data pool

Uses Azure REST API version 2021-06-01, which is the same API version used by version 2.x of the Azure Native provider.

Other available API versions: 2021-04-01-preview, 2021-05-01, 2021-06-01-preview. These can be accessed by generating a local SDK package using the CLI command pulumi package add azure-native synapse [ApiVersion]. See the version guide for details.

Example Usage

Create or update a Big Data pool

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;

return await Deployment.RunAsync(() => 
{
    var bigDataPool = new AzureNative.Synapse.BigDataPool("bigDataPool", new()
    {
        AutoPause = new AzureNative.Synapse.Inputs.AutoPausePropertiesArgs
        {
            DelayInMinutes = 15,
            Enabled = true,
        },
        AutoScale = new AzureNative.Synapse.Inputs.AutoScalePropertiesArgs
        {
            Enabled = true,
            MaxNodeCount = 50,
            MinNodeCount = 3,
        },
        BigDataPoolName = "ExamplePool",
        DefaultSparkLogFolder = "/logs",
        IsAutotuneEnabled = false,
        LibraryRequirements = new AzureNative.Synapse.Inputs.LibraryRequirementsArgs
        {
            Content = "",
            Filename = "requirements.txt",
        },
        Location = "West US 2",
        NodeCount = 4,
        NodeSize = AzureNative.Synapse.NodeSize.Medium,
        NodeSizeFamily = AzureNative.Synapse.NodeSizeFamily.MemoryOptimized,
        ResourceGroupName = "ExampleResourceGroup",
        SparkEventsFolder = "/events",
        SparkVersion = "3.3",
        Tags = 
        {
            { "key", "value" },
        },
        WorkspaceName = "ExampleWorkspace",
    });

});
Copy
package main

import (
	synapse "github.com/pulumi/pulumi-azure-native-sdk/synapse/v3"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := synapse.NewBigDataPool(ctx, "bigDataPool", &synapse.BigDataPoolArgs{
			AutoPause: &synapse.AutoPausePropertiesArgs{
				DelayInMinutes: pulumi.Int(15),
				Enabled:        pulumi.Bool(true),
			},
			AutoScale: &synapse.AutoScalePropertiesArgs{
				Enabled:      pulumi.Bool(true),
				MaxNodeCount: pulumi.Int(50),
				MinNodeCount: pulumi.Int(3),
			},
			BigDataPoolName:       pulumi.String("ExamplePool"),
			DefaultSparkLogFolder: pulumi.String("/logs"),
			IsAutotuneEnabled:     pulumi.Bool(false),
			LibraryRequirements: &synapse.LibraryRequirementsArgs{
				Content:  pulumi.String(""),
				Filename: pulumi.String("requirements.txt"),
			},
			Location:          pulumi.String("West US 2"),
			NodeCount:         pulumi.Int(4),
			NodeSize:          pulumi.String(synapse.NodeSizeMedium),
			NodeSizeFamily:    pulumi.String(synapse.NodeSizeFamilyMemoryOptimized),
			ResourceGroupName: pulumi.String("ExampleResourceGroup"),
			SparkEventsFolder: pulumi.String("/events"),
			SparkVersion:      pulumi.String("3.3"),
			Tags: pulumi.StringMap{
				"key": pulumi.String("value"),
			},
			WorkspaceName: pulumi.String("ExampleWorkspace"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Copy
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azurenative.synapse.BigDataPool;
import com.pulumi.azurenative.synapse.BigDataPoolArgs;
import com.pulumi.azurenative.synapse.inputs.AutoPausePropertiesArgs;
import com.pulumi.azurenative.synapse.inputs.AutoScalePropertiesArgs;
import com.pulumi.azurenative.synapse.inputs.LibraryRequirementsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var bigDataPool = new BigDataPool("bigDataPool", BigDataPoolArgs.builder()
            .autoPause(AutoPausePropertiesArgs.builder()
                .delayInMinutes(15)
                .enabled(true)
                .build())
            .autoScale(AutoScalePropertiesArgs.builder()
                .enabled(true)
                .maxNodeCount(50)
                .minNodeCount(3)
                .build())
            .bigDataPoolName("ExamplePool")
            .defaultSparkLogFolder("/logs")
            .isAutotuneEnabled(false)
            .libraryRequirements(LibraryRequirementsArgs.builder()
                .content("")
                .filename("requirements.txt")
                .build())
            .location("West US 2")
            .nodeCount(4)
            .nodeSize("Medium")
            .nodeSizeFamily("MemoryOptimized")
            .resourceGroupName("ExampleResourceGroup")
            .sparkEventsFolder("/events")
            .sparkVersion("3.3")
            .tags(Map.of("key", "value"))
            .workspaceName("ExampleWorkspace")
            .build());

    }
}
Copy
import * as pulumi from "@pulumi/pulumi";
import * as azure_native from "@pulumi/azure-native";

const bigDataPool = new azure_native.synapse.BigDataPool("bigDataPool", {
    autoPause: {
        delayInMinutes: 15,
        enabled: true,
    },
    autoScale: {
        enabled: true,
        maxNodeCount: 50,
        minNodeCount: 3,
    },
    bigDataPoolName: "ExamplePool",
    defaultSparkLogFolder: "/logs",
    isAutotuneEnabled: false,
    libraryRequirements: {
        content: "",
        filename: "requirements.txt",
    },
    location: "West US 2",
    nodeCount: 4,
    nodeSize: azure_native.synapse.NodeSize.Medium,
    nodeSizeFamily: azure_native.synapse.NodeSizeFamily.MemoryOptimized,
    resourceGroupName: "ExampleResourceGroup",
    sparkEventsFolder: "/events",
    sparkVersion: "3.3",
    tags: {
        key: "value",
    },
    workspaceName: "ExampleWorkspace",
});
Copy
import pulumi
import pulumi_azure_native as azure_native

big_data_pool = azure_native.synapse.BigDataPool("bigDataPool",
    auto_pause={
        "delay_in_minutes": 15,
        "enabled": True,
    },
    auto_scale={
        "enabled": True,
        "max_node_count": 50,
        "min_node_count": 3,
    },
    big_data_pool_name="ExamplePool",
    default_spark_log_folder="/logs",
    is_autotune_enabled=False,
    library_requirements={
        "content": "",
        "filename": "requirements.txt",
    },
    location="West US 2",
    node_count=4,
    node_size=azure_native.synapse.NodeSize.MEDIUM,
    node_size_family=azure_native.synapse.NodeSizeFamily.MEMORY_OPTIMIZED,
    resource_group_name="ExampleResourceGroup",
    spark_events_folder="/events",
    spark_version="3.3",
    tags={
        "key": "value",
    },
    workspace_name="ExampleWorkspace")
Copy
resources:
  bigDataPool:
    type: azure-native:synapse:BigDataPool
    properties:
      autoPause:
        delayInMinutes: 15
        enabled: true
      autoScale:
        enabled: true
        maxNodeCount: 50
        minNodeCount: 3
      bigDataPoolName: ExamplePool
      defaultSparkLogFolder: /logs
      isAutotuneEnabled: false
      libraryRequirements:
        content: ""
        filename: requirements.txt
      location: West US 2
      nodeCount: 4
      nodeSize: Medium
      nodeSizeFamily: MemoryOptimized
      resourceGroupName: ExampleResourceGroup
      sparkEventsFolder: /events
      sparkVersion: '3.3'
      tags:
        key: value
      workspaceName: ExampleWorkspace
Copy

Create BigDataPool Resource

Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

Constructor syntax

new BigDataPool(name: string, args: BigDataPoolArgs, opts?: CustomResourceOptions);
@overload
def BigDataPool(resource_name: str,
                args: BigDataPoolArgs,
                opts: Optional[ResourceOptions] = None)

@overload
def BigDataPool(resource_name: str,
                opts: Optional[ResourceOptions] = None,
                resource_group_name: Optional[str] = None,
                workspace_name: Optional[str] = None,
                library_requirements: Optional[LibraryRequirementsArgs] = None,
                node_size: Optional[Union[str, NodeSize]] = None,
                custom_libraries: Optional[Sequence[LibraryInfoArgs]] = None,
                default_spark_log_folder: Optional[str] = None,
                dynamic_executor_allocation: Optional[DynamicExecutorAllocationArgs] = None,
                force: Optional[bool] = None,
                is_autotune_enabled: Optional[bool] = None,
                is_compute_isolation_enabled: Optional[bool] = None,
                auto_pause: Optional[AutoPausePropertiesArgs] = None,
                location: Optional[str] = None,
                node_count: Optional[int] = None,
                cache_size: Optional[int] = None,
                node_size_family: Optional[Union[str, NodeSizeFamily]] = None,
                provisioning_state: Optional[str] = None,
                big_data_pool_name: Optional[str] = None,
                session_level_packages_enabled: Optional[bool] = None,
                spark_config_properties: Optional[SparkConfigPropertiesArgs] = None,
                spark_events_folder: Optional[str] = None,
                spark_version: Optional[str] = None,
                tags: Optional[Mapping[str, str]] = None,
                auto_scale: Optional[AutoScalePropertiesArgs] = None)
func NewBigDataPool(ctx *Context, name string, args BigDataPoolArgs, opts ...ResourceOption) (*BigDataPool, error)
public BigDataPool(string name, BigDataPoolArgs args, CustomResourceOptions? opts = null)
public BigDataPool(String name, BigDataPoolArgs args)
public BigDataPool(String name, BigDataPoolArgs args, CustomResourceOptions options)
type: azure-native:synapse:BigDataPool
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

Parameters

name This property is required. string
The unique name of the resource.
args This property is required. BigDataPoolArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name This property is required. str
The unique name of the resource.
args This property is required. BigDataPoolArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name This property is required. string
The unique name of the resource.
args This property is required. BigDataPoolArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name This property is required. string
The unique name of the resource.
args This property is required. BigDataPoolArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name This property is required. String
The unique name of the resource.
args This property is required. BigDataPoolArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Constructor example

The following reference example uses placeholder values for all input properties.

var bigDataPoolResource = new AzureNative.Synapse.BigDataPool("bigDataPoolResource", new()
{
    ResourceGroupName = "string",
    WorkspaceName = "string",
    LibraryRequirements = new AzureNative.Synapse.Inputs.LibraryRequirementsArgs
    {
        Content = "string",
        Filename = "string",
    },
    NodeSize = "string",
    CustomLibraries = new[]
    {
        new AzureNative.Synapse.Inputs.LibraryInfoArgs
        {
            ContainerName = "string",
            Name = "string",
            Path = "string",
            Type = "string",
        },
    },
    DefaultSparkLogFolder = "string",
    DynamicExecutorAllocation = new AzureNative.Synapse.Inputs.DynamicExecutorAllocationArgs
    {
        Enabled = false,
        MaxExecutors = 0,
        MinExecutors = 0,
    },
    Force = false,
    IsAutotuneEnabled = false,
    IsComputeIsolationEnabled = false,
    AutoPause = new AzureNative.Synapse.Inputs.AutoPausePropertiesArgs
    {
        DelayInMinutes = 0,
        Enabled = false,
    },
    Location = "string",
    NodeCount = 0,
    CacheSize = 0,
    NodeSizeFamily = "string",
    ProvisioningState = "string",
    BigDataPoolName = "string",
    SessionLevelPackagesEnabled = false,
    SparkConfigProperties = new AzureNative.Synapse.Inputs.SparkConfigPropertiesArgs
    {
        ConfigurationType = "string",
        Content = "string",
        Filename = "string",
    },
    SparkEventsFolder = "string",
    SparkVersion = "string",
    Tags = 
    {
        { "string", "string" },
    },
    AutoScale = new AzureNative.Synapse.Inputs.AutoScalePropertiesArgs
    {
        Enabled = false,
        MaxNodeCount = 0,
        MinNodeCount = 0,
    },
});
Copy
example, err := synapse.NewBigDataPool(ctx, "bigDataPoolResource", &synapse.BigDataPoolArgs{
	ResourceGroupName: pulumi.String("string"),
	WorkspaceName:     pulumi.String("string"),
	LibraryRequirements: &synapse.LibraryRequirementsArgs{
		Content:  pulumi.String("string"),
		Filename: pulumi.String("string"),
	},
	NodeSize: pulumi.String("string"),
	CustomLibraries: synapse.LibraryInfoArray{
		&synapse.LibraryInfoArgs{
			ContainerName: pulumi.String("string"),
			Name:          pulumi.String("string"),
			Path:          pulumi.String("string"),
			Type:          pulumi.String("string"),
		},
	},
	DefaultSparkLogFolder: pulumi.String("string"),
	DynamicExecutorAllocation: &synapse.DynamicExecutorAllocationArgs{
		Enabled:      pulumi.Bool(false),
		MaxExecutors: pulumi.Int(0),
		MinExecutors: pulumi.Int(0),
	},
	Force:                     pulumi.Bool(false),
	IsAutotuneEnabled:         pulumi.Bool(false),
	IsComputeIsolationEnabled: pulumi.Bool(false),
	AutoPause: &synapse.AutoPausePropertiesArgs{
		DelayInMinutes: pulumi.Int(0),
		Enabled:        pulumi.Bool(false),
	},
	Location:                    pulumi.String("string"),
	NodeCount:                   pulumi.Int(0),
	CacheSize:                   pulumi.Int(0),
	NodeSizeFamily:              pulumi.String("string"),
	ProvisioningState:           pulumi.String("string"),
	BigDataPoolName:             pulumi.String("string"),
	SessionLevelPackagesEnabled: pulumi.Bool(false),
	SparkConfigProperties: &synapse.SparkConfigPropertiesArgs{
		ConfigurationType: pulumi.String("string"),
		Content:           pulumi.String("string"),
		Filename:          pulumi.String("string"),
	},
	SparkEventsFolder: pulumi.String("string"),
	SparkVersion:      pulumi.String("string"),
	Tags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	AutoScale: &synapse.AutoScalePropertiesArgs{
		Enabled:      pulumi.Bool(false),
		MaxNodeCount: pulumi.Int(0),
		MinNodeCount: pulumi.Int(0),
	},
})
Copy
var bigDataPoolResource = new BigDataPool("bigDataPoolResource", BigDataPoolArgs.builder()
    .resourceGroupName("string")
    .workspaceName("string")
    .libraryRequirements(LibraryRequirementsArgs.builder()
        .content("string")
        .filename("string")
        .build())
    .nodeSize("string")
    .customLibraries(LibraryInfoArgs.builder()
        .containerName("string")
        .name("string")
        .path("string")
        .type("string")
        .build())
    .defaultSparkLogFolder("string")
    .dynamicExecutorAllocation(DynamicExecutorAllocationArgs.builder()
        .enabled(false)
        .maxExecutors(0)
        .minExecutors(0)
        .build())
    .force(false)
    .isAutotuneEnabled(false)
    .isComputeIsolationEnabled(false)
    .autoPause(AutoPausePropertiesArgs.builder()
        .delayInMinutes(0)
        .enabled(false)
        .build())
    .location("string")
    .nodeCount(0)
    .cacheSize(0)
    .nodeSizeFamily("string")
    .provisioningState("string")
    .bigDataPoolName("string")
    .sessionLevelPackagesEnabled(false)
    .sparkConfigProperties(SparkConfigPropertiesArgs.builder()
        .configurationType("string")
        .content("string")
        .filename("string")
        .build())
    .sparkEventsFolder("string")
    .sparkVersion("string")
    .tags(Map.of("string", "string"))
    .autoScale(AutoScalePropertiesArgs.builder()
        .enabled(false)
        .maxNodeCount(0)
        .minNodeCount(0)
        .build())
    .build());
Copy
big_data_pool_resource = azure_native.synapse.BigDataPool("bigDataPoolResource",
    resource_group_name="string",
    workspace_name="string",
    library_requirements={
        "content": "string",
        "filename": "string",
    },
    node_size="string",
    custom_libraries=[{
        "container_name": "string",
        "name": "string",
        "path": "string",
        "type": "string",
    }],
    default_spark_log_folder="string",
    dynamic_executor_allocation={
        "enabled": False,
        "max_executors": 0,
        "min_executors": 0,
    },
    force=False,
    is_autotune_enabled=False,
    is_compute_isolation_enabled=False,
    auto_pause={
        "delay_in_minutes": 0,
        "enabled": False,
    },
    location="string",
    node_count=0,
    cache_size=0,
    node_size_family="string",
    provisioning_state="string",
    big_data_pool_name="string",
    session_level_packages_enabled=False,
    spark_config_properties={
        "configuration_type": "string",
        "content": "string",
        "filename": "string",
    },
    spark_events_folder="string",
    spark_version="string",
    tags={
        "string": "string",
    },
    auto_scale={
        "enabled": False,
        "max_node_count": 0,
        "min_node_count": 0,
    })
Copy
const bigDataPoolResource = new azure_native.synapse.BigDataPool("bigDataPoolResource", {
    resourceGroupName: "string",
    workspaceName: "string",
    libraryRequirements: {
        content: "string",
        filename: "string",
    },
    nodeSize: "string",
    customLibraries: [{
        containerName: "string",
        name: "string",
        path: "string",
        type: "string",
    }],
    defaultSparkLogFolder: "string",
    dynamicExecutorAllocation: {
        enabled: false,
        maxExecutors: 0,
        minExecutors: 0,
    },
    force: false,
    isAutotuneEnabled: false,
    isComputeIsolationEnabled: false,
    autoPause: {
        delayInMinutes: 0,
        enabled: false,
    },
    location: "string",
    nodeCount: 0,
    cacheSize: 0,
    nodeSizeFamily: "string",
    provisioningState: "string",
    bigDataPoolName: "string",
    sessionLevelPackagesEnabled: false,
    sparkConfigProperties: {
        configurationType: "string",
        content: "string",
        filename: "string",
    },
    sparkEventsFolder: "string",
    sparkVersion: "string",
    tags: {
        string: "string",
    },
    autoScale: {
        enabled: false,
        maxNodeCount: 0,
        minNodeCount: 0,
    },
});
Copy
type: azure-native:synapse:BigDataPool
properties:
    autoPause:
        delayInMinutes: 0
        enabled: false
    autoScale:
        enabled: false
        maxNodeCount: 0
        minNodeCount: 0
    bigDataPoolName: string
    cacheSize: 0
    customLibraries:
        - containerName: string
          name: string
          path: string
          type: string
    defaultSparkLogFolder: string
    dynamicExecutorAllocation:
        enabled: false
        maxExecutors: 0
        minExecutors: 0
    force: false
    isAutotuneEnabled: false
    isComputeIsolationEnabled: false
    libraryRequirements:
        content: string
        filename: string
    location: string
    nodeCount: 0
    nodeSize: string
    nodeSizeFamily: string
    provisioningState: string
    resourceGroupName: string
    sessionLevelPackagesEnabled: false
    sparkConfigProperties:
        configurationType: string
        content: string
        filename: string
    sparkEventsFolder: string
    sparkVersion: string
    tags:
        string: string
    workspaceName: string
Copy

BigDataPool Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

The BigDataPool resource accepts the following input properties:

ResourceGroupName
This property is required.
Changes to this property will trigger replacement.
string
The name of the resource group. The name is case insensitive.
WorkspaceName
This property is required.
Changes to this property will trigger replacement.
string
The name of the workspace.
AutoPause Pulumi.AzureNative.Synapse.Inputs.AutoPauseProperties
Auto-pausing properties
AutoScale Pulumi.AzureNative.Synapse.Inputs.AutoScaleProperties
Auto-scaling properties
BigDataPoolName Changes to this property will trigger replacement. string
Big Data pool name
CacheSize int
The cache size
CustomLibraries List<Pulumi.AzureNative.Synapse.Inputs.LibraryInfo>
List of custom libraries/packages associated with the spark pool.
DefaultSparkLogFolder string
The default folder where Spark logs will be written.
DynamicExecutorAllocation Pulumi.AzureNative.Synapse.Inputs.DynamicExecutorAllocation
Dynamic Executor Allocation
Force bool
Whether to stop any running jobs in the Big Data pool
IsAutotuneEnabled Changes to this property will trigger replacement. bool
Whether autotune is required or not.
IsComputeIsolationEnabled bool
Whether compute isolation is required or not.
LibraryRequirements Pulumi.AzureNative.Synapse.Inputs.LibraryRequirements
Library version requirements
Location Changes to this property will trigger replacement. string
The geo-location where the resource lives
NodeCount Changes to this property will trigger replacement. int
The number of nodes in the Big Data pool.
NodeSize string | Pulumi.AzureNative.Synapse.NodeSize
The level of compute power that each node in the Big Data pool has.
NodeSizeFamily string | Pulumi.AzureNative.Synapse.NodeSizeFamily
The kind of nodes that the Big Data pool provides.
ProvisioningState string
The state of the Big Data pool.
SessionLevelPackagesEnabled bool
Whether session-level packages are enabled.
SparkConfigProperties Pulumi.AzureNative.Synapse.Inputs.SparkConfigProperties
Spark configuration file to specify additional properties
SparkEventsFolder string
The Spark events folder
SparkVersion string
The Apache Spark version.
Tags Dictionary<string, string>
Resource tags.
ResourceGroupName
This property is required.
Changes to this property will trigger replacement.
string
The name of the resource group. The name is case insensitive.
WorkspaceName
This property is required.
Changes to this property will trigger replacement.
string
The name of the workspace.
AutoPause AutoPausePropertiesArgs
Auto-pausing properties
AutoScale AutoScalePropertiesArgs
Auto-scaling properties
BigDataPoolName Changes to this property will trigger replacement. string
Big Data pool name
CacheSize int
The cache size
CustomLibraries []LibraryInfoArgs
List of custom libraries/packages associated with the spark pool.
DefaultSparkLogFolder string
The default folder where Spark logs will be written.
DynamicExecutorAllocation DynamicExecutorAllocationArgs
Dynamic Executor Allocation
Force bool
Whether to stop any running jobs in the Big Data pool
IsAutotuneEnabled Changes to this property will trigger replacement. bool
Whether autotune is required or not.
IsComputeIsolationEnabled bool
Whether compute isolation is required or not.
LibraryRequirements LibraryRequirementsArgs
Library version requirements
Location Changes to this property will trigger replacement. string
The geo-location where the resource lives
NodeCount Changes to this property will trigger replacement. int
The number of nodes in the Big Data pool.
NodeSize string | NodeSize
The level of compute power that each node in the Big Data pool has.
NodeSizeFamily string | NodeSizeFamily
The kind of nodes that the Big Data pool provides.
ProvisioningState string
The state of the Big Data pool.
SessionLevelPackagesEnabled bool
Whether session-level packages are enabled.
SparkConfigProperties SparkConfigPropertiesArgs
Spark configuration file to specify additional properties
SparkEventsFolder string
The Spark events folder
SparkVersion string
The Apache Spark version.
Tags map[string]string
Resource tags.
resourceGroupName
This property is required.
Changes to this property will trigger replacement.
String
The name of the resource group. The name is case insensitive.
workspaceName
This property is required.
Changes to this property will trigger replacement.
String
The name of the workspace.
autoPause AutoPauseProperties
Auto-pausing properties
autoScale AutoScaleProperties
Auto-scaling properties
bigDataPoolName Changes to this property will trigger replacement. String
Big Data pool name
cacheSize Integer
The cache size
customLibraries List<LibraryInfo>
List of custom libraries/packages associated with the spark pool.
defaultSparkLogFolder String
The default folder where Spark logs will be written.
dynamicExecutorAllocation DynamicExecutorAllocation
Dynamic Executor Allocation
force Boolean
Whether to stop any running jobs in the Big Data pool
isAutotuneEnabled Changes to this property will trigger replacement. Boolean
Whether autotune is required or not.
isComputeIsolationEnabled Boolean
Whether compute isolation is required or not.
libraryRequirements LibraryRequirements
Library version requirements
location Changes to this property will trigger replacement. String
The geo-location where the resource lives
nodeCount Changes to this property will trigger replacement. Integer
The number of nodes in the Big Data pool.
nodeSize String | NodeSize
The level of compute power that each node in the Big Data pool has.
nodeSizeFamily String | NodeSizeFamily
The kind of nodes that the Big Data pool provides.
provisioningState String
The state of the Big Data pool.
sessionLevelPackagesEnabled Boolean
Whether session-level packages are enabled.
sparkConfigProperties SparkConfigProperties
Spark configuration file to specify additional properties
sparkEventsFolder String
The Spark events folder
sparkVersion String
The Apache Spark version.
tags Map<String,String>
Resource tags.
resourceGroupName
This property is required.
Changes to this property will trigger replacement.
string
The name of the resource group. The name is case insensitive.
workspaceName
This property is required.
Changes to this property will trigger replacement.
string
The name of the workspace.
autoPause AutoPauseProperties
Auto-pausing properties
autoScale AutoScaleProperties
Auto-scaling properties
bigDataPoolName Changes to this property will trigger replacement. string
Big Data pool name
cacheSize number
The cache size
customLibraries LibraryInfo[]
List of custom libraries/packages associated with the spark pool.
defaultSparkLogFolder string
The default folder where Spark logs will be written.
dynamicExecutorAllocation DynamicExecutorAllocation
Dynamic Executor Allocation
force boolean
Whether to stop any running jobs in the Big Data pool
isAutotuneEnabled Changes to this property will trigger replacement. boolean
Whether autotune is required or not.
isComputeIsolationEnabled boolean
Whether compute isolation is required or not.
libraryRequirements LibraryRequirements
Library version requirements
location Changes to this property will trigger replacement. string
The geo-location where the resource lives
nodeCount Changes to this property will trigger replacement. number
The number of nodes in the Big Data pool.
nodeSize string | NodeSize
The level of compute power that each node in the Big Data pool has.
nodeSizeFamily string | NodeSizeFamily
The kind of nodes that the Big Data pool provides.
provisioningState string
The state of the Big Data pool.
sessionLevelPackagesEnabled boolean
Whether session-level packages are enabled.
sparkConfigProperties SparkConfigProperties
Spark configuration file to specify additional properties
sparkEventsFolder string
The Spark events folder
sparkVersion string
The Apache Spark version.
tags {[key: string]: string}
Resource tags.
resource_group_name
This property is required.
Changes to this property will trigger replacement.
str
The name of the resource group. The name is case insensitive.
workspace_name
This property is required.
Changes to this property will trigger replacement.
str
The name of the workspace.
auto_pause AutoPausePropertiesArgs
Auto-pausing properties
auto_scale AutoScalePropertiesArgs
Auto-scaling properties
big_data_pool_name Changes to this property will trigger replacement. str
Big Data pool name
cache_size int
The cache size
custom_libraries Sequence[LibraryInfoArgs]
List of custom libraries/packages associated with the spark pool.
default_spark_log_folder str
The default folder where Spark logs will be written.
dynamic_executor_allocation DynamicExecutorAllocationArgs
Dynamic Executor Allocation
force bool
Whether to stop any running jobs in the Big Data pool
is_autotune_enabled Changes to this property will trigger replacement. bool
Whether autotune is required or not.
is_compute_isolation_enabled bool
Whether compute isolation is required or not.
library_requirements LibraryRequirementsArgs
Library version requirements
location Changes to this property will trigger replacement. str
The geo-location where the resource lives
node_count Changes to this property will trigger replacement. int
The number of nodes in the Big Data pool.
node_size str | NodeSize
The level of compute power that each node in the Big Data pool has.
node_size_family str | NodeSizeFamily
The kind of nodes that the Big Data pool provides.
provisioning_state str
The state of the Big Data pool.
session_level_packages_enabled bool
Whether session-level packages are enabled.
spark_config_properties SparkConfigPropertiesArgs
Spark configuration file to specify additional properties
spark_events_folder str
The Spark events folder
spark_version str
The Apache Spark version.
tags Mapping[str, str]
Resource tags.
resourceGroupName
This property is required.
Changes to this property will trigger replacement.
String
The name of the resource group. The name is case insensitive.
workspaceName
This property is required.
Changes to this property will trigger replacement.
String
The name of the workspace.
autoPause Property Map
Auto-pausing properties
autoScale Property Map
Auto-scaling properties
bigDataPoolName Changes to this property will trigger replacement. String
Big Data pool name
cacheSize Number
The cache size
customLibraries List<Property Map>
List of custom libraries/packages associated with the spark pool.
defaultSparkLogFolder String
The default folder where Spark logs will be written.
dynamicExecutorAllocation Property Map
Dynamic Executor Allocation
force Boolean
Whether to stop any running jobs in the Big Data pool
isAutotuneEnabled Changes to this property will trigger replacement. Boolean
Whether autotune is required or not.
isComputeIsolationEnabled Boolean
Whether compute isolation is required or not.
libraryRequirements Property Map
Library version requirements
location Changes to this property will trigger replacement. String
The geo-location where the resource lives
nodeCount Changes to this property will trigger replacement. Number
The number of nodes in the Big Data pool.
nodeSize String | "None" | "Small" | "Medium" | "Large" | "XLarge" | "XXLarge" | "XXXLarge"
The level of compute power that each node in the Big Data pool has.
nodeSizeFamily String | "None" | "MemoryOptimized" | "HardwareAcceleratedFPGA" | "HardwareAcceleratedGPU"
The kind of nodes that the Big Data pool provides.
provisioningState String
The state of the Big Data pool.
sessionLevelPackagesEnabled Boolean
Whether session-level packages are enabled.
sparkConfigProperties Property Map
Spark configuration file to specify additional properties
sparkEventsFolder String
The Spark events folder
sparkVersion String
The Apache Spark version.
tags Map<String>
Resource tags.

Outputs

All input properties are implicitly available as output properties. Additionally, the BigDataPool resource produces the following output properties:

AzureApiVersion string
The Azure API version of the resource.
CreationDate string
The time when the Big Data pool was created.
Id string
The provider-assigned unique ID for this managed resource.
LastSucceededTimestamp string
The time when the Big Data pool was updated successfully.
Name string
The name of the resource
Type string
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
AzureApiVersion string
The Azure API version of the resource.
CreationDate string
The time when the Big Data pool was created.
Id string
The provider-assigned unique ID for this managed resource.
LastSucceededTimestamp string
The time when the Big Data pool was updated successfully.
Name string
The name of the resource
Type string
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
azureApiVersion String
The Azure API version of the resource.
creationDate String
The time when the Big Data pool was created.
id String
The provider-assigned unique ID for this managed resource.
lastSucceededTimestamp String
The time when the Big Data pool was updated successfully.
name String
The name of the resource
type String
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
azureApiVersion string
The Azure API version of the resource.
creationDate string
The time when the Big Data pool was created.
id string
The provider-assigned unique ID for this managed resource.
lastSucceededTimestamp string
The time when the Big Data pool was updated successfully.
name string
The name of the resource
type string
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
azure_api_version str
The Azure API version of the resource.
creation_date str
The time when the Big Data pool was created.
id str
The provider-assigned unique ID for this managed resource.
last_succeeded_timestamp str
The time when the Big Data pool was updated successfully.
name str
The name of the resource
type str
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
azureApiVersion String
The Azure API version of the resource.
creationDate String
The time when the Big Data pool was created.
id String
The provider-assigned unique ID for this managed resource.
lastSucceededTimestamp String
The time when the Big Data pool was updated successfully.
name String
The name of the resource
type String
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"

Supporting Types

AutoPauseProperties
, AutoPausePropertiesArgs

DelayInMinutes int
Number of minutes of idle time before the Big Data pool is automatically paused.
Enabled bool
Whether auto-pausing is enabled for the Big Data pool.
DelayInMinutes int
Number of minutes of idle time before the Big Data pool is automatically paused.
Enabled bool
Whether auto-pausing is enabled for the Big Data pool.
delayInMinutes Integer
Number of minutes of idle time before the Big Data pool is automatically paused.
enabled Boolean
Whether auto-pausing is enabled for the Big Data pool.
delayInMinutes number
Number of minutes of idle time before the Big Data pool is automatically paused.
enabled boolean
Whether auto-pausing is enabled for the Big Data pool.
delay_in_minutes int
Number of minutes of idle time before the Big Data pool is automatically paused.
enabled bool
Whether auto-pausing is enabled for the Big Data pool.
delayInMinutes Number
Number of minutes of idle time before the Big Data pool is automatically paused.
enabled Boolean
Whether auto-pausing is enabled for the Big Data pool.

AutoPausePropertiesResponse
, AutoPausePropertiesResponseArgs

DelayInMinutes int
Number of minutes of idle time before the Big Data pool is automatically paused.
Enabled bool
Whether auto-pausing is enabled for the Big Data pool.
DelayInMinutes int
Number of minutes of idle time before the Big Data pool is automatically paused.
Enabled bool
Whether auto-pausing is enabled for the Big Data pool.
delayInMinutes Integer
Number of minutes of idle time before the Big Data pool is automatically paused.
enabled Boolean
Whether auto-pausing is enabled for the Big Data pool.
delayInMinutes number
Number of minutes of idle time before the Big Data pool is automatically paused.
enabled boolean
Whether auto-pausing is enabled for the Big Data pool.
delay_in_minutes int
Number of minutes of idle time before the Big Data pool is automatically paused.
enabled bool
Whether auto-pausing is enabled for the Big Data pool.
delayInMinutes Number
Number of minutes of idle time before the Big Data pool is automatically paused.
enabled Boolean
Whether auto-pausing is enabled for the Big Data pool.

AutoScaleProperties
, AutoScalePropertiesArgs

Enabled bool
Whether automatic scaling is enabled for the Big Data pool.
MaxNodeCount int
The maximum number of nodes the Big Data pool can support.
MinNodeCount int
The minimum number of nodes the Big Data pool can support.
Enabled bool
Whether automatic scaling is enabled for the Big Data pool.
MaxNodeCount int
The maximum number of nodes the Big Data pool can support.
MinNodeCount int
The minimum number of nodes the Big Data pool can support.
enabled Boolean
Whether automatic scaling is enabled for the Big Data pool.
maxNodeCount Integer
The maximum number of nodes the Big Data pool can support.
minNodeCount Integer
The minimum number of nodes the Big Data pool can support.
enabled boolean
Whether automatic scaling is enabled for the Big Data pool.
maxNodeCount number
The maximum number of nodes the Big Data pool can support.
minNodeCount number
The minimum number of nodes the Big Data pool can support.
enabled bool
Whether automatic scaling is enabled for the Big Data pool.
max_node_count int
The maximum number of nodes the Big Data pool can support.
min_node_count int
The minimum number of nodes the Big Data pool can support.
enabled Boolean
Whether automatic scaling is enabled for the Big Data pool.
maxNodeCount Number
The maximum number of nodes the Big Data pool can support.
minNodeCount Number
The minimum number of nodes the Big Data pool can support.

AutoScalePropertiesResponse
, AutoScalePropertiesResponseArgs

Enabled bool
Whether automatic scaling is enabled for the Big Data pool.
MaxNodeCount int
The maximum number of nodes the Big Data pool can support.
MinNodeCount int
The minimum number of nodes the Big Data pool can support.
Enabled bool
Whether automatic scaling is enabled for the Big Data pool.
MaxNodeCount int
The maximum number of nodes the Big Data pool can support.
MinNodeCount int
The minimum number of nodes the Big Data pool can support.
enabled Boolean
Whether automatic scaling is enabled for the Big Data pool.
maxNodeCount Integer
The maximum number of nodes the Big Data pool can support.
minNodeCount Integer
The minimum number of nodes the Big Data pool can support.
enabled boolean
Whether automatic scaling is enabled for the Big Data pool.
maxNodeCount number
The maximum number of nodes the Big Data pool can support.
minNodeCount number
The minimum number of nodes the Big Data pool can support.
enabled bool
Whether automatic scaling is enabled for the Big Data pool.
max_node_count int
The maximum number of nodes the Big Data pool can support.
min_node_count int
The minimum number of nodes the Big Data pool can support.
enabled Boolean
Whether automatic scaling is enabled for the Big Data pool.
maxNodeCount Number
The maximum number of nodes the Big Data pool can support.
minNodeCount Number
The minimum number of nodes the Big Data pool can support.

ConfigurationType
, ConfigurationTypeArgs

File
File
Artifact
Artifact
ConfigurationTypeFile
File
ConfigurationTypeArtifact
Artifact
File
File
Artifact
Artifact
File
File
Artifact
Artifact
FILE
File
ARTIFACT
Artifact
"File"
File
"Artifact"
Artifact

DynamicExecutorAllocation
, DynamicExecutorAllocationArgs

Enabled bool
Indicates whether Dynamic Executor Allocation is enabled or not.
MaxExecutors int
The maximum number of executors allotted
MinExecutors int
The minimum number of executors allotted
Enabled bool
Indicates whether Dynamic Executor Allocation is enabled or not.
MaxExecutors int
The maximum number of executors allotted
MinExecutors int
The minimum number of executors allotted
enabled Boolean
Indicates whether Dynamic Executor Allocation is enabled or not.
maxExecutors Integer
The maximum number of executors allotted
minExecutors Integer
The minimum number of executors allotted
enabled boolean
Indicates whether Dynamic Executor Allocation is enabled or not.
maxExecutors number
The maximum number of executors allotted
minExecutors number
The minimum number of executors allotted
enabled bool
Indicates whether Dynamic Executor Allocation is enabled or not.
max_executors int
The maximum number of executors allotted
min_executors int
The minimum number of executors allotted
enabled Boolean
Indicates whether Dynamic Executor Allocation is enabled or not.
maxExecutors Number
The maximum number of executors allotted
minExecutors Number
The minimum number of executors allotted

DynamicExecutorAllocationResponse
, DynamicExecutorAllocationResponseArgs

Enabled bool
Indicates whether Dynamic Executor Allocation is enabled or not.
MaxExecutors int
The maximum number of executors allotted
MinExecutors int
The minimum number of executors allotted
Enabled bool
Indicates whether Dynamic Executor Allocation is enabled or not.
MaxExecutors int
The maximum number of executors allotted
MinExecutors int
The minimum number of executors allotted
enabled Boolean
Indicates whether Dynamic Executor Allocation is enabled or not.
maxExecutors Integer
The maximum number of executors allotted
minExecutors Integer
The minimum number of executors allotted
enabled boolean
Indicates whether Dynamic Executor Allocation is enabled or not.
maxExecutors number
The maximum number of executors allotted
minExecutors number
The minimum number of executors allotted
enabled bool
Indicates whether Dynamic Executor Allocation is enabled or not.
max_executors int
The maximum number of executors allotted
min_executors int
The minimum number of executors allotted
enabled Boolean
Indicates whether Dynamic Executor Allocation is enabled or not.
maxExecutors Number
The maximum number of executors allotted
minExecutors Number
The minimum number of executors allotted

LibraryInfo
, LibraryInfoArgs

ContainerName string
Storage blob container name.
Name string
Name of the library.
Path string
Storage blob path of library.
Type string
Type of the library.
ContainerName string
Storage blob container name.
Name string
Name of the library.
Path string
Storage blob path of library.
Type string
Type of the library.
containerName String
Storage blob container name.
name String
Name of the library.
path String
Storage blob path of library.
type String
Type of the library.
containerName string
Storage blob container name.
name string
Name of the library.
path string
Storage blob path of library.
type string
Type of the library.
container_name str
Storage blob container name.
name str
Name of the library.
path str
Storage blob path of library.
type str
Type of the library.
containerName String
Storage blob container name.
name String
Name of the library.
path String
Storage blob path of library.
type String
Type of the library.

LibraryInfoResponse
, LibraryInfoResponseArgs

CreatorId This property is required. string
Creator Id of the library/package.
ProvisioningStatus This property is required. string
Provisioning status of the library/package.
UploadedTimestamp This property is required. string
The last update time of the library.
ContainerName string
Storage blob container name.
Name string
Name of the library.
Path string
Storage blob path of library.
Type string
Type of the library.
CreatorId This property is required. string
Creator Id of the library/package.
ProvisioningStatus This property is required. string
Provisioning status of the library/package.
UploadedTimestamp This property is required. string
The last update time of the library.
ContainerName string
Storage blob container name.
Name string
Name of the library.
Path string
Storage blob path of library.
Type string
Type of the library.
creatorId This property is required. String
Creator Id of the library/package.
provisioningStatus This property is required. String
Provisioning status of the library/package.
uploadedTimestamp This property is required. String
The last update time of the library.
containerName String
Storage blob container name.
name String
Name of the library.
path String
Storage blob path of library.
type String
Type of the library.
creatorId This property is required. string
Creator Id of the library/package.
provisioningStatus This property is required. string
Provisioning status of the library/package.
uploadedTimestamp This property is required. string
The last update time of the library.
containerName string
Storage blob container name.
name string
Name of the library.
path string
Storage blob path of library.
type string
Type of the library.
creator_id This property is required. str
Creator Id of the library/package.
provisioning_status This property is required. str
Provisioning status of the library/package.
uploaded_timestamp This property is required. str
The last update time of the library.
container_name str
Storage blob container name.
name str
Name of the library.
path str
Storage blob path of library.
type str
Type of the library.
creatorId This property is required. String
Creator Id of the library/package.
provisioningStatus This property is required. String
Provisioning status of the library/package.
uploadedTimestamp This property is required. String
The last update time of the library.
containerName String
Storage blob container name.
name String
Name of the library.
path String
Storage blob path of library.
type String
Type of the library.

LibraryRequirements
, LibraryRequirementsArgs

Content string
The library requirements.
Filename string
The filename of the library requirements file.
Content string
The library requirements.
Filename string
The filename of the library requirements file.
content String
The library requirements.
filename String
The filename of the library requirements file.
content string
The library requirements.
filename string
The filename of the library requirements file.
content str
The library requirements.
filename str
The filename of the library requirements file.
content String
The library requirements.
filename String
The filename of the library requirements file.

LibraryRequirementsResponse
, LibraryRequirementsResponseArgs

Time This property is required. string
The last update time of the library requirements file.
Content string
The library requirements.
Filename string
The filename of the library requirements file.
Time This property is required. string
The last update time of the library requirements file.
Content string
The library requirements.
Filename string
The filename of the library requirements file.
time This property is required. String
The last update time of the library requirements file.
content String
The library requirements.
filename String
The filename of the library requirements file.
time This property is required. string
The last update time of the library requirements file.
content string
The library requirements.
filename string
The filename of the library requirements file.
time This property is required. str
The last update time of the library requirements file.
content str
The library requirements.
filename str
The filename of the library requirements file.
time This property is required. String
The last update time of the library requirements file.
content String
The library requirements.
filename String
The filename of the library requirements file.

NodeSize
, NodeSizeArgs

None
None
Small
Small
Medium
Medium
Large
Large
XLarge
XLarge
XXLarge
XXLarge
XXXLarge
XXXLarge
NodeSizeNone
None
NodeSizeSmall
Small
NodeSizeMedium
Medium
NodeSizeLarge
Large
NodeSizeXLarge
XLarge
NodeSizeXXLarge
XXLarge
NodeSizeXXXLarge
XXXLarge
None
None
Small
Small
Medium
Medium
Large
Large
XLarge
XLarge
XXLarge
XXLarge
XXXLarge
XXXLarge
None
None
Small
Small
Medium
Medium
Large
Large
XLarge
XLarge
XXLarge
XXLarge
XXXLarge
XXXLarge
NONE
None
SMALL
Small
MEDIUM
Medium
LARGE
Large
X_LARGE
XLarge
XX_LARGE
XXLarge
XXX_LARGE
XXXLarge
"None"
None
"Small"
Small
"Medium"
Medium
"Large"
Large
"XLarge"
XLarge
"XXLarge"
XXLarge
"XXXLarge"
XXXLarge

NodeSizeFamily
, NodeSizeFamilyArgs

None
None
MemoryOptimized
MemoryOptimized
HardwareAcceleratedFPGA
HardwareAcceleratedFPGA
HardwareAcceleratedGPU
HardwareAcceleratedGPU
NodeSizeFamilyNone
None
NodeSizeFamilyMemoryOptimized
MemoryOptimized
NodeSizeFamilyHardwareAcceleratedFPGA
HardwareAcceleratedFPGA
NodeSizeFamilyHardwareAcceleratedGPU
HardwareAcceleratedGPU
None
None
MemoryOptimized
MemoryOptimized
HardwareAcceleratedFPGA
HardwareAcceleratedFPGA
HardwareAcceleratedGPU
HardwareAcceleratedGPU
None
None
MemoryOptimized
MemoryOptimized
HardwareAcceleratedFPGA
HardwareAcceleratedFPGA
HardwareAcceleratedGPU
HardwareAcceleratedGPU
NONE
None
MEMORY_OPTIMIZED
MemoryOptimized
HARDWARE_ACCELERATED_FPGA
HardwareAcceleratedFPGA
HARDWARE_ACCELERATED_GPU
HardwareAcceleratedGPU
"None"
None
"MemoryOptimized"
MemoryOptimized
"HardwareAcceleratedFPGA"
HardwareAcceleratedFPGA
"HardwareAcceleratedGPU"
HardwareAcceleratedGPU

SparkConfigProperties
, SparkConfigPropertiesArgs

ConfigurationType string | Pulumi.AzureNative.Synapse.ConfigurationType
The type of the spark config properties file.
Content string
The spark config properties.
Filename string
The filename of the spark config properties file.
ConfigurationType string | ConfigurationType
The type of the spark config properties file.
Content string
The spark config properties.
Filename string
The filename of the spark config properties file.
configurationType String | ConfigurationType
The type of the spark config properties file.
content String
The spark config properties.
filename String
The filename of the spark config properties file.
configurationType string | ConfigurationType
The type of the spark config properties file.
content string
The spark config properties.
filename string
The filename of the spark config properties file.
configuration_type str | ConfigurationType
The type of the spark config properties file.
content str
The spark config properties.
filename str
The filename of the spark config properties file.
configurationType String | "File" | "Artifact"
The type of the spark config properties file.
content String
The spark config properties.
filename String
The filename of the spark config properties file.

SparkConfigPropertiesResponse
, SparkConfigPropertiesResponseArgs

Time This property is required. string
The last update time of the spark config properties file.
ConfigurationType string
The type of the spark config properties file.
Content string
The spark config properties.
Filename string
The filename of the spark config properties file.
Time This property is required. string
The last update time of the spark config properties file.
ConfigurationType string
The type of the spark config properties file.
Content string
The spark config properties.
Filename string
The filename of the spark config properties file.
time This property is required. String
The last update time of the spark config properties file.
configurationType String
The type of the spark config properties file.
content String
The spark config properties.
filename String
The filename of the spark config properties file.
time This property is required. string
The last update time of the spark config properties file.
configurationType string
The type of the spark config properties file.
content string
The spark config properties.
filename string
The filename of the spark config properties file.
time This property is required. str
The last update time of the spark config properties file.
configuration_type str
The type of the spark config properties file.
content str
The spark config properties.
filename str
The filename of the spark config properties file.
time This property is required. String
The last update time of the spark config properties file.
configurationType String
The type of the spark config properties file.
content String
The spark config properties.
filename String
The filename of the spark config properties file.

Import

An existing resource can be imported using its type token, name, and identifier, e.g.

$ pulumi import azure-native:synapse:BigDataPool ExamplePool /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/bigDataPools/{bigDataPoolName} 
Copy

To learn more about importing existing cloud resources, see Importing resources.

Package Details

Repository
Azure Native pulumi/pulumi-azure-native
License
Apache-2.0
This is the latest version of Azure Native. Use the Azure Native v2 docs if using the v2 version of this package.
Azure Native v3.1.0 published on Tuesday, Apr 8, 2025 by Pulumi