oci.DataFlow.InvokeRun
Explore with Pulumi AI
This resource provides the Invoke Run resource in Oracle Cloud Infrastructure Data Flow service.
Creates a run for an application.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as oci from "@pulumi/oci";
const testInvokeRun = new oci.dataflow.InvokeRun("test_invoke_run", {
    compartmentId: compartmentId,
    applicationId: testApplication.id,
    applicationLogConfig: {
        logGroupId: testLogGroup.id,
        logId: testLog.id,
    },
    archiveUri: invokeRunArchiveUri,
    arguments: invokeRunArguments,
    configuration: invokeRunConfiguration,
    definedTags: {
        "Operations.CostCenter": "42",
    },
    displayName: invokeRunDisplayName,
    driverShape: invokeRunDriverShape,
    driverShapeConfig: {
        memoryInGbs: invokeRunDriverShapeConfigMemoryInGbs,
        ocpus: invokeRunDriverShapeConfigOcpus,
    },
    execute: invokeRunExecute,
    executorShape: invokeRunExecutorShape,
    executorShapeConfig: {
        memoryInGbs: invokeRunExecutorShapeConfigMemoryInGbs,
        ocpus: invokeRunExecutorShapeConfigOcpus,
    },
    freeformTags: {
        Department: "Finance",
    },
    idleTimeoutInMinutes: invokeRunIdleTimeoutInMinutes,
    logsBucketUri: invokeRunLogsBucketUri,
    maxDurationInMinutes: invokeRunMaxDurationInMinutes,
    metastoreId: metastoreId,
    numExecutors: invokeRunNumExecutors,
    opcParentRptUrl: invokeRunOpcParentRptUrl,
    parameters: [{
        name: invokeRunParametersName,
        value: invokeRunParametersValue,
    }],
    poolId: testPool.id,
    sparkVersion: invokeRunSparkVersion,
    type: invokeRunType,
    warehouseBucketUri: invokeRunWarehouseBucketUri,
});
import pulumi
import pulumi_oci as oci
test_invoke_run = oci.data_flow.InvokeRun("test_invoke_run",
    compartment_id=compartment_id,
    application_id=test_application["id"],
    application_log_config={
        "log_group_id": test_log_group["id"],
        "log_id": test_log["id"],
    },
    archive_uri=invoke_run_archive_uri,
    arguments=invoke_run_arguments,
    configuration=invoke_run_configuration,
    defined_tags={
        "Operations.CostCenter": "42",
    },
    display_name=invoke_run_display_name,
    driver_shape=invoke_run_driver_shape,
    driver_shape_config={
        "memory_in_gbs": invoke_run_driver_shape_config_memory_in_gbs,
        "ocpus": invoke_run_driver_shape_config_ocpus,
    },
    execute=invoke_run_execute,
    executor_shape=invoke_run_executor_shape,
    executor_shape_config={
        "memory_in_gbs": invoke_run_executor_shape_config_memory_in_gbs,
        "ocpus": invoke_run_executor_shape_config_ocpus,
    },
    freeform_tags={
        "Department": "Finance",
    },
    idle_timeout_in_minutes=invoke_run_idle_timeout_in_minutes,
    logs_bucket_uri=invoke_run_logs_bucket_uri,
    max_duration_in_minutes=invoke_run_max_duration_in_minutes,
    metastore_id=metastore_id,
    num_executors=invoke_run_num_executors,
    opc_parent_rpt_url=invoke_run_opc_parent_rpt_url,
    parameters=[{
        "name": invoke_run_parameters_name,
        "value": invoke_run_parameters_value,
    }],
    pool_id=test_pool["id"],
    spark_version=invoke_run_spark_version,
    type=invoke_run_type,
    warehouse_bucket_uri=invoke_run_warehouse_bucket_uri)
package main
import (
	"github.com/pulumi/pulumi-oci/sdk/v2/go/oci/dataflow"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := dataflow.NewInvokeRun(ctx, "test_invoke_run", &dataflow.InvokeRunArgs{
			CompartmentId: pulumi.Any(compartmentId),
			ApplicationId: pulumi.Any(testApplication.Id),
			ApplicationLogConfig: &dataflow.InvokeRunApplicationLogConfigArgs{
				LogGroupId: pulumi.Any(testLogGroup.Id),
				LogId:      pulumi.Any(testLog.Id),
			},
			ArchiveUri:    pulumi.Any(invokeRunArchiveUri),
			Arguments:     pulumi.Any(invokeRunArguments),
			Configuration: pulumi.Any(invokeRunConfiguration),
			DefinedTags: pulumi.StringMap{
				"Operations.CostCenter": pulumi.String("42"),
			},
			DisplayName: pulumi.Any(invokeRunDisplayName),
			DriverShape: pulumi.Any(invokeRunDriverShape),
			DriverShapeConfig: &dataflow.InvokeRunDriverShapeConfigArgs{
				MemoryInGbs: pulumi.Any(invokeRunDriverShapeConfigMemoryInGbs),
				Ocpus:       pulumi.Any(invokeRunDriverShapeConfigOcpus),
			},
			Execute:       pulumi.Any(invokeRunExecute),
			ExecutorShape: pulumi.Any(invokeRunExecutorShape),
			ExecutorShapeConfig: &dataflow.InvokeRunExecutorShapeConfigArgs{
				MemoryInGbs: pulumi.Any(invokeRunExecutorShapeConfigMemoryInGbs),
				Ocpus:       pulumi.Any(invokeRunExecutorShapeConfigOcpus),
			},
			FreeformTags: pulumi.StringMap{
				"Department": pulumi.String("Finance"),
			},
			IdleTimeoutInMinutes: pulumi.Any(invokeRunIdleTimeoutInMinutes),
			LogsBucketUri:        pulumi.Any(invokeRunLogsBucketUri),
			MaxDurationInMinutes: pulumi.Any(invokeRunMaxDurationInMinutes),
			MetastoreId:          pulumi.Any(metastoreId),
			NumExecutors:         pulumi.Any(invokeRunNumExecutors),
			OpcParentRptUrl:      pulumi.Any(invokeRunOpcParentRptUrl),
			Parameters: dataflow.InvokeRunParameterArray{
				&dataflow.InvokeRunParameterArgs{
					Name:  pulumi.Any(invokeRunParametersName),
					Value: pulumi.Any(invokeRunParametersValue),
				},
			},
			PoolId:             pulumi.Any(testPool.Id),
			SparkVersion:       pulumi.Any(invokeRunSparkVersion),
			Type:               pulumi.Any(invokeRunType),
			WarehouseBucketUri: pulumi.Any(invokeRunWarehouseBucketUri),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Oci = Pulumi.Oci;
return await Deployment.RunAsync(() => 
{
    var testInvokeRun = new Oci.DataFlow.InvokeRun("test_invoke_run", new()
    {
        CompartmentId = compartmentId,
        ApplicationId = testApplication.Id,
        ApplicationLogConfig = new Oci.DataFlow.Inputs.InvokeRunApplicationLogConfigArgs
        {
            LogGroupId = testLogGroup.Id,
            LogId = testLog.Id,
        },
        ArchiveUri = invokeRunArchiveUri,
        Arguments = invokeRunArguments,
        Configuration = invokeRunConfiguration,
        DefinedTags = 
        {
            { "Operations.CostCenter", "42" },
        },
        DisplayName = invokeRunDisplayName,
        DriverShape = invokeRunDriverShape,
        DriverShapeConfig = new Oci.DataFlow.Inputs.InvokeRunDriverShapeConfigArgs
        {
            MemoryInGbs = invokeRunDriverShapeConfigMemoryInGbs,
            Ocpus = invokeRunDriverShapeConfigOcpus,
        },
        Execute = invokeRunExecute,
        ExecutorShape = invokeRunExecutorShape,
        ExecutorShapeConfig = new Oci.DataFlow.Inputs.InvokeRunExecutorShapeConfigArgs
        {
            MemoryInGbs = invokeRunExecutorShapeConfigMemoryInGbs,
            Ocpus = invokeRunExecutorShapeConfigOcpus,
        },
        FreeformTags = 
        {
            { "Department", "Finance" },
        },
        IdleTimeoutInMinutes = invokeRunIdleTimeoutInMinutes,
        LogsBucketUri = invokeRunLogsBucketUri,
        MaxDurationInMinutes = invokeRunMaxDurationInMinutes,
        MetastoreId = metastoreId,
        NumExecutors = invokeRunNumExecutors,
        OpcParentRptUrl = invokeRunOpcParentRptUrl,
        Parameters = new[]
        {
            new Oci.DataFlow.Inputs.InvokeRunParameterArgs
            {
                Name = invokeRunParametersName,
                Value = invokeRunParametersValue,
            },
        },
        PoolId = testPool.Id,
        SparkVersion = invokeRunSparkVersion,
        Type = invokeRunType,
        WarehouseBucketUri = invokeRunWarehouseBucketUri,
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.oci.DataFlow.InvokeRun;
import com.pulumi.oci.DataFlow.InvokeRunArgs;
import com.pulumi.oci.DataFlow.inputs.InvokeRunApplicationLogConfigArgs;
import com.pulumi.oci.DataFlow.inputs.InvokeRunDriverShapeConfigArgs;
import com.pulumi.oci.DataFlow.inputs.InvokeRunExecutorShapeConfigArgs;
import com.pulumi.oci.DataFlow.inputs.InvokeRunParameterArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var testInvokeRun = new InvokeRun("testInvokeRun", InvokeRunArgs.builder()
            .compartmentId(compartmentId)
            .applicationId(testApplication.id())
            .applicationLogConfig(InvokeRunApplicationLogConfigArgs.builder()
                .logGroupId(testLogGroup.id())
                .logId(testLog.id())
                .build())
            .archiveUri(invokeRunArchiveUri)
            .arguments(invokeRunArguments)
            .configuration(invokeRunConfiguration)
            .definedTags(Map.of("Operations.CostCenter", "42"))
            .displayName(invokeRunDisplayName)
            .driverShape(invokeRunDriverShape)
            .driverShapeConfig(InvokeRunDriverShapeConfigArgs.builder()
                .memoryInGbs(invokeRunDriverShapeConfigMemoryInGbs)
                .ocpus(invokeRunDriverShapeConfigOcpus)
                .build())
            .execute(invokeRunExecute)
            .executorShape(invokeRunExecutorShape)
            .executorShapeConfig(InvokeRunExecutorShapeConfigArgs.builder()
                .memoryInGbs(invokeRunExecutorShapeConfigMemoryInGbs)
                .ocpus(invokeRunExecutorShapeConfigOcpus)
                .build())
            .freeformTags(Map.of("Department", "Finance"))
            .idleTimeoutInMinutes(invokeRunIdleTimeoutInMinutes)
            .logsBucketUri(invokeRunLogsBucketUri)
            .maxDurationInMinutes(invokeRunMaxDurationInMinutes)
            .metastoreId(metastoreId)
            .numExecutors(invokeRunNumExecutors)
            .opcParentRptUrl(invokeRunOpcParentRptUrl)
            .parameters(InvokeRunParameterArgs.builder()
                .name(invokeRunParametersName)
                .value(invokeRunParametersValue)
                .build())
            .poolId(testPool.id())
            .sparkVersion(invokeRunSparkVersion)
            .type(invokeRunType)
            .warehouseBucketUri(invokeRunWarehouseBucketUri)
            .build());
    }
}
resources:
  testInvokeRun:
    type: oci:DataFlow:InvokeRun
    name: test_invoke_run
    properties:
      compartmentId: ${compartmentId}
      applicationId: ${testApplication.id}
      applicationLogConfig:
        logGroupId: ${testLogGroup.id}
        logId: ${testLog.id}
      archiveUri: ${invokeRunArchiveUri}
      arguments: ${invokeRunArguments}
      configuration: ${invokeRunConfiguration}
      definedTags:
        Operations.CostCenter: '42'
      displayName: ${invokeRunDisplayName}
      driverShape: ${invokeRunDriverShape}
      driverShapeConfig:
        memoryInGbs: ${invokeRunDriverShapeConfigMemoryInGbs}
        ocpus: ${invokeRunDriverShapeConfigOcpus}
      execute: ${invokeRunExecute}
      executorShape: ${invokeRunExecutorShape}
      executorShapeConfig:
        memoryInGbs: ${invokeRunExecutorShapeConfigMemoryInGbs}
        ocpus: ${invokeRunExecutorShapeConfigOcpus}
      freeformTags:
        Department: Finance
      idleTimeoutInMinutes: ${invokeRunIdleTimeoutInMinutes}
      logsBucketUri: ${invokeRunLogsBucketUri}
      maxDurationInMinutes: ${invokeRunMaxDurationInMinutes}
      metastoreId: ${metastoreId}
      numExecutors: ${invokeRunNumExecutors}
      opcParentRptUrl: ${invokeRunOpcParentRptUrl}
      parameters:
        - name: ${invokeRunParametersName}
          value: ${invokeRunParametersValue}
      poolId: ${testPool.id}
      sparkVersion: ${invokeRunSparkVersion}
      type: ${invokeRunType}
      warehouseBucketUri: ${invokeRunWarehouseBucketUri}
Note
The service allows only one run at a time to succeed when a user tries to invoke runs on multiple applications that have Private Endpoints; the service will proceed with invoking only one run and put the rest of them into a failed state.
Create InvokeRun Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new InvokeRun(name: string, args: InvokeRunArgs, opts?: CustomResourceOptions);
@overload
def InvokeRun(resource_name: str,
              args: InvokeRunArgs,
              opts: Optional[ResourceOptions] = None)
@overload
def InvokeRun(resource_name: str,
              opts: Optional[ResourceOptions] = None,
              compartment_id: Optional[str] = None,
              executor_shape: Optional[str] = None,
              pool_id: Optional[str] = None,
              arguments: Optional[Sequence[str]] = None,
              asynchronous: Optional[bool] = None,
              application_log_config: Optional[InvokeRunApplicationLogConfigArgs] = None,
              configuration: Optional[Mapping[str, str]] = None,
              defined_tags: Optional[Mapping[str, str]] = None,
              display_name: Optional[str] = None,
              driver_shape: Optional[str] = None,
              driver_shape_config: Optional[InvokeRunDriverShapeConfigArgs] = None,
              execute: Optional[str] = None,
              application_id: Optional[str] = None,
              archive_uri: Optional[str] = None,
              idle_timeout_in_minutes: Optional[str] = None,
              executor_shape_config: Optional[InvokeRunExecutorShapeConfigArgs] = None,
              logs_bucket_uri: Optional[str] = None,
              max_duration_in_minutes: Optional[str] = None,
              metastore_id: Optional[str] = None,
              num_executors: Optional[int] = None,
              opc_parent_rpt_url: Optional[str] = None,
              parameters: Optional[Sequence[InvokeRunParameterArgs]] = None,
              freeform_tags: Optional[Mapping[str, str]] = None,
              spark_version: Optional[str] = None,
              type: Optional[str] = None,
              warehouse_bucket_uri: Optional[str] = None)
func NewInvokeRun(ctx *Context, name string, args InvokeRunArgs, opts ...ResourceOption) (*InvokeRun, error)
public InvokeRun(string name, InvokeRunArgs args, CustomResourceOptions? opts = null)
public InvokeRun(String name, InvokeRunArgs args)
public InvokeRun(String name, InvokeRunArgs args, CustomResourceOptions options)
type: oci:DataFlow:InvokeRun
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args InvokeRunArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args InvokeRunArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args InvokeRunArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args InvokeRunArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args InvokeRunArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var invokeRunResource = new Oci.DataFlow.InvokeRun("invokeRunResource", new()
{
    CompartmentId = "string",
    ExecutorShape = "string",
    PoolId = "string",
    Arguments = new[]
    {
        "string",
    },
    Asynchronous = false,
    ApplicationLogConfig = new Oci.DataFlow.Inputs.InvokeRunApplicationLogConfigArgs
    {
        LogGroupId = "string",
        LogId = "string",
    },
    Configuration = 
    {
        { "string", "string" },
    },
    DefinedTags = 
    {
        { "string", "string" },
    },
    DisplayName = "string",
    DriverShape = "string",
    DriverShapeConfig = new Oci.DataFlow.Inputs.InvokeRunDriverShapeConfigArgs
    {
        MemoryInGbs = 0,
        Ocpus = 0,
    },
    Execute = "string",
    ApplicationId = "string",
    ArchiveUri = "string",
    IdleTimeoutInMinutes = "string",
    ExecutorShapeConfig = new Oci.DataFlow.Inputs.InvokeRunExecutorShapeConfigArgs
    {
        MemoryInGbs = 0,
        Ocpus = 0,
    },
    LogsBucketUri = "string",
    MaxDurationInMinutes = "string",
    MetastoreId = "string",
    NumExecutors = 0,
    OpcParentRptUrl = "string",
    Parameters = new[]
    {
        new Oci.DataFlow.Inputs.InvokeRunParameterArgs
        {
            Name = "string",
            Value = "string",
        },
    },
    FreeformTags = 
    {
        { "string", "string" },
    },
    SparkVersion = "string",
    Type = "string",
    WarehouseBucketUri = "string",
});
example, err := dataflow.NewInvokeRun(ctx, "invokeRunResource", &dataflow.InvokeRunArgs{
	CompartmentId: pulumi.String("string"),
	ExecutorShape: pulumi.String("string"),
	PoolId:        pulumi.String("string"),
	Arguments: pulumi.StringArray{
		pulumi.String("string"),
	},
	Asynchronous: pulumi.Bool(false),
	ApplicationLogConfig: &dataflow.InvokeRunApplicationLogConfigArgs{
		LogGroupId: pulumi.String("string"),
		LogId:      pulumi.String("string"),
	},
	Configuration: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	DefinedTags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	DisplayName: pulumi.String("string"),
	DriverShape: pulumi.String("string"),
	DriverShapeConfig: &dataflow.InvokeRunDriverShapeConfigArgs{
		MemoryInGbs: pulumi.Float64(0),
		Ocpus:       pulumi.Float64(0),
	},
	Execute:              pulumi.String("string"),
	ApplicationId:        pulumi.String("string"),
	ArchiveUri:           pulumi.String("string"),
	IdleTimeoutInMinutes: pulumi.String("string"),
	ExecutorShapeConfig: &dataflow.InvokeRunExecutorShapeConfigArgs{
		MemoryInGbs: pulumi.Float64(0),
		Ocpus:       pulumi.Float64(0),
	},
	LogsBucketUri:        pulumi.String("string"),
	MaxDurationInMinutes: pulumi.String("string"),
	MetastoreId:          pulumi.String("string"),
	NumExecutors:         pulumi.Int(0),
	OpcParentRptUrl:      pulumi.String("string"),
	Parameters: dataflow.InvokeRunParameterArray{
		&dataflow.InvokeRunParameterArgs{
			Name:  pulumi.String("string"),
			Value: pulumi.String("string"),
		},
	},
	FreeformTags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	SparkVersion:       pulumi.String("string"),
	Type:               pulumi.String("string"),
	WarehouseBucketUri: pulumi.String("string"),
})
var invokeRunResource = new InvokeRun("invokeRunResource", InvokeRunArgs.builder()
    .compartmentId("string")
    .executorShape("string")
    .poolId("string")
    .arguments("string")
    .asynchronous(false)
    .applicationLogConfig(InvokeRunApplicationLogConfigArgs.builder()
        .logGroupId("string")
        .logId("string")
        .build())
    .configuration(Map.of("string", "string"))
    .definedTags(Map.of("string", "string"))
    .displayName("string")
    .driverShape("string")
    .driverShapeConfig(InvokeRunDriverShapeConfigArgs.builder()
        .memoryInGbs(0.0)
        .ocpus(0.0)
        .build())
    .execute("string")
    .applicationId("string")
    .archiveUri("string")
    .idleTimeoutInMinutes("string")
    .executorShapeConfig(InvokeRunExecutorShapeConfigArgs.builder()
        .memoryInGbs(0.0)
        .ocpus(0.0)
        .build())
    .logsBucketUri("string")
    .maxDurationInMinutes("string")
    .metastoreId("string")
    .numExecutors(0)
    .opcParentRptUrl("string")
    .parameters(InvokeRunParameterArgs.builder()
        .name("string")
        .value("string")
        .build())
    .freeformTags(Map.of("string", "string"))
    .sparkVersion("string")
    .type("string")
    .warehouseBucketUri("string")
    .build());
invoke_run_resource = oci.data_flow.InvokeRun("invokeRunResource",
    compartment_id="string",
    executor_shape="string",
    pool_id="string",
    arguments=["string"],
    asynchronous=False,
    application_log_config={
        "log_group_id": "string",
        "log_id": "string",
    },
    configuration={
        "string": "string",
    },
    defined_tags={
        "string": "string",
    },
    display_name="string",
    driver_shape="string",
    driver_shape_config={
        "memory_in_gbs": 0,
        "ocpus": 0,
    },
    execute="string",
    application_id="string",
    archive_uri="string",
    idle_timeout_in_minutes="string",
    executor_shape_config={
        "memory_in_gbs": 0,
        "ocpus": 0,
    },
    logs_bucket_uri="string",
    max_duration_in_minutes="string",
    metastore_id="string",
    num_executors=0,
    opc_parent_rpt_url="string",
    parameters=[{
        "name": "string",
        "value": "string",
    }],
    freeform_tags={
        "string": "string",
    },
    spark_version="string",
    type="string",
    warehouse_bucket_uri="string")
const invokeRunResource = new oci.dataflow.InvokeRun("invokeRunResource", {
    compartmentId: "string",
    executorShape: "string",
    poolId: "string",
    arguments: ["string"],
    asynchronous: false,
    applicationLogConfig: {
        logGroupId: "string",
        logId: "string",
    },
    configuration: {
        string: "string",
    },
    definedTags: {
        string: "string",
    },
    displayName: "string",
    driverShape: "string",
    driverShapeConfig: {
        memoryInGbs: 0,
        ocpus: 0,
    },
    execute: "string",
    applicationId: "string",
    archiveUri: "string",
    idleTimeoutInMinutes: "string",
    executorShapeConfig: {
        memoryInGbs: 0,
        ocpus: 0,
    },
    logsBucketUri: "string",
    maxDurationInMinutes: "string",
    metastoreId: "string",
    numExecutors: 0,
    opcParentRptUrl: "string",
    parameters: [{
        name: "string",
        value: "string",
    }],
    freeformTags: {
        string: "string",
    },
    sparkVersion: "string",
    type: "string",
    warehouseBucketUri: "string",
});
type: oci:DataFlow:InvokeRun
properties:
    applicationId: string
    applicationLogConfig:
        logGroupId: string
        logId: string
    archiveUri: string
    arguments:
        - string
    asynchronous: false
    compartmentId: string
    configuration:
        string: string
    definedTags:
        string: string
    displayName: string
    driverShape: string
    driverShapeConfig:
        memoryInGbs: 0
        ocpus: 0
    execute: string
    executorShape: string
    executorShapeConfig:
        memoryInGbs: 0
        ocpus: 0
    freeformTags:
        string: string
    idleTimeoutInMinutes: string
    logsBucketUri: string
    maxDurationInMinutes: string
    metastoreId: string
    numExecutors: 0
    opcParentRptUrl: string
    parameters:
        - name: string
          value: string
    poolId: string
    sparkVersion: string
    type: string
    warehouseBucketUri: string
InvokeRun Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The InvokeRun resource accepts the following input properties:
- CompartmentId string
- (Updatable) The OCID of a compartment.
- ApplicationId string
- The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
- ApplicationLogConfig InvokeRunApplicationLogConfig
- Logging details of Application logs for Data Flow Run.
- ArchiveUri string
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- Arguments List<string>
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ]. If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
- Asynchronous bool
- Configuration Dictionary<string, string>
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- DefinedTags Dictionary<string, string>
- (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- DisplayName string
- A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
- DriverShape string
- The VM shape for the driver. Sets the driver cores and memory.
- DriverShapeConfig InvokeRunDriverShapeConfig
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- Execute string
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --files, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10. Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- ExecutorShape string
- The VM shape for the executors. Sets the executor cores and memory.
- ExecutorShapeConfig InvokeRunExecutorShapeConfig
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- FreeformTags Dictionary<string, string>
- (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- IdleTimeoutInMinutes string
- (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- LogsBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- MaxDurationInMinutes string
- (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
- MetastoreId string
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- NumExecutors int
- The number of executor VMs requested.
- OpcParentRptUrl string
- (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
- Parameters List<InvokeRunParameter>
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- PoolId string
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- SparkVersion string
- The Spark version utilized to run the application. This value may be set if applicationId is not since the Spark version will be taken from the associated application.
- Type string
- The Spark application processing type.
- WarehouseBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat. - ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values 
- CompartmentId string
- (Updatable) The OCID of a compartment.
- ApplicationId string
- The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
- ApplicationLogConfig InvokeRunApplicationLogConfigArgs
- Logging details of Application logs for Data Flow Run.
- ArchiveUri string
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- Arguments []string
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ]. If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
- Asynchronous bool
- Configuration map[string]string
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- DefinedTags map[string]string
- (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- DisplayName string
- A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
- DriverShape string
- The VM shape for the driver. Sets the driver cores and memory.
- DriverShapeConfig InvokeRunDriverShapeConfigArgs
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- Execute string
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class,--file,--jars,--conf,--py-files, and main application file with arguments. Example:--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- ExecutorShape string
- The VM shape for the executors. Sets the executor cores and memory.
- ExecutorShapeConfig InvokeRunExecutorShapeConfigArgs
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- map[string]string
- (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- IdleTimeoutInMinutes string
- (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- LogsBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- MaxDurationInMinutes string
- (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
- MetastoreId string
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- NumExecutors int
- The number of executor VMs requested.
- OpcParentRptUrl string
- (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
- Parameters []InvokeRunParameterArgs
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- PoolId string
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- SparkVersion string
- The Spark version utilized to run the application. This value may be set if applicationId is not since the Spark version will be taken from the associated application.
- Type string
- The Spark application processing type.
- WarehouseBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat. - ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values 
- compartmentId String
- (Updatable) The OCID of a compartment.
- applicationId String
- The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
- applicationLogConfig InvokeRunApplicationLogConfig
- Logging details of Application logs for Data Flow Run.
- archiveUri String
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- arguments List<String>
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
- asynchronous Boolean
- configuration Map<String,String>
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- Map<String,String>
- (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- displayName String
- A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
- driverShape String
- The VM shape for the driver. Sets the driver cores and memory.
- driverShapeConfig InvokeRunDriverShapeConfig
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- execute String
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class,--file,--jars,--conf,--py-files, and main application file with arguments. Example:--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- executorShape String
- The VM shape for the executors. Sets the executor cores and memory.
- executorShapeConfig InvokeRunExecutorShapeConfig
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- Map<String,String>
- (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- idleTimeoutInMinutes String
- (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- logsBucketUri String
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- maxDurationInMinutes String
- (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
- metastoreId String
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- numExecutors Integer
- The number of executor VMs requested.
- opcParentRptUrl String
- (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
- parameters List<InvokeRunParameter>
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- poolId String
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- sparkVersion String
- The Spark version utilized to run the application. This value may be set if applicationId is not since the Spark version will be taken from the associated application.
- type String
- The Spark application processing type.
- warehouseBucketUri String
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat. - ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values 
- compartmentId string
- (Updatable) The OCID of a compartment.
- applicationId string
- The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
- applicationLogConfig InvokeRunApplicationLogConfig
- Logging details of Application logs for Data Flow Run.
- archiveUri string
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- arguments string[]
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
- asynchronous boolean
- configuration {[key: string]: string}
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- {[key: string]: string}
- (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- displayName string
- A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
- driverShape string
- The VM shape for the driver. Sets the driver cores and memory.
- driverShapeConfig InvokeRunDriverShapeConfig
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- execute string
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class,--file,--jars,--conf,--py-files, and main application file with arguments. Example:--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- executorShape string
- The VM shape for the executors. Sets the executor cores and memory.
- executorShapeConfig InvokeRunExecutorShapeConfig
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- {[key: string]: string}
- (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- idleTimeoutInMinutes string
- (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- logsBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- maxDurationInMinutes string
- (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
- metastoreId string
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- numExecutors number
- The number of executor VMs requested.
- opcParentRptUrl string
- (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
- parameters InvokeRunParameter[]
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- poolId string
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- sparkVersion string
- The Spark version utilized to run the application. This value may be set if applicationId is not since the Spark version will be taken from the associated application.
- type string
- The Spark application processing type.
- warehouseBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat. - ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values 
- compartment_id str
- (Updatable) The OCID of a compartment.
- application_id str
- The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
- application_log_config InvokeRunApplicationLogConfigArgs
- Logging details of Application logs for Data Flow Run.
- archive_uri str
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- arguments Sequence[str]
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
- asynchronous bool
- configuration Mapping[str, str]
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- Mapping[str, str]
- (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- display_name str
- A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
- driver_shape str
- The VM shape for the driver. Sets the driver cores and memory.
- driver_shape_config InvokeRunDriverShapeConfigArgs
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- execute str
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class,--file,--jars,--conf,--py-files, and main application file with arguments. Example:--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- executor_shape str
- The VM shape for the executors. Sets the executor cores and memory.
- executor_shape_config InvokeRunExecutorShapeConfigArgs
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- Mapping[str, str]
- (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- idle_timeout_in_minutes str
- (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- logs_bucket_uri str
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- max_duration_in_minutes str
- (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
- metastore_id str
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- num_executors int
- The number of executor VMs requested.
- opc_parent_rpt_url str
- (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
- parameters Sequence[InvokeRunParameterArgs]
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- pool_id str
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- spark_version str
- The Spark version utilized to run the application. This value may be set if applicationId is not since the Spark version will be taken from the associated application.
- type str
- The Spark application processing type.
- warehouse_bucket_uri str
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat. - ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values 
- compartmentId String
- (Updatable) The OCID of a compartment.
- applicationId String
- The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
- applicationLogConfig Property Map
- Logging details of Application logs for Data Flow Run.
- archiveUri String
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- arguments List<String>
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
- asynchronous Boolean
- configuration Map<String>
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- Map<String>
- (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- displayName String
- A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
- driverShape String
- The VM shape for the driver. Sets the driver cores and memory.
- driverShapeConfig Property Map
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- execute String
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class,--file,--jars,--conf,--py-files, and main application file with arguments. Example:--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- executorShape String
- The VM shape for the executors. Sets the executor cores and memory.
- executorShapeConfig Property Map
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- Map<String>
- (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- idleTimeoutInMinutes String
- (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- logsBucketUri String
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- maxDurationInMinutes String
- (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
- metastoreId String
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- numExecutors Number
- The number of executor VMs requested.
- opcParentRptUrl String
- (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
- parameters List<Property Map>
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- poolId String
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- sparkVersion String
- The Spark version utilized to run the application. This value may be set if applicationId is not since the Spark version will be taken from the associated application.
- type String
- The Spark application processing type.
- warehouseBucketUri String
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat. - ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values 
Outputs
All input properties are implicitly available as output properties. Additionally, the InvokeRun resource produces the following output properties:
- ClassName string
- The class for the application.
- DataReadInBytes string
- The data read by the run in bytes.
- DataWrittenInBytes string
- The data written by the run in bytes.
- FileUri string
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- Id string
- The provider-assigned unique ID for this managed resource.
- Language string
- The Spark language.
- LifecycleDetails string
- The detailed messages about the lifecycle state.
- OpcRequestId string
- Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
- OwnerPrincipalId string
- The OCID of the user who created the resource.
- OwnerUserName string
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- PrivateEndpointDnsZones List<string>
- An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
- PrivateEndpointId string
- The OCID of a private endpoint.
- PrivateEndpointMaxHostCount int
- The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
- PrivateEndpointNsgIds List<string>
- An array of network security group OCIDs.
- PrivateEndpointSubnetId string
- The OCID of a subnet.
- RunDurationInMilliseconds string
- The duration of the run in milliseconds.
- State string
- The current state of this run.
- TimeCreated string
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- TimeUpdated string
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- TotalOcpu int
- The total number of oCPU requested by the run.
- ClassName string
- The class for the application.
- DataReadInBytes string
- The data read by the run in bytes.
- DataWrittenInBytes string
- The data written by the run in bytes.
- FileUri string
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- Id string
- The provider-assigned unique ID for this managed resource.
- Language string
- The Spark language.
- LifecycleDetails string
- The detailed messages about the lifecycle state.
- OpcRequestId string
- Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
- OwnerPrincipalId string
- The OCID of the user who created the resource.
- OwnerUserName string
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- PrivateEndpointDnsZones []string
- An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
- PrivateEndpointId string
- The OCID of a private endpoint.
- PrivateEndpointMaxHostCount int
- The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
- PrivateEndpointNsgIds []string
- An array of network security group OCIDs.
- PrivateEndpointSubnetId string
- The OCID of a subnet.
- RunDurationInMilliseconds string
- The duration of the run in milliseconds.
- State string
- The current state of this run.
- TimeCreated string
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- TimeUpdated string
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- TotalOcpu int
- The total number of oCPU requested by the run.
- className String
- The class for the application.
- dataReadInBytes String
- The data read by the run in bytes.
- dataWrittenInBytes String
- The data written by the run in bytes.
- fileUri String
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- id String
- The provider-assigned unique ID for this managed resource.
- language String
- The Spark language.
- lifecycleDetails String
- The detailed messages about the lifecycle state.
- opcRequestId String
- Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
- ownerPrincipalId String
- The OCID of the user who created the resource.
- ownerUserName String
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- privateEndpointDnsZones List<String>
- An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
- privateEndpointId String
- The OCID of a private endpoint.
- privateEndpointMaxHostCount Integer
- The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
- privateEndpointNsgIds List<String>
- An array of network security group OCIDs.
- privateEndpointSubnetId String
- The OCID of a subnet.
- runDurationInMilliseconds String
- The duration of the run in milliseconds.
- state String
- The current state of this run.
- timeCreated String
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- timeUpdated String
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- totalOcpu Integer
- The total number of oCPU requested by the run.
- className string
- The class for the application.
- dataReadInBytes string
- The data read by the run in bytes.
- dataWrittenInBytes string
- The data written by the run in bytes.
- fileUri string
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- id string
- The provider-assigned unique ID for this managed resource.
- language string
- The Spark language.
- lifecycleDetails string
- The detailed messages about the lifecycle state.
- opcRequestId string
- Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
- ownerPrincipalId string
- The OCID of the user who created the resource.
- ownerUserName string
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- privateEndpointDnsZones string[]
- An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
- privateEndpointId string
- The OCID of a private endpoint.
- privateEndpointMaxHostCount number
- The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
- privateEndpointNsgIds string[]
- An array of network security group OCIDs.
- privateEndpointSubnetId string
- The OCID of a subnet.
- runDurationInMilliseconds string
- The duration of the run in milliseconds.
- state string
- The current state of this run.
- timeCreated string
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- timeUpdated string
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- totalOcpu number
- The total number of oCPU requested by the run.
- class_name str
- The class for the application.
- data_read_in_bytes str
- The data read by the run in bytes.
- data_written_in_bytes str
- The data written by the run in bytes.
- file_uri str
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- id str
- The provider-assigned unique ID for this managed resource.
- language str
- The Spark language.
- lifecycle_details str
- The detailed messages about the lifecycle state.
- opc_request_id str
- Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
- owner_principal_id str
- The OCID of the user who created the resource.
- owner_user_name str
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- private_endpoint_dns_zones Sequence[str]
- An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
- private_endpoint_id str
- The OCID of a private endpoint.
- private_endpoint_max_host_count int
- The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
- private_endpoint_nsg_ids Sequence[str]
- An array of network security group OCIDs.
- private_endpoint_subnet_id str
- The OCID of a subnet.
- run_duration_in_milliseconds str
- The duration of the run in milliseconds.
- state str
- The current state of this run.
- time_created str
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- time_updated str
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- total_ocpu int
- The total number of oCPU requested by the run.
- className String
- The class for the application.
- dataReadInBytes String
- The data read by the run in bytes.
- dataWrittenInBytes String
- The data written by the run in bytes.
- fileUri String
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- id String
- The provider-assigned unique ID for this managed resource.
- language String
- The Spark language.
- lifecycleDetails String
- The detailed messages about the lifecycle state.
- opcRequestId String
- Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
- ownerPrincipalId String
- The OCID of the user who created the resource.
- ownerUserName String
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- privateEndpointDnsZones List<String>
- An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
- privateEndpointId String
- The OCID of a private endpoint.
- privateEndpointMaxHostCount Number
- The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
- privateEndpointNsgIds List<String>
- An array of network security group OCIDs.
- privateEndpointSubnetId String
- The OCID of a subnet.
- runDurationInMilliseconds String
- The duration of the run in milliseconds.
- state String
- The current state of this run.
- timeCreated String
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- timeUpdated String
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- totalOcpu Number
- The total number of oCPU requested by the run.
Look up Existing InvokeRun Resource
Get an existing InvokeRun resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: InvokeRunState, opts?: CustomResourceOptions): InvokeRun@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        application_id: Optional[str] = None,
        application_log_config: Optional[InvokeRunApplicationLogConfigArgs] = None,
        archive_uri: Optional[str] = None,
        arguments: Optional[Sequence[str]] = None,
        asynchronous: Optional[bool] = None,
        class_name: Optional[str] = None,
        compartment_id: Optional[str] = None,
        configuration: Optional[Mapping[str, str]] = None,
        data_read_in_bytes: Optional[str] = None,
        data_written_in_bytes: Optional[str] = None,
        defined_tags: Optional[Mapping[str, str]] = None,
        display_name: Optional[str] = None,
        driver_shape: Optional[str] = None,
        driver_shape_config: Optional[InvokeRunDriverShapeConfigArgs] = None,
        execute: Optional[str] = None,
        executor_shape: Optional[str] = None,
        executor_shape_config: Optional[InvokeRunExecutorShapeConfigArgs] = None,
        file_uri: Optional[str] = None,
        freeform_tags: Optional[Mapping[str, str]] = None,
        idle_timeout_in_minutes: Optional[str] = None,
        language: Optional[str] = None,
        lifecycle_details: Optional[str] = None,
        logs_bucket_uri: Optional[str] = None,
        max_duration_in_minutes: Optional[str] = None,
        metastore_id: Optional[str] = None,
        num_executors: Optional[int] = None,
        opc_parent_rpt_url: Optional[str] = None,
        opc_request_id: Optional[str] = None,
        owner_principal_id: Optional[str] = None,
        owner_user_name: Optional[str] = None,
        parameters: Optional[Sequence[InvokeRunParameterArgs]] = None,
        pool_id: Optional[str] = None,
        private_endpoint_dns_zones: Optional[Sequence[str]] = None,
        private_endpoint_id: Optional[str] = None,
        private_endpoint_max_host_count: Optional[int] = None,
        private_endpoint_nsg_ids: Optional[Sequence[str]] = None,
        private_endpoint_subnet_id: Optional[str] = None,
        run_duration_in_milliseconds: Optional[str] = None,
        spark_version: Optional[str] = None,
        state: Optional[str] = None,
        time_created: Optional[str] = None,
        time_updated: Optional[str] = None,
        total_ocpu: Optional[int] = None,
        type: Optional[str] = None,
        warehouse_bucket_uri: Optional[str] = None) -> InvokeRunfunc GetInvokeRun(ctx *Context, name string, id IDInput, state *InvokeRunState, opts ...ResourceOption) (*InvokeRun, error)public static InvokeRun Get(string name, Input<string> id, InvokeRunState? state, CustomResourceOptions? opts = null)public static InvokeRun get(String name, Output<String> id, InvokeRunState state, CustomResourceOptions options)resources:  _:    type: oci:DataFlow:InvokeRun    get:      id: ${id}- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- ApplicationId string
- The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
- ApplicationLogConfig InvokeRunApplicationLogConfig
- Logging details of Application logs for Data Flow Run.
- ArchiveUri string
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- Arguments List<string>
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
- Asynchronous bool
- ClassName string
- The class for the application.
- CompartmentId string
- (Updatable) The OCID of a compartment.
- Configuration Dictionary<string, string>
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- DataReadInBytes string
- The data read by the run in bytes.
- DataWrittenInBytes string
- The data written by the run in bytes.
- DefinedTags Dictionary<string, string>
- (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- DisplayName string
- A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
- DriverShape string
- The VM shape for the driver. Sets the driver cores and memory.
- DriverShapeConfig InvokeRunDriverShapeConfig
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- Execute string
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class,--file,--jars,--conf,--py-files, and main application file with arguments. Example:--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- ExecutorShape string
- The VM shape for the executors. Sets the executor cores and memory.
- ExecutorShapeConfig InvokeRunExecutorShapeConfig
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- FileUri string
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- FreeformTags Dictionary<string, string>
- (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- IdleTimeoutInMinutes string
- (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- Language string
- The Spark language.
- LifecycleDetails string
- The detailed messages about the lifecycle state.
- LogsBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- MaxDurationInMinutes string
- (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
- MetastoreId string
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- NumExecutors int
- The number of executor VMs requested.
- OpcParentRptUrl string
- (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
- OpcRequestId string
- Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
- OwnerPrincipalId string
- The OCID of the user who created the resource.
- OwnerUserName string
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- Parameters List<InvokeRunParameter>
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- PoolId string
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- PrivateEndpointDnsZones List<string>
- An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
- PrivateEndpointId string
- The OCID of a private endpoint.
- PrivateEndpointMaxHostCount int
- The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
- PrivateEndpointNsgIds List<string>
- An array of network security group OCIDs.
- PrivateEndpointSubnetId string
- The OCID of a subnet.
- RunDurationInMilliseconds string
- The duration of the run in milliseconds.
- SparkVersion string
- The Spark version utilized to run the application. This value may be set if applicationId is not since the Spark version will be taken from the associated application.
- State string
- The current state of this run.
- TimeCreated string
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- TimeUpdated string
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- TotalOcpu int
- The total number of oCPU requested by the run.
- Type string
- The Spark application processing type.
- WarehouseBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat. - ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values 
- ApplicationId string
- The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
- ApplicationLogConfig InvokeRunApplicationLogConfigArgs
- Logging details of Application logs for Data Flow Run.
- ArchiveUri string
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- Arguments []string
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
- Asynchronous bool
- ClassName string
- The class for the application.
- CompartmentId string
- (Updatable) The OCID of a compartment.
- Configuration map[string]string
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- DataReadInBytes string
- The data read by the run in bytes.
- DataWrittenInBytes string
- The data written by the run in bytes.
- DefinedTags map[string]string
- (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- DisplayName string
- A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
- DriverShape string
- The VM shape for the driver. Sets the driver cores and memory.
- DriverShapeConfig InvokeRunDriverShapeConfigArgs
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- Execute string
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class,--file,--jars,--conf,--py-files, and main application file with arguments. Example:--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- ExecutorShape string
- The VM shape for the executors. Sets the executor cores and memory.
- ExecutorShapeConfig InvokeRunExecutorShapeConfigArgs
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- FileUri string
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- FreeformTags map[string]string
- (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- IdleTimeoutInMinutes string
- (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- Language string
- The Spark language.
- LifecycleDetails string
- The detailed messages about the lifecycle state.
- LogsBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- MaxDurationInMinutes string
- (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
- MetastoreId string
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- NumExecutors int
- The number of executor VMs requested.
- OpcParentRptUrl string
- (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
- OpcRequestId string
- Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
- OwnerPrincipalId string
- The OCID of the user who created the resource.
- OwnerUserName string
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- Parameters []InvokeRunParameterArgs
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- PoolId string
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- PrivateEndpointDnsZones []string
- An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
- PrivateEndpointId string
- The OCID of a private endpoint.
- PrivateEndpointMaxHostCount int
- The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
- PrivateEndpointNsgIds []string
- An array of network security group OCIDs.
- PrivateEndpointSubnetId string
- The OCID of a subnet.
- RunDurationInMilliseconds string
- The duration of the run in milliseconds.
- SparkVersion string
- The Spark version utilized to run the application. This value may be set if applicationId is not since the Spark version will be taken from the associated application.
- State string
- The current state of this run.
- TimeCreated string
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- TimeUpdated string
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- TotalOcpu int
- The total number of oCPU requested by the run.
- Type string
- The Spark application processing type.
- WarehouseBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat. - ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values 
- applicationId String
- The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
- applicationLogConfig InvokeRunApplicationLogConfig
- Logging details of Application logs for Data Flow Run.
- archiveUri String
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- arguments List<String>
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
- asynchronous Boolean
- className String
- The class for the application.
- compartmentId String
- (Updatable) The OCID of a compartment.
- configuration Map<String,String>
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- dataReadInBytes String
- The data read by the run in bytes.
- dataWrittenInBytes String
- The data written by the run in bytes.
- definedTags Map<String,String>
- (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- displayName String
- A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
- driverShape String
- The VM shape for the driver. Sets the driver cores and memory.
- driverShapeConfig InvokeRunDriverShapeConfig
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- execute String
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class,--file,--jars,--conf,--py-files, and main application file with arguments. Example:--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- executorShape String
- The VM shape for the executors. Sets the executor cores and memory.
- executorShapeConfig InvokeRunExecutorShapeConfig
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- fileUri String
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- freeformTags Map<String,String>
- (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- idleTimeoutInMinutes String
- (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- language String
- The Spark language.
- lifecycleDetails String
- The detailed messages about the lifecycle state.
- logsBucketUri String
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- maxDurationInMinutes String
- (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to the IN_PROGRESS state.
- metastoreId String
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- numExecutors Integer
- The number of executor VMs requested.
- opcParentRptUrl String
- (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
- opcRequestId String
- Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
- ownerPrincipalId String
- The OCID of the user who created the resource.
- ownerUserName String
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- parameters List<InvokeRunParameter>
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- poolId String
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- privateEndpointDnsZones List<String>
- An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
- privateEndpointId String
- The OCID of a private endpoint.
- privateEndpointMaxHostCount Integer
- The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
- privateEndpointNsgIds List<String>
- An array of network security group OCIDs.
- privateEndpointSubnetId String
- The OCID of a subnet.
- runDurationInMilliseconds String
- The duration of the run in milliseconds.
- sparkVersion String
- The Spark version utilized to run the application. This value may be set if applicationId is not since the Spark version will be taken from the associated application.
- state String
- The current state of this run.
- timeCreated String
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- timeUpdated String
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- totalOcpu Integer
- The total number of oCPU requested by the run.
- type String
- The Spark application processing type.
- warehouseBucketUri String
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat. - ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values 
- applicationId string
- The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
- applicationLogConfig InvokeRunApplicationLogConfig
- Logging details of Application logs for Data Flow Run.
- archiveUri string
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- arguments string[]
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ]. If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe".
- asynchronous boolean
- className string
- The class for the application.
- compartmentId string
- (Updatable) The OCID of a compartment.
- configuration {[key: string]: string}
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- dataReadInBytes string
- The data read by the run in bytes.
- dataWrittenInBytes string
- The data written by the run in bytes.
- definedTags {[key: string]: string}
- (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- displayName string
- A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
- driverShape string
- The VM shape for the driver. Sets the driver cores and memory.
- driverShapeConfig InvokeRunDriverShapeConfig
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- execute string
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class,--file,--jars,--conf,--py-files, and main application file with arguments. Example:--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- executorShape string
- The VM shape for the executors. Sets the executor cores and memory.
- executorShapeConfig InvokeRunExecutorShapeConfig
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- fileUri string
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- freeformTags {[key: string]: string}
- (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- idleTimeoutInMinutes string
- (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- language string
- The Spark language.
- lifecycleDetails string
- The detailed messages about the lifecycle state.
- logsBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- maxDurationInMinutes string
- (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to the IN_PROGRESS state.
- metastoreId string
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- numExecutors number
- The number of executor VMs requested.
- opcParentRptUrl string
- (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
- opcRequestId string
- Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
- ownerPrincipalId string
- The OCID of the user who created the resource.
- ownerUserName string
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- parameters InvokeRunParameter[]
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- poolId string
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- privateEndpointDnsZones string[]
- An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
- privateEndpointId string
- The OCID of a private endpoint.
- privateEndpointMaxHostCount number
- The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
- privateEndpointNsgIds string[]
- An array of network security group OCIDs.
- privateEndpointSubnetId string
- The OCID of a subnet.
- runDurationInMilliseconds string
- The duration of the run in milliseconds.
- sparkVersion string
- The Spark version utilized to run the application. This value may be set if applicationId is not since the Spark version will be taken from the associated application.
- state string
- The current state of this run.
- timeCreated string
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- timeUpdated string
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- totalOcpu number
- The total number of oCPU requested by the run.
- type string
- The Spark application processing type.
- warehouseBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat. - ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values 
- application_id str
- The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
- application_log_config InvokeRunApplicationLogConfigArgs
- Logging details of Application logs for Data Flow Run.
- archive_uri str
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- arguments Sequence[str]
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ]. If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe".
- asynchronous bool
- class_name str
- The class for the application.
- compartment_id str
- (Updatable) The OCID of a compartment.
- configuration Mapping[str, str]
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- data_read_in_bytes str
- The data read by the run in bytes.
- data_written_in_bytes str
- The data written by the run in bytes.
- defined_tags Mapping[str, str]
- (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- display_name str
- A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
- driver_shape str
- The VM shape for the driver. Sets the driver cores and memory.
- driver_shape_config InvokeRunDriverShapeConfigArgs
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- execute str
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class,--file,--jars,--conf,--py-files, and main application file with arguments. Example:--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- executor_shape str
- The VM shape for the executors. Sets the executor cores and memory.
- executor_shape_config InvokeRunExecutorShapeConfigArgs
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- file_uri str
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- freeform_tags Mapping[str, str]
- (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- idle_timeout_in_minutes str
- (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- language str
- The Spark language.
- lifecycle_details str
- The detailed messages about the lifecycle state.
- logs_bucket_uri str
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- max_duration_in_minutes str
- (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to the IN_PROGRESS state.
- metastore_id str
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- num_executors int
- The number of executor VMs requested.
- opc_parent_rpt_url str
- (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
- opc_request_id str
- Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
- owner_principal_id str
- The OCID of the user who created the resource.
- owner_user_name str
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- parameters Sequence[InvokeRunParameterArgs]
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- pool_id str
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- private_endpoint_dns_zones Sequence[str]
- An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
- private_endpoint_id str
- The OCID of a private endpoint.
- private_endpoint_max_host_count int
- The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
- private_endpoint_nsg_ids Sequence[str]
- An array of network security group OCIDs.
- private_endpoint_subnet_id str
- The OCID of a subnet.
- run_duration_in_milliseconds str
- The duration of the run in milliseconds.
- spark_version str
- The Spark version utilized to run the application. This value may be set if applicationId is not since the Spark version will be taken from the associated application.
- state str
- The current state of this run.
- time_created str
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- time_updated str
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- total_ocpu int
- The total number of oCPU requested by the run.
- type str
- The Spark application processing type.
- warehouse_bucket_uri str
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat. - ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values 
- applicationId String
- The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
- applicationLogConfig Property Map
- Logging details of Application logs for Data Flow Run.
- archiveUri String
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- arguments List<String>
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ]. If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe".
- asynchronous Boolean
- className String
- The class for the application.
- compartmentId String
- (Updatable) The OCID of a compartment.
- configuration Map<String>
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- dataReadInBytes String
- The data read by the run in bytes.
- dataWrittenInBytes String
- The data written by the run in bytes.
- definedTags Map<String>
- (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- displayName String
- A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
- driverShape String
- The VM shape for the driver. Sets the driver cores and memory.
- driverShapeConfig Property Map
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- execute String
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class,--file,--jars,--conf,--py-files, and main application file with arguments. Example:--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- executorShape String
- The VM shape for the executors. Sets the executor cores and memory.
- executorShapeConfig Property Map
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- fileUri String
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- freeformTags Map<String>
- (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- idleTimeoutInMinutes String
- (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- language String
- The Spark language.
- lifecycleDetails String
- The detailed messages about the lifecycle state.
- logsBucketUri String
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- maxDurationInMinutes String
- (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to the IN_PROGRESS state.
- metastoreId String
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- numExecutors Number
- The number of executor VMs requested.
- opcParentRptUrl String
- (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
- opcRequestId String
- Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
- ownerPrincipalId String
- The OCID of the user who created the resource.
- ownerUserName String
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- parameters List<Property Map>
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- poolId String
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- privateEndpointDnsZones List<String>
- An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
- privateEndpointId String
- The OCID of a private endpoint.
- privateEndpointMaxHostCount Number
- The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
- privateEndpointNsgIds List<String>
- An array of network security group OCIDs.
- privateEndpointSubnetId String
- The OCID of a subnet.
- runDurationInMilliseconds String
- The duration of the run in milliseconds.
- sparkVersion String
- The Spark version utilized to run the application. This value may be set if applicationId is not since the Spark version will be taken from the associated application.
- state String
- The current state of this run.
- timeCreated String
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- timeUpdated String
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- totalOcpu Number
- The total number of oCPU requested by the run.
- type String
- The Spark application processing type.
- warehouseBucketUri String
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat. - ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values 
Supporting Types
InvokeRunApplicationLogConfig, InvokeRunApplicationLogConfigArgs          
- LogGroupId string
- The log group id for where log objects will be for Data Flow Runs.
- LogId string
- The log id of the log object the Application Logs of Data Flow Run will be shipped to.
- LogGroupId string
- The log group id for where log objects will be for Data Flow Runs.
- LogId string
- The log id of the log object the Application Logs of Data Flow Run will be shipped to.
- logGroupId String
- The log group id for where log objects will be for Data Flow Runs.
- logId String
- The log id of the log object the Application Logs of Data Flow Run will be shipped to.
- logGroupId string
- The log group id for where log objects will be for Data Flow Runs.
- logId string
- The log id of the log object the Application Logs of Data Flow Run will be shipped to.
- log_group_id str
- The log group id for where log objects will be for Data Flow Runs.
- log_id str
- The log id of the log object the Application Logs of Data Flow Run will be shipped to.
- logGroupId String
- The log group id for where log objects will be for Data Flow Runs.
- logId String
- The log id of the log object the Application Logs of Data Flow Run will be shipped to.
InvokeRunDriverShapeConfig, InvokeRunDriverShapeConfigArgs          
- MemoryInGbs double
- The amount of memory used for the driver or executors.
- Ocpus double
- The total number of OCPUs used for the driver or executors. See here for details.
- MemoryInGbs float64
- The amount of memory used for the driver or executors.
- Ocpus float64
- The total number of OCPUs used for the driver or executors. See here for details.
- memoryIn DoubleGbs 
- The amount of memory used for the driver or executors.
- ocpus Double
- The total number of OCPUs used for the driver or executors. See here for details.
- memoryIn numberGbs 
- The amount of memory used for the driver or executors.
- ocpus number
- The total number of OCPUs used for the driver or executors. See here for details.
- memory_in_ floatgbs 
- The amount of memory used for the driver or executors.
- ocpus float
- The total number of OCPUs used for the driver or executors. See here for details.
- memoryIn NumberGbs 
- The amount of memory used for the driver or executors.
- ocpus Number
- The total number of OCPUs used for the driver or executors. See here for details.
InvokeRunExecutorShapeConfig, InvokeRunExecutorShapeConfigArgs          
- MemoryInGbs double
- The amount of memory used for the driver or executors.
- Ocpus double
- The total number of OCPUs used for the driver or executors. See here for details.
- MemoryInGbs float64
- The amount of memory used for the driver or executors.
- Ocpus float64
- The total number of OCPUs used for the driver or executors. See here for details.
- memoryInGbs Double
- The amount of memory used for the driver or executors.
- ocpus Double
- The total number of OCPUs used for the driver or executors. See here for details.
- memoryInGbs number
- The amount of memory used for the driver or executors.
- ocpus number
- The total number of OCPUs used for the driver or executors. See here for details.
- memory_in_gbs float
- The amount of memory used for the driver or executors.
- ocpus float
- The total number of OCPUs used for the driver or executors. See here for details.
- memoryInGbs Number
- The amount of memory used for the driver or executors.
- ocpus Number
- The total number of OCPUs used for the driver or executors. See here for details.
InvokeRunParameter, InvokeRunParameterArgs      
Import
InvokeRuns can be imported using the id, e.g.
$ pulumi import oci:DataFlow/invokeRun:InvokeRun test_invoke_run "id"
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- oci pulumi/pulumi-oci
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the oci Terraform Provider.