oci.DataFlow.getApplications
Explore with Pulumi AI
This data source provides the list of Applications in Oracle Cloud Infrastructure Data Flow service.
Lists all applications in the specified compartment. Only one parameter other than compartmentId may also be included in a query. The query must include compartmentId. If the query does not include compartmentId, or includes compartmentId together with two or more other parameters, an error is returned.
Example Usage
// Example: list Data Flow applications in a compartment.
// compartmentId is required; the remaining arguments are optional filters
// (the service accepts at most one filter besides compartmentId per query;
// they are shown together here for illustration).
import * as pulumi from "@pulumi/pulumi";
import * as oci from "@pulumi/oci";
const testApplications = oci.DataFlow.getApplications({
    // Required: OCID of the compartment to search.
    compartmentId: compartmentId,
    // Optional filters, supplied from configuration/other resources.
    displayName: applicationDisplayName,
    displayNameStartsWith: applicationDisplayNameStartsWith,
    ownerPrincipalId: testOwnerPrincipal.id,
    sparkVersion: applicationSparkVersion,
});
# Example: list Data Flow applications in a compartment.
# compartment_id is required; the remaining arguments are optional filters
# (the service accepts at most one filter besides compartment_id per query;
# they are shown together here for illustration).
import pulumi
import pulumi_oci as oci
test_applications = oci.DataFlow.get_applications(compartment_id=compartment_id,
    display_name=application_display_name,
    display_name_starts_with=application_display_name_starts_with,
    owner_principal_id=test_owner_principal["id"],
    spark_version=application_spark_version)
// Example: list Data Flow applications in a compartment using the Pulumi
// OCI SDK. CompartmentId is required; optional filters are passed as
// *string via pulumi.StringRef.
package main
import (
	"github.com/pulumi/pulumi-oci/sdk/v2/go/oci/dataflow"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Result is discarded here; a real program would use the
		// returned *GetApplicationsResult.
		_, err := dataflow.GetApplications(ctx, &dataflow.GetApplicationsArgs{
			CompartmentId:         compartmentId,
			DisplayName:           pulumi.StringRef(applicationDisplayName),
			DisplayNameStartsWith: pulumi.StringRef(applicationDisplayNameStartsWith),
			OwnerPrincipalId:      pulumi.StringRef(testOwnerPrincipal.Id),
			SparkVersion:          pulumi.StringRef(applicationSparkVersion),
		}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}
// Example: list Data Flow applications in a compartment via the Pulumi OCI
// provider. CompartmentId is required; the other arguments are optional
// filters supplied from configuration/other resources.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Oci = Pulumi.Oci;
return await Deployment.RunAsync(() => 
{
    // Invoke form: accepts Input-wrapped arguments, returns an Output.
    var testApplications = Oci.DataFlow.GetApplications.Invoke(new()
    {
        CompartmentId = compartmentId,
        DisplayName = applicationDisplayName,
        DisplayNameStartsWith = applicationDisplayNameStartsWith,
        OwnerPrincipalId = testOwnerPrincipal.Id,
        SparkVersion = applicationSparkVersion,
    });
});
// Example: list Data Flow applications in a compartment via the Pulumi OCI
// Java SDK. compartmentId is required; the other builder arguments are
// optional filters supplied from configuration/other resources.
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.oci.DataFlow.DataFlowFunctions;
import com.pulumi.oci.DataFlow.inputs.GetApplicationsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        // Direct invoke form; returns the result once it is available.
        final var testApplications = DataFlowFunctions.getApplications(GetApplicationsArgs.builder()
            .compartmentId(compartmentId)
            .displayName(applicationDisplayName)
            .displayNameStartsWith(applicationDisplayNameStartsWith)
            .ownerPrincipalId(testOwnerPrincipal.id())
            .sparkVersion(applicationSparkVersion)
            .build());
    }
}
# Example: list Data Flow applications in a compartment.
# compartmentId is required; the remaining arguments are optional filters.
variables:
  testApplications:
    fn::invoke:
      function: oci:DataFlow:getApplications
      arguments:
        compartmentId: ${compartmentId} # required: compartment OCID
        displayName: ${applicationDisplayName}
        displayNameStartsWith: ${applicationDisplayNameStartsWith}
        ownerPrincipalId: ${testOwnerPrincipal.id}
        sparkVersion: ${applicationSparkVersion}
Using getApplications
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getApplications(args: GetApplicationsArgs, opts?: InvokeOptions): Promise<GetApplicationsResult>
function getApplicationsOutput(args: GetApplicationsOutputArgs, opts?: InvokeOptions): Output<GetApplicationsResult>
def get_applications(compartment_id: Optional[str] = None,
                     display_name: Optional[str] = None,
                     display_name_starts_with: Optional[str] = None,
                     filters: Optional[Sequence[GetApplicationsFilter]] = None,
                     owner_principal_id: Optional[str] = None,
                     spark_version: Optional[str] = None,
                     opts: Optional[InvokeOptions] = None) -> GetApplicationsResult
def get_applications_output(compartment_id: Optional[pulumi.Input[str]] = None,
                     display_name: Optional[pulumi.Input[str]] = None,
                     display_name_starts_with: Optional[pulumi.Input[str]] = None,
                     filters: Optional[pulumi.Input[Sequence[pulumi.Input[GetApplicationsFilterArgs]]]] = None,
                     owner_principal_id: Optional[pulumi.Input[str]] = None,
                     spark_version: Optional[pulumi.Input[str]] = None,
                     opts: Optional[InvokeOptions] = None) -> Output[GetApplicationsResult]
func GetApplications(ctx *Context, args *GetApplicationsArgs, opts ...InvokeOption) (*GetApplicationsResult, error)
func GetApplicationsOutput(ctx *Context, args *GetApplicationsOutputArgs, opts ...InvokeOption) GetApplicationsResultOutput
> Note: This function is named GetApplications in the Go SDK.
public static class GetApplications 
{
    public static Task<GetApplicationsResult> InvokeAsync(GetApplicationsArgs args, InvokeOptions? opts = null)
    public static Output<GetApplicationsResult> Invoke(GetApplicationsInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetApplicationsResult> getApplications(GetApplicationsArgs args, InvokeOptions options)
public static Output<GetApplicationsResult> getApplications(GetApplicationsArgs args, InvokeOptions options)
fn::invoke:
  function: oci:DataFlow/getApplications:getApplications
  arguments:
    # arguments dictionary
The following arguments are supported:
- CompartmentId string
- The OCID of the compartment.
- DisplayName string
- The query parameter for the Spark application name.
- DisplayNameStartsWith string
- The displayName prefix.
- Filters List<GetApplicationsFilter>
- OwnerPrincipalId string
- The OCID of the user who created the resource.
- SparkVersion string
- The Spark version utilized to run the application.
- CompartmentId string
- The OCID of the compartment.
- DisplayName string
- The query parameter for the Spark application name.
- DisplayNameStartsWith string
- The displayName prefix.
- Filters []GetApplicationsFilter
- OwnerPrincipalId string
- The OCID of the user who created the resource.
- SparkVersion string
- The Spark version utilized to run the application.
- compartmentId String
- The OCID of the compartment.
- displayName String
- The query parameter for the Spark application name.
- displayNameStartsWith String
- The displayName prefix.
- filters List<GetApplicationsFilter>
- ownerPrincipalId String
- The OCID of the user who created the resource.
- sparkVersion String
- The Spark version utilized to run the application.
- compartmentId string
- The OCID of the compartment.
- displayName string
- The query parameter for the Spark application name.
- displayNameStartsWith string
- The displayName prefix.
- filters GetApplicationsFilter[]
- ownerPrincipalId string
- The OCID of the user who created the resource.
- sparkVersion string
- The Spark version utilized to run the application.
- compartment_id str
- The OCID of the compartment.
- display_name str
- The query parameter for the Spark application name.
- display_name_starts_with str
- The displayName prefix.
- filters Sequence[GetApplicationsFilter]
- owner_principal_id str
- The OCID of the user who created the resource.
- spark_version str
- The Spark version utilized to run the application.
- compartmentId String
- The OCID of the compartment.
- displayName String
- The query parameter for the Spark application name.
- displayNameStartsWith String
- The displayName prefix.
- filters List<Property Map>
- ownerPrincipalId String
- The OCID of the user who created the resource.
- sparkVersion String
- The Spark version utilized to run the application.
getApplications Result
The following output properties are available:
- Applications
List<GetApplications Application> 
- The list of applications.
- CompartmentId string
- The OCID of a compartment.
- Id string
- The provider-assigned unique ID for this managed resource.
- DisplayName string
- A user-friendly name. This name is not necessarily unique.
- DisplayNameStartsWith string
- Filters List<GetApplicationsFilter>
- OwnerPrincipalId string
- The OCID of the user who created the resource.
- SparkVersion string
- The Spark version utilized to run the application.
- Applications
[]GetApplications Application 
- The list of applications.
- CompartmentId string
- The OCID of a compartment.
- Id string
- The provider-assigned unique ID for this managed resource.
- DisplayName string
- A user-friendly name. This name is not necessarily unique.
- DisplayNameStartsWith string
- Filters []GetApplicationsFilter
- OwnerPrincipalId string
- The OCID of the user who created the resource.
- SparkVersion string
- The Spark version utilized to run the application.
- applications
List<GetApplications Application> 
- The list of applications.
- compartmentId String
- The OCID of a compartment.
- id String
- The provider-assigned unique ID for this managed resource.
- displayName String
- A user-friendly name. This name is not necessarily unique.
- displayNameStartsWith String
- filters List<GetApplicationsFilter>
- ownerPrincipalId String
- The OCID of the user who created the resource.
- sparkVersion String
- The Spark version utilized to run the application.
- applications
GetApplications Application[] 
- The list of applications.
- compartmentId string
- The OCID of a compartment.
- id string
- The provider-assigned unique ID for this managed resource.
- displayName string
- A user-friendly name. This name is not necessarily unique.
- displayNameStartsWith string
- filters GetApplicationsFilter[]
- ownerPrincipalId string
- The OCID of the user who created the resource.
- sparkVersion string
- The Spark version utilized to run the application.
- applications
Sequence[GetApplications Application] 
- The list of applications.
- compartment_id str
- The OCID of a compartment.
- id str
- The provider-assigned unique ID for this managed resource.
- display_name str
- A user-friendly name. This name is not necessarily unique.
- display_name_starts_with str
- filters Sequence[GetApplicationsFilter]
- owner_principal_id str
- The OCID of the user who created the resource.
- spark_version str
- The Spark version utilized to run the application.
- applications List<Property Map>
- The list of applications.
- compartmentId String
- The OCID of a compartment.
- id String
- The provider-assigned unique ID for this managed resource.
- displayName String
- A user-friendly name. This name is not necessarily unique.
- displayNameStartsWith String
- filters List<Property Map>
- ownerPrincipalId String
- The OCID of the user who created the resource.
- sparkVersion String
- The Spark version utilized to run the application.
Supporting Types
GetApplicationsApplication  
- ApplicationLogConfigs List<GetApplicationsApplicationApplicationLogConfig>
- Logging details of Application logs for Data Flow Run.
- ArchiveUri string
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- Arguments List<string>
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ]. If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe".
- ClassName string
- The class for the application.
- CompartmentId string
- The OCID of the compartment.
- Configuration Dictionary<string, string>
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- DefinedTags Dictionary<string, string>
- Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- Description string
- A user-friendly description.
- DisplayName string
- The query parameter for the Spark application name.
- DriverShape string
- The VM shape for the driver. Sets the driver cores and memory.
- DriverShapeConfigs List<GetApplicationsApplicationDriverShapeConfig>
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- Execute string
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class,--file,--jars,--conf,--py-files, and main application file with arguments. Example:--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- ExecutorShape string
- The VM shape for the executors. Sets the executor cores and memory.
- ExecutorShapeConfigs List<GetApplicationsApplicationExecutorShapeConfig>
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- FileUri string
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- FreeformTags Dictionary<string, string>
- Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- Id string
- The application ID.
- IdleTimeoutInMinutes string
- The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- Language string
- The Spark language.
- LogsBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- MaxDurationInMinutes string
- The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
- MetastoreId string
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- NumExecutors int
- The number of executor VMs requested.
- OwnerPrincipalId string
- The OCID of the user who created the resource.
- OwnerUserName string
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- Parameters
List<GetApplications Application Parameter> 
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- PoolId string
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- PrivateEndpointId string
- The OCID of a private endpoint.
- SparkVersion string
- The Spark version utilized to run the application.
- State string
- The current state of this application.
- TerminateRunsOnDeletion bool
- TimeCreated string
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- TimeUpdated string
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- Type string
- The Spark application processing type.
- WarehouseBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- ApplicationLog []GetConfigs Applications Application Application Log Config 
- Logging details of Application logs for Data Flow Run.
- ArchiveUri string
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- Arguments []string
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, wherenameis the name of the parameter. Example:[ "--input", "${input_file}", "--name", "John Doe" ]If "input_file" has a value of "mydata.xml", then the value above will be translated to--input mydata.xml --name "John Doe"
- ClassName string
- The class for the application.
- CompartmentId string
- The OCID of the compartment.
- Configuration map[string]string
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- map[string]string
- Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- Description string
- A user-friendly description.
- DisplayName string
- The query parameter for the Spark application name.
- DriverShape string
- The VM shape for the driver. Sets the driver cores and memory.
- DriverShape []GetConfigs Applications Application Driver Shape Config 
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- Execute string
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class,--file,--jars,--conf,--py-files, and main application file with arguments. Example:--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- ExecutorShape string
- The VM shape for the executors. Sets the executor cores and memory.
- ExecutorShape []GetConfigs Applications Application Executor Shape Config 
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- FileUri string
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- map[string]string
- Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- Id string
- The application ID.
- IdleTimeout stringIn Minutes 
- The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- Language string
- The Spark language.
- LogsBucket stringUri 
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- MaxDuration stringIn Minutes 
- The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESSstate.
- MetastoreId string
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- NumExecutors int
- The number of executor VMs requested.
- OwnerPrincipal stringId 
- The OCID of the user who created the resource.
- OwnerUser stringName 
- The username of the user who created the resource. If the username of the owner does not exist, nullwill be returned and the caller should refer to the ownerPrincipalId value instead.
- Parameters
[]GetApplications Application Parameter 
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- PoolId string
- The OCID of a pool. Unique Id to indentify a dataflow pool resource.
- PrivateEndpoint stringId 
- The OCID of a private endpoint.
- SparkVersion string
- The Spark version utilized to run the application.
- State string
- The current state of this application.
- TerminateRuns boolOn Deletion 
- TimeCreated string
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- TimeUpdated string
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- Type string
- The Spark application processing type.
- WarehouseBucket stringUri 
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- applicationLog List<GetConfigs Applications Application Application Log Config> 
- Logging details of Application logs for Data Flow Run.
- archiveUri String
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- arguments List<String>
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, wherenameis the name of the parameter. Example:[ "--input", "${input_file}", "--name", "John Doe" ]If "input_file" has a value of "mydata.xml", then the value above will be translated to--input mydata.xml --name "John Doe"
- className String
- The class for the application.
- compartmentId String
- The OCID of the compartment.
- configuration Map<String,String>
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- Map<String,String>
- Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- description String
- A user-friendly description.
- displayName String
- The query parameter for the Spark application name.
- driverShape String
- The VM shape for the driver. Sets the driver cores and memory.
- driverShape List<GetConfigs Applications Application Driver Shape Config> 
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- execute String
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class,--file,--jars,--conf,--py-files, and main application file with arguments. Example:--jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- executorShape String
- The VM shape for the executors. Sets the executor cores and memory.
- executorShape List<GetConfigs Applications Application Executor Shape Config> 
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- fileUri String
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- Map<String,String>
- Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- id String
- The application ID.
- idleTimeout StringIn Minutes 
- The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- language String
- The Spark language.
- logsBucket StringUri 
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- maxDuration StringIn Minutes 
- The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESSstate.
- metastoreId String
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- numExecutors Integer
- The number of executor VMs requested.
- ownerPrincipal StringId 
- The OCID of the user who created the resource.
- ownerUser StringName 
- The username of the user who created the resource. If the username of the owner does not exist, nullwill be returned and the caller should refer to the ownerPrincipalId value instead.
- parameters
List<GetApplications Application Parameter> 
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- poolId String
- The OCID of a pool. Unique Id to indentify a dataflow pool resource.
- privateEndpoint StringId 
- The OCID of a private endpoint.
- sparkVersion String
- The Spark version utilized to run the application.
- state String
- The current state of this application.
- terminateRuns BooleanOn Deletion 
- timeCreated String
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- timeUpdated String
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- type String
- The Spark application processing type.
- warehouseBucket StringUri 
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- applicationLog GetConfigs Applications Application Application Log Config[] 
- Logging details of Application logs for Data Flow Run.
- archiveUri string
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- arguments string[]
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, wherenameis the name of the parameter. Example:[ "--input", "${input_file}", "--name", "John Doe" ]If "input_file" has a value of "mydata.xml", then the value above will be translated to--input mydata.xml --name "John Doe"
- className string
- The class for the application.
- compartmentId string
- The OCID of the compartment.
- configuration {[key: string]: string}
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- {[key: string]: string}
- Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- description string
- A user-friendly description.
- displayName string
- The query parameter for the Spark application name.
- driverShape string
- The VM shape for the driver. Sets the driver cores and memory.
- driverShapeConfigs GetApplicationsApplicationDriverShapeConfig[]
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- execute string
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10. Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- executorShape string
- The VM shape for the executors. Sets the executor cores and memory.
- executorShapeConfigs GetApplicationsApplicationExecutorShapeConfig[]
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- fileUri string
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- {[key: string]: string}
- Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- id string
- The application ID.
- idleTimeoutInMinutes string
- The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- language string
- The Spark language.
- logsBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- maxDurationInMinutes string
- The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
- metastoreId string
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- numExecutors number
- The number of executor VMs requested.
- ownerPrincipalId string
- The OCID of the user who created the resource.
- ownerUserName string
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- parameters
GetApplications Application Parameter[] 
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- poolId string
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- privateEndpointId string
- The OCID of a private endpoint.
- sparkVersion string
- The Spark version utilized to run the application.
- state string
- The current state of this application.
- terminateRunsOnDeletion boolean
- timeCreated string
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- timeUpdated string
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- type string
- The Spark application processing type.
- warehouseBucketUri string
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- application_log_configs Sequence[GetApplicationsApplicationApplicationLogConfig]
- Logging details of Application logs for Data Flow Run.
- archive_uri str
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- arguments Sequence[str]
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ]. If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe".
- class_name str
- The class for the application.
- compartment_id str
- The OCID of the compartment.
- configuration Mapping[str, str]
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- Mapping[str, str]
- Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- description str
- A user-friendly description.
- display_name str
- The query parameter for the Spark application name.
- driver_shape str
- The VM shape for the driver. Sets the driver cores and memory.
- driver_shape_configs Sequence[GetApplicationsApplicationDriverShapeConfig]
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- execute str
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10. Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- executor_shape str
- The VM shape for the executors. Sets the executor cores and memory.
- executor_shape_configs Sequence[GetApplicationsApplicationExecutorShapeConfig]
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- file_uri str
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- Mapping[str, str]
- Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- id str
- The application ID.
- idle_timeout_in_minutes str
- The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- language str
- The Spark language.
- logs_bucket_uri str
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- max_duration_in_minutes str
- The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
- metastore_id str
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- num_executors int
- The number of executor VMs requested.
- owner_principal_id str
- The OCID of the user who created the resource.
- owner_user_name str
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- parameters
Sequence[GetApplications Application Parameter] 
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- pool_id str
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- private_endpoint_id str
- The OCID of a private endpoint.
- spark_version str
- The Spark version utilized to run the application.
- state str
- The current state of this application.
- terminate_runs_on_deletion bool
- time_created str
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- time_updated str
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- type str
- The Spark application processing type.
- warehouse_bucket_uri str
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- applicationLogConfigs List&lt;Property Map&gt;
- Logging details of Application logs for Data Flow Run.
- archiveUri String
- A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- arguments List<String>
- The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ]. If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe".
- className String
- The class for the application.
- compartmentId String
- The OCID of the compartment.
- configuration Map<String>
- The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
- Map<String>
- Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
- description String
- A user-friendly description.
- displayName String
- The query parameter for the Spark application name.
- driverShape String
- The VM shape for the driver. Sets the driver cores and memory.
- driverShapeConfigs List&lt;Property Map&gt;
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- execute String
- The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10. Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
- executorShape String
- The VM shape for the executors. Sets the executor cores and memory.
- executorShapeConfigs List&lt;Property Map&gt;
- This is used to configure the shape of the driver or executor if a flexible shape is used.
- fileUri String
- An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- Map<String>
- Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
- id String
- The application ID.
- idleTimeoutInMinutes String
- The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
- language String
- The Spark language.
- logsBucketUri String
- An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
- maxDurationInMinutes String
- The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
- metastoreId String
- The OCID of Oracle Cloud Infrastructure Hive Metastore.
- numExecutors Number
- The number of executor VMs requested.
- ownerPrincipalId String
- The OCID of the user who created the resource.
- ownerUserName String
- The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
- parameters List<Property Map>
- An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
- poolId String
- The OCID of a pool. Unique Id to identify a dataflow pool resource.
- privateEndpointId String
- The OCID of a private endpoint.
- sparkVersion String
- The Spark version utilized to run the application.
- state String
- The current state of this application.
- terminateRunsOnDeletion Boolean
- timeCreated String
- The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- timeUpdated String
- The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
- type String
- The Spark application processing type.
- warehouseBucketUri String
- An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
GetApplicationsApplicationApplicationLogConfig     
- LogGroupId string
- The log group id for where log objects will be for Data Flow Runs.
- LogId string
- The log id of the log object the Application Logs of Data Flow Run will be shipped to.
- LogGroupId string
- The log group id for where log objects will be for Data Flow Runs.
- LogId string
- The log id of the log object the Application Logs of Data Flow Run will be shipped to.
- logGroupId String
- The log group id for where log objects will be for Data Flow Runs.
- logId String
- The log id of the log object the Application Logs of Data Flow Run will be shipped to.
- logGroupId string
- The log group id for where log objects will be for Data Flow Runs.
- logId string
- The log id of the log object the Application Logs of Data Flow Run will be shipped to.
- log_group_id str
- The log group id for where log objects will be for Data Flow Runs.
- log_id str
- The log id of the log object the Application Logs of Data Flow Run will be shipped to.
- logGroupId String
- The log group id for where log objects will be for Data Flow Runs.
- logId String
- The log id of the log object the Application Logs of Data Flow Run will be shipped to.
GetApplicationsApplicationDriverShapeConfig     
- MemoryInGbs double
- The amount of memory used for the driver or executors.
- Ocpus double
- The total number of OCPUs used for the driver or executors. See here for details.
- MemoryInGbs float64
- The amount of memory used for the driver or executors.
- Ocpus float64
- The total number of OCPUs used for the driver or executors. See here for details.
- memoryInGbs Double
- The amount of memory used for the driver or executors.
- ocpus Double
- The total number of OCPUs used for the driver or executors. See here for details.
- memoryInGbs number
- The amount of memory used for the driver or executors.
- ocpus number
- The total number of OCPUs used for the driver or executors. See here for details.
- memory_in_gbs float
- The amount of memory used for the driver or executors.
- ocpus float
- The total number of OCPUs used for the driver or executors. See here for details.
- memoryInGbs Number
- The amount of memory used for the driver or executors.
- ocpus Number
- The total number of OCPUs used for the driver or executors. See here for details.
GetApplicationsApplicationExecutorShapeConfig     
- MemoryInGbs double
- The amount of memory used for the driver or executors.
- Ocpus double
- The total number of OCPUs used for the driver or executors. See here for details.
- MemoryInGbs float64
- The amount of memory used for the driver or executors.
- Ocpus float64
- The total number of OCPUs used for the driver or executors. See here for details.
- memoryInGbs Double
- The amount of memory used for the driver or executors.
- ocpus Double
- The total number of OCPUs used for the driver or executors. See here for details.
- memoryInGbs number
- The amount of memory used for the driver or executors.
- ocpus number
- The total number of OCPUs used for the driver or executors. See here for details.
- memory_in_gbs float
- The amount of memory used for the driver or executors.
- ocpus float
- The total number of OCPUs used for the driver or executors. See here for details.
- memoryInGbs Number
- The amount of memory used for the driver or executors.
- ocpus Number
- The total number of OCPUs used for the driver or executors. See here for details.
GetApplicationsApplicationParameter   
GetApplicationsFilter  
Package Details
- Repository
- oci pulumi/pulumi-oci
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the oci Terraform Provider.