dataproc.v1beta1.job
"Job is the Schema for the Jobs API. Manages a job resource within a Dataproc cluster."
Index
fn new(name)
obj metadata
fn withAnnotations(annotations)
fn withAnnotationsMixin(annotations)
fn withClusterName(clusterName)
fn withCreationTimestamp(creationTimestamp)
fn withDeletionGracePeriodSeconds(deletionGracePeriodSeconds)
fn withDeletionTimestamp(deletionTimestamp)
fn withFinalizers(finalizers)
fn withFinalizersMixin(finalizers)
fn withGenerateName(generateName)
fn withGeneration(generation)
fn withLabels(labels)
fn withLabelsMixin(labels)
fn withName(name)
fn withNamespace(namespace)
fn withOwnerReferences(ownerReferences)
fn withOwnerReferencesMixin(ownerReferences)
fn withResourceVersion(resourceVersion)
fn withSelfLink(selfLink)
fn withUid(uid)
obj spec
fn withDeletionPolicy(deletionPolicy)
fn withManagementPolicies(managementPolicies)
fn withManagementPoliciesMixin(managementPolicies)
obj spec.forProvider
fn withForceDelete(forceDelete)
fn withHadoopConfig(hadoopConfig)
fn withHadoopConfigMixin(hadoopConfig)
fn withHiveConfig(hiveConfig)
fn withHiveConfigMixin(hiveConfig)
fn withLabels(labels)
fn withLabelsMixin(labels)
fn withPigConfig(pigConfig)
fn withPigConfigMixin(pigConfig)
fn withPlacement(placement)
fn withPlacementMixin(placement)
fn withPrestoConfig(prestoConfig)
fn withPrestoConfigMixin(prestoConfig)
fn withProject(project)
fn withPysparkConfig(pysparkConfig)
fn withPysparkConfigMixin(pysparkConfig)
fn withReference(reference)
fn withReferenceMixin(reference)
fn withRegion(region)
fn withScheduling(scheduling)
fn withSchedulingMixin(scheduling)
fn withSparkConfig(sparkConfig)
fn withSparkConfigMixin(sparkConfig)
fn withSparksqlConfig(sparksqlConfig)
fn withSparksqlConfigMixin(sparksqlConfig)
obj spec.forProvider.hadoopConfig
fn withArchiveUris(archiveUris)
fn withArchiveUrisMixin(archiveUris)
fn withArgs(args)
fn withArgsMixin(args)
fn withFileUris(fileUris)
fn withFileUrisMixin(fileUris)
fn withJarFileUris(jarFileUris)
fn withJarFileUrisMixin(jarFileUris)
fn withLoggingConfig(loggingConfig)
fn withLoggingConfigMixin(loggingConfig)
fn withMainClass(mainClass)
fn withMainJarFileUri(mainJarFileUri)
fn withProperties(properties)
fn withPropertiesMixin(properties)
obj spec.forProvider.hadoopConfig.loggingConfig
obj spec.forProvider.hiveConfig
fn withContinueOnFailure(continueOnFailure)
fn withJarFileUris(jarFileUris)
fn withJarFileUrisMixin(jarFileUris)
fn withProperties(properties)
fn withPropertiesMixin(properties)
fn withQueryFileUri(queryFileUri)
fn withQueryList(queryList)
fn withQueryListMixin(queryList)
fn withScriptVariables(scriptVariables)
fn withScriptVariablesMixin(scriptVariables)
obj spec.forProvider.pigConfig
fn withContinueOnFailure(continueOnFailure)
fn withJarFileUris(jarFileUris)
fn withJarFileUrisMixin(jarFileUris)
fn withLoggingConfig(loggingConfig)
fn withLoggingConfigMixin(loggingConfig)
fn withProperties(properties)
fn withPropertiesMixin(properties)
fn withQueryFileUri(queryFileUri)
fn withQueryList(queryList)
fn withQueryListMixin(queryList)
fn withScriptVariables(scriptVariables)
fn withScriptVariablesMixin(scriptVariables)
obj spec.forProvider.pigConfig.loggingConfig
obj spec.forProvider.placement
obj spec.forProvider.prestoConfig
fn withClientTags(clientTags)
fn withClientTagsMixin(clientTags)
fn withContinueOnFailure(continueOnFailure)
fn withLoggingConfig(loggingConfig)
fn withLoggingConfigMixin(loggingConfig)
fn withOutputFormat(outputFormat)
fn withProperties(properties)
fn withPropertiesMixin(properties)
fn withQueryFileUri(queryFileUri)
fn withQueryList(queryList)
fn withQueryListMixin(queryList)
obj spec.forProvider.prestoConfig.loggingConfig
obj spec.forProvider.pysparkConfig
fn withArchiveUris(archiveUris)
fn withArchiveUrisMixin(archiveUris)
fn withArgs(args)
fn withArgsMixin(args)
fn withFileUris(fileUris)
fn withFileUrisMixin(fileUris)
fn withJarFileUris(jarFileUris)
fn withJarFileUrisMixin(jarFileUris)
fn withLoggingConfig(loggingConfig)
fn withLoggingConfigMixin(loggingConfig)
fn withMainPythonFileUri(mainPythonFileUri)
fn withProperties(properties)
fn withPropertiesMixin(properties)
fn withPythonFileUris(pythonFileUris)
fn withPythonFileUrisMixin(pythonFileUris)
obj spec.forProvider.pysparkConfig.loggingConfig
obj spec.forProvider.reference
obj spec.forProvider.regionRef
obj spec.forProvider.regionSelector
obj spec.forProvider.scheduling
obj spec.forProvider.sparkConfig
fn withArchiveUris(archiveUris)
fn withArchiveUrisMixin(archiveUris)
fn withArgs(args)
fn withArgsMixin(args)
fn withFileUris(fileUris)
fn withFileUrisMixin(fileUris)
fn withJarFileUris(jarFileUris)
fn withJarFileUrisMixin(jarFileUris)
fn withLoggingConfig(loggingConfig)
fn withLoggingConfigMixin(loggingConfig)
fn withMainClass(mainClass)
fn withMainJarFileUri(mainJarFileUri)
fn withProperties(properties)
fn withPropertiesMixin(properties)
obj spec.forProvider.sparkConfig.loggingConfig
obj spec.forProvider.sparksqlConfig
fn withJarFileUris(jarFileUris)
fn withJarFileUrisMixin(jarFileUris)
fn withLoggingConfig(loggingConfig)
fn withLoggingConfigMixin(loggingConfig)
fn withProperties(properties)
fn withPropertiesMixin(properties)
fn withQueryFileUri(queryFileUri)
fn withQueryList(queryList)
fn withQueryListMixin(queryList)
fn withScriptVariables(scriptVariables)
fn withScriptVariablesMixin(scriptVariables)
obj spec.forProvider.sparksqlConfig.loggingConfig
obj spec.initProvider
fn withForceDelete(forceDelete)
fn withHadoopConfig(hadoopConfig)
fn withHadoopConfigMixin(hadoopConfig)
fn withHiveConfig(hiveConfig)
fn withHiveConfigMixin(hiveConfig)
fn withLabels(labels)
fn withLabelsMixin(labels)
fn withPigConfig(pigConfig)
fn withPigConfigMixin(pigConfig)
fn withPlacement(placement)
fn withPlacementMixin(placement)
fn withPrestoConfig(prestoConfig)
fn withPrestoConfigMixin(prestoConfig)
fn withProject(project)
fn withPysparkConfig(pysparkConfig)
fn withPysparkConfigMixin(pysparkConfig)
fn withReference(reference)
fn withReferenceMixin(reference)
fn withScheduling(scheduling)
fn withSchedulingMixin(scheduling)
fn withSparkConfig(sparkConfig)
fn withSparkConfigMixin(sparkConfig)
fn withSparksqlConfig(sparksqlConfig)
fn withSparksqlConfigMixin(sparksqlConfig)
obj spec.initProvider.hadoopConfig
fn withArchiveUris(archiveUris)
fn withArchiveUrisMixin(archiveUris)
fn withArgs(args)
fn withArgsMixin(args)
fn withFileUris(fileUris)
fn withFileUrisMixin(fileUris)
fn withJarFileUris(jarFileUris)
fn withJarFileUrisMixin(jarFileUris)
fn withLoggingConfig(loggingConfig)
fn withLoggingConfigMixin(loggingConfig)
fn withMainClass(mainClass)
fn withMainJarFileUri(mainJarFileUri)
fn withProperties(properties)
fn withPropertiesMixin(properties)
obj spec.initProvider.hadoopConfig.loggingConfig
obj spec.initProvider.hiveConfig
fn withContinueOnFailure(continueOnFailure)
fn withJarFileUris(jarFileUris)
fn withJarFileUrisMixin(jarFileUris)
fn withProperties(properties)
fn withPropertiesMixin(properties)
fn withQueryFileUri(queryFileUri)
fn withQueryList(queryList)
fn withQueryListMixin(queryList)
fn withScriptVariables(scriptVariables)
fn withScriptVariablesMixin(scriptVariables)
obj spec.initProvider.pigConfig
fn withContinueOnFailure(continueOnFailure)
fn withJarFileUris(jarFileUris)
fn withJarFileUrisMixin(jarFileUris)
fn withLoggingConfig(loggingConfig)
fn withLoggingConfigMixin(loggingConfig)
fn withProperties(properties)
fn withPropertiesMixin(properties)
fn withQueryFileUri(queryFileUri)
fn withQueryList(queryList)
fn withQueryListMixin(queryList)
fn withScriptVariables(scriptVariables)
fn withScriptVariablesMixin(scriptVariables)
obj spec.initProvider.pigConfig.loggingConfig
obj spec.initProvider.prestoConfig
fn withClientTags(clientTags)
fn withClientTagsMixin(clientTags)
fn withContinueOnFailure(continueOnFailure)
fn withLoggingConfig(loggingConfig)
fn withLoggingConfigMixin(loggingConfig)
fn withOutputFormat(outputFormat)
fn withProperties(properties)
fn withPropertiesMixin(properties)
fn withQueryFileUri(queryFileUri)
fn withQueryList(queryList)
fn withQueryListMixin(queryList)
obj spec.initProvider.prestoConfig.loggingConfig
obj spec.initProvider.pysparkConfig
fn withArchiveUris(archiveUris)
fn withArchiveUrisMixin(archiveUris)
fn withArgs(args)
fn withArgsMixin(args)
fn withFileUris(fileUris)
fn withFileUrisMixin(fileUris)
fn withJarFileUris(jarFileUris)
fn withJarFileUrisMixin(jarFileUris)
fn withLoggingConfig(loggingConfig)
fn withLoggingConfigMixin(loggingConfig)
fn withMainPythonFileUri(mainPythonFileUri)
fn withProperties(properties)
fn withPropertiesMixin(properties)
fn withPythonFileUris(pythonFileUris)
fn withPythonFileUrisMixin(pythonFileUris)
obj spec.initProvider.pysparkConfig.loggingConfig
obj spec.initProvider.reference
obj spec.initProvider.scheduling
obj spec.initProvider.sparkConfig
fn withArchiveUris(archiveUris)
fn withArchiveUrisMixin(archiveUris)
fn withArgs(args)
fn withArgsMixin(args)
fn withFileUris(fileUris)
fn withFileUrisMixin(fileUris)
fn withJarFileUris(jarFileUris)
fn withJarFileUrisMixin(jarFileUris)
fn withLoggingConfig(loggingConfig)
fn withLoggingConfigMixin(loggingConfig)
fn withMainClass(mainClass)
fn withMainJarFileUri(mainJarFileUri)
fn withProperties(properties)
fn withPropertiesMixin(properties)
obj spec.initProvider.sparkConfig.loggingConfig
obj spec.initProvider.sparksqlConfig
fn withJarFileUris(jarFileUris)
fn withJarFileUrisMixin(jarFileUris)
fn withLoggingConfig(loggingConfig)
fn withLoggingConfigMixin(loggingConfig)
fn withProperties(properties)
fn withPropertiesMixin(properties)
fn withQueryFileUri(queryFileUri)
fn withQueryList(queryList)
fn withQueryListMixin(queryList)
fn withScriptVariables(scriptVariables)
fn withScriptVariablesMixin(scriptVariables)
obj spec.initProvider.sparksqlConfig.loggingConfig
obj spec.providerConfigRef
obj spec.providerRef
obj spec.publishConnectionDetailsTo
obj spec.writeConnectionSecretToRef
Fields
fn new
new(name)
new returns an instance of Job
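For illustration, a minimal sketch of constructing a Job. The import path is an assumption; use whatever path the library is vendored under in your project:

```jsonnet
// Illustrative import path; adjust to wherever this library is vendored.
local gcp = import 'provider-gcp/main.libsonnet';
local job = gcp.dataproc.v1beta1.job;

job.new('wordcount')
```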
obj metadata
"ObjectMeta is metadata that all persisted resources must have, which includes all objects users must create."
fn metadata.withAnnotations
withAnnotations(annotations)
"Annotations is an unstructured key value map stored with a resource that may be set by external tools to store and retrieve arbitrary metadata. They are not queryable and should be preserved when modifying objects. More info: http://kubernetes.io/docs/user-guide/annotations"
fn metadata.withAnnotationsMixin
withAnnotationsMixin(annotations)
"Annotations is an unstructured key value map stored with a resource that may be set by external tools to store and retrieve arbitrary metadata. They are not queryable and should be preserved when modifying objects. More info: http://kubernetes.io/docs/user-guide/annotations"
Note: This function appends passed data to existing values
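As the note says, the `*Mixin` variants merge into whatever is already set, while the plain `with*` setters replace it. A small sketch (import path illustrative):

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;  // illustrative path

job.new('wordcount')
+ job.metadata.withAnnotations({ team: 'data' })         // sets the map
+ job.metadata.withAnnotationsMixin({ owner: 'alice' })  // merges into it
// metadata.annotations is now { team: 'data', owner: 'alice' }
```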
fn metadata.withClusterName
withClusterName(clusterName)
"The name of the cluster which the object belongs to. This is used to distinguish resources with same name and namespace in different clusters. This field is not set anywhere right now and apiserver is going to ignore it if set in create or update request."
fn metadata.withCreationTimestamp
withCreationTimestamp(creationTimestamp)
"Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers."
fn metadata.withDeletionGracePeriodSeconds
withDeletionGracePeriodSeconds(deletionGracePeriodSeconds)
"Number of seconds allowed for this object to gracefully terminate before it will be removed from the system. Only set when deletionTimestamp is also set. May only be shortened. Read-only."
fn metadata.withDeletionTimestamp
withDeletionTimestamp(deletionTimestamp)
"Time is a wrapper around time.Time which supports correct marshaling to YAML and JSON. Wrappers are provided for many of the factory methods that the time package offers."
fn metadata.withFinalizers
withFinalizers(finalizers)
"Must be empty before the object is deleted from the registry. Each entry is an identifier for the responsible component that will remove the entry from the list. If the deletionTimestamp of the object is non-nil, entries in this list can only be removed. Finalizers may be processed and removed in any order. Order is NOT enforced because it introduces significant risk of stuck finalizers. finalizers is a shared field, any actor with permission can reorder it. If the finalizer list is processed in order, then this can lead to a situation in which the component responsible for the first finalizer in the list is waiting for a signal (field value, external system, or other) produced by a component responsible for a finalizer later in the list, resulting in a deadlock. Without enforced ordering finalizers are free to order amongst themselves and are not vulnerable to ordering changes in the list."
fn metadata.withFinalizersMixin
withFinalizersMixin(finalizers)
"Must be empty before the object is deleted from the registry. Each entry is an identifier for the responsible component that will remove the entry from the list. If the deletionTimestamp of the object is non-nil, entries in this list can only be removed. Finalizers may be processed and removed in any order. Order is NOT enforced because it introduces significant risk of stuck finalizers. finalizers is a shared field, any actor with permission can reorder it. If the finalizer list is processed in order, then this can lead to a situation in which the component responsible for the first finalizer in the list is waiting for a signal (field value, external system, or other) produced by a component responsible for a finalizer later in the list, resulting in a deadlock. Without enforced ordering finalizers are free to order amongst themselves and are not vulnerable to ordering changes in the list."
Note: This function appends passed data to existing values
fn metadata.withGenerateName
withGenerateName(generateName)
"GenerateName is an optional prefix, used by the server, to generate a unique name ONLY IF the Name field has not been provided. If this field is used, the name returned to the client will be different than the name passed. This value will also be combined with a unique suffix. The provided value has the same validation rules as the Name field, and may be truncated by the length of the suffix required to make the value unique on the server.\n\nIf this field is specified and the generated name exists, the server will NOT return a 409 - instead, it will either return 201 Created or 500 with Reason ServerTimeout indicating a unique name could not be found in the time allotted, and the client should retry (optionally after the time indicated in the Retry-After header).\n\nApplied only if Name is not specified. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#idempotency"
fn metadata.withGeneration
withGeneration(generation)
"A sequence number representing a specific generation of the desired state. Populated by the system. Read-only."
fn metadata.withLabels
withLabels(labels)
"Map of string keys and values that can be used to organize and categorize (scope and select) objects. May match selectors of replication controllers and services. More info: http://kubernetes.io/docs/user-guide/labels"
fn metadata.withLabelsMixin
withLabelsMixin(labels)
"Map of string keys and values that can be used to organize and categorize (scope and select) objects. May match selectors of replication controllers and services. More info: http://kubernetes.io/docs/user-guide/labels"
Note: This function appends passed data to existing values
fn metadata.withName
withName(name)
"Name must be unique within a namespace. Is required when creating resources, although some resources may allow a client to request the generation of an appropriate name automatically. Name is primarily intended for creation idempotence and configuration definition. Cannot be updated. More info: http://kubernetes.io/docs/user-guide/identifiers#names"
fn metadata.withNamespace
withNamespace(namespace)
"Namespace defines the space within which each name must be unique. An empty namespace is equivalent to the \"default\" namespace, but \"default\" is the canonical representation. Not all objects are required to be scoped to a namespace - the value of this field for those objects will be empty.\n\nMust be a DNS_LABEL. Cannot be updated. More info: http://kubernetes.io/docs/user-guide/namespaces"
fn metadata.withOwnerReferences
withOwnerReferences(ownerReferences)
"List of objects depended by this object. If ALL objects in the list have been deleted, this object will be garbage collected. If this object is managed by a controller, then an entry in this list will point to this controller, with the controller field set to true. There cannot be more than one managing controller."
fn metadata.withOwnerReferencesMixin
withOwnerReferencesMixin(ownerReferences)
"List of objects depended by this object. If ALL objects in the list have been deleted, this object will be garbage collected. If this object is managed by a controller, then an entry in this list will point to this controller, with the controller field set to true. There cannot be more than one managing controller."
Note: This function appends passed data to existing values
fn metadata.withResourceVersion
withResourceVersion(resourceVersion)
"An opaque value that represents the internal version of this object that can be used by clients to determine when objects have changed. May be used for optimistic concurrency, change detection, and the watch operation on a resource or set of resources. Clients must treat these values as opaque and passed unmodified back to the server. They may only be valid for a particular resource or set of resources.\n\nPopulated by the system. Read-only. Value must be treated as opaque by clients and . More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#concurrency-control-and-consistency"
fn metadata.withSelfLink
withSelfLink(selfLink)
"SelfLink is a URL representing this object. Populated by the system. Read-only.\n\nDEPRECATED Kubernetes will stop propagating this field in 1.20 release and the field is planned to be removed in 1.21 release."
fn metadata.withUid
withUid(uid)
"UID is the unique in time and space value for this object. It is typically generated by the server on successful creation of a resource and is not allowed to change on PUT operations.\n\nPopulated by the system. Read-only. More info: http://kubernetes.io/docs/user-guide/identifiers#uids"
obj spec
"JobSpec defines the desired state of Job"
fn spec.withDeletionPolicy
withDeletionPolicy(deletionPolicy)
"DeletionPolicy specifies what will happen to the underlying external when this managed resource is deleted - either \"Delete\" or \"Orphan\" the external resource. This field is planned to be deprecated in favor of the ManagementPolicies field in a future release. Currently, both could be set independently and non-default values would be honored if the feature flag is enabled. See the design doc for more information: https://github.com/crossplane/crossplane/blob/499895a25d1a1a0ba1604944ef98ac7a1a71f197/design/design-doc-observe-only-resources.md?plain=1#L223"
fn spec.withManagementPolicies
withManagementPolicies(managementPolicies)
"THIS IS AN ALPHA FIELD. Do not use it in production. It is not honored unless the relevant Crossplane feature flag is enabled, and may be changed or removed without notice. ManagementPolicies specify the array of actions Crossplane is allowed to take on the managed and external resources. This field is planned to replace the DeletionPolicy field in a future release. Currently, both could be set independently and non-default values would be honored if the feature flag is enabled. If both are custom, the DeletionPolicy field will be ignored. See the design doc for more information: https://github.com/crossplane/crossplane/blob/499895a25d1a1a0ba1604944ef98ac7a1a71f197/design/design-doc-observe-only-resources.md?plain=1#L223 and this one: https://github.com/crossplane/crossplane/blob/444267e84783136daa93568b364a5f01228cacbe/design/one-pager-ignore-changes.md"
fn spec.withManagementPoliciesMixin
withManagementPoliciesMixin(managementPolicies)
"THIS IS AN ALPHA FIELD. Do not use it in production. It is not honored unless the relevant Crossplane feature flag is enabled, and may be changed or removed without notice. ManagementPolicies specify the array of actions Crossplane is allowed to take on the managed and external resources. This field is planned to replace the DeletionPolicy field in a future release. Currently, both could be set independently and non-default values would be honored if the feature flag is enabled. If both are custom, the DeletionPolicy field will be ignored. See the design doc for more information: https://github.com/crossplane/crossplane/blob/499895a25d1a1a0ba1604944ef98ac7a1a71f197/design/design-doc-observe-only-resources.md?plain=1#L223 and this one: https://github.com/crossplane/crossplane/blob/444267e84783136daa93568b364a5f01228cacbe/design/one-pager-ignore-changes.md"
Note: This function appends passed data to existing values
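For example, to orphan the external Dataproc job on deletion instead of deleting it (a sketch; import path illustrative):

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;  // illustrative path

job.new('wordcount')
+ job.spec.withDeletionPolicy('Orphan')
```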
obj spec.forProvider
fn spec.forProvider.withForceDelete
withForceDelete(forceDelete)
"By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete."
fn spec.forProvider.withHadoopConfig
withHadoopConfig(hadoopConfig)
fn spec.forProvider.withHadoopConfigMixin
withHadoopConfigMixin(hadoopConfig)
Note: This function appends passed data to existing values
fn spec.forProvider.withHiveConfig
withHiveConfig(hiveConfig)
fn spec.forProvider.withHiveConfigMixin
withHiveConfigMixin(hiveConfig)
Note: This function appends passed data to existing values
fn spec.forProvider.withLabels
withLabels(labels)
"The list of labels (key/value pairs) to add to the job."
fn spec.forProvider.withLabelsMixin
withLabelsMixin(labels)
"The list of labels (key/value pairs) to add to the job."
Note: This function appends passed data to existing values
fn spec.forProvider.withPigConfig
withPigConfig(pigConfig)
fn spec.forProvider.withPigConfigMixin
withPigConfigMixin(pigConfig)
Note: This function appends passed data to existing values
fn spec.forProvider.withPlacement
withPlacement(placement)
fn spec.forProvider.withPlacementMixin
withPlacementMixin(placement)
Note: This function appends passed data to existing values
fn spec.forProvider.withPrestoConfig
withPrestoConfig(prestoConfig)
fn spec.forProvider.withPrestoConfigMixin
withPrestoConfigMixin(prestoConfig)
Note: This function appends passed data to existing values
fn spec.forProvider.withProject
withProject(project)
"The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used."
fn spec.forProvider.withPysparkConfig
withPysparkConfig(pysparkConfig)
fn spec.forProvider.withPysparkConfigMixin
withPysparkConfigMixin(pysparkConfig)
Note: This function appends passed data to existing values
fn spec.forProvider.withReference
withReference(reference)
fn spec.forProvider.withReferenceMixin
withReferenceMixin(reference)
Note: This function appends passed data to existing values
fn spec.forProvider.withRegion
withRegion(region)
"The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global."
fn spec.forProvider.withScheduling
withScheduling(scheduling)
fn spec.forProvider.withSchedulingMixin
withSchedulingMixin(scheduling)
Note: This function appends passed data to existing values
fn spec.forProvider.withSparkConfig
withSparkConfig(sparkConfig)
fn spec.forProvider.withSparkConfigMixin
withSparkConfigMixin(sparkConfig)
Note: This function appends passed data to existing values
fn spec.forProvider.withSparksqlConfig
withSparksqlConfig(sparksqlConfig)
fn spec.forProvider.withSparksqlConfigMixin
withSparksqlConfigMixin(sparksqlConfig)
Note: This function appends passed data to existing values
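Putting a few of these together, a sketch of the common top-level fields (the project name and label values are hypothetical):

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;  // illustrative path

job.new('wordcount')
+ job.spec.forProvider.withRegion('us-central1')
+ job.spec.forProvider.withProject('my-project')  // hypothetical project
+ job.spec.forProvider.withLabels({ env: 'dev' })
+ job.spec.forProvider.withForceDelete(true)      // cancel before delete
```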
obj spec.forProvider.hadoopConfig
fn spec.forProvider.hadoopConfig.withArchiveUris
withArchiveUris(archiveUris)
"HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip."
fn spec.forProvider.hadoopConfig.withArchiveUrisMixin
withArchiveUrisMixin(archiveUris)
"HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip."
Note: This function appends passed data to existing values
fn spec.forProvider.hadoopConfig.withArgs
withArgs(args)
"The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission."
fn spec.forProvider.hadoopConfig.withArgsMixin
withArgsMixin(args)
"The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission."
Note: This function appends passed data to existing values
fn spec.forProvider.hadoopConfig.withFileUris
withFileUris(fileUris)
"HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks."
fn spec.forProvider.hadoopConfig.withFileUrisMixin
withFileUrisMixin(fileUris)
"HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks."
Note: This function appends passed data to existing values
fn spec.forProvider.hadoopConfig.withJarFileUris
withJarFileUris(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks."
fn spec.forProvider.hadoopConfig.withJarFileUrisMixin
withJarFileUrisMixin(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks."
Note: This function appends passed data to existing values
fn spec.forProvider.hadoopConfig.withLoggingConfig
withLoggingConfig(loggingConfig)
fn spec.forProvider.hadoopConfig.withLoggingConfigMixin
withLoggingConfigMixin(loggingConfig)
Note: This function appends passed data to existing values
fn spec.forProvider.hadoopConfig.withMainClass
withMainClass(mainClass)
"The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri"
fn spec.forProvider.hadoopConfig.withMainJarFileUri
withMainJarFileUri(mainJarFileUri)
"The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class"
fn spec.forProvider.hadoopConfig.withProperties
withProperties(properties)
"A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.."
fn spec.forProvider.hadoopConfig.withPropertiesMixin
withPropertiesMixin(properties)
"A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.."
Note: This function appends passed data to existing values
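A sketch of a Hadoop job config using the setters above (the jar and bucket names are hypothetical):

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;  // illustrative path
local hadoop = job.spec.forProvider.hadoopConfig;

job.new('wordcount')
+ hadoop.withMainJarFileUri('file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar')
+ hadoop.withArgs(['wordcount', 'gs://my-bucket/input/', 'gs://my-bucket/output/'])
```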
obj spec.forProvider.hadoopConfig.loggingConfig
fn spec.forProvider.hadoopConfig.loggingConfig.withDriverLogLevels
withDriverLogLevels(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
fn spec.forProvider.hadoopConfig.loggingConfig.withDriverLogLevelsMixin
withDriverLogLevelsMixin(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
Note: This function appends passed data to existing values
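For example, to raise driver verbosity for one package while keeping the root logger at INFO (a sketch):

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;  // illustrative path

job.spec.forProvider.hadoopConfig.loggingConfig.withDriverLogLevels({
  root: 'INFO',
  'org.apache': 'DEBUG',  // per-package override
})
```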
obj spec.forProvider.hiveConfig
fn spec.forProvider.hiveConfig.withContinueOnFailure
withContinueOnFailure(continueOnFailure)
"Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false."
fn spec.forProvider.hiveConfig.withJarFileUris
withJarFileUris(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs."
fn spec.forProvider.hiveConfig.withJarFileUrisMixin
withJarFileUrisMixin(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs."
Note: This function appends passed data to existing values
fn spec.forProvider.hiveConfig.withProperties
withProperties(properties)
"A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.."
fn spec.forProvider.hiveConfig.withPropertiesMixin
withPropertiesMixin(properties)
"A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.."
Note: This function appends passed data to existing values
fn spec.forProvider.hiveConfig.withQueryFileUri
withQueryFileUri(queryFileUri)
"HCFS URI of file containing Hive script to execute as the job. Conflicts with query_list"
fn spec.forProvider.hiveConfig.withQueryList
withQueryList(queryList)
"The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri"
fn spec.forProvider.hiveConfig.withQueryListMixin
withQueryListMixin(queryList)
"The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri"
Note: This function appends passed data to existing values
fn spec.forProvider.hiveConfig.withScriptVariables
withScriptVariables(scriptVariables)
"Mapping of query variable names to values (equivalent to the Hive command: SET name=\"value\";)."
fn spec.forProvider.hiveConfig.withScriptVariablesMixin
withScriptVariablesMixin(scriptVariables)
"Mapping of query variable names to values (equivalent to the Hive command: SET name=\"value\";)."
Note: This function appends passed data to existing values
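A sketch of a Hive job driven by a script in GCS; the bucket, script, and variable names are hypothetical:

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;  // illustrative path
local hive = job.spec.forProvider.hiveConfig;

job.new('hive-report')
+ hive.withQueryFileUri('gs://my-bucket/queries/report.hql')  // hypothetical script
+ hive.withScriptVariables({ run_date: '2024-01-01' })        // SET run_date="2024-01-01";
+ hive.withContinueOnFailure(false)
```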
obj spec.forProvider.pigConfig
fn spec.forProvider.pigConfig.withContinueOnFailure
withContinueOnFailure(continueOnFailure)
"Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false."
fn spec.forProvider.pigConfig.withJarFileUris
withJarFileUris(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs."
fn spec.forProvider.pigConfig.withJarFileUrisMixin
withJarFileUrisMixin(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs."
Note: This function appends passed data to existing values
fn spec.forProvider.pigConfig.withLoggingConfig
withLoggingConfig(loggingConfig)
fn spec.forProvider.pigConfig.withLoggingConfigMixin
withLoggingConfigMixin(loggingConfig)
Note: This function appends passed data to existing values
fn spec.forProvider.pigConfig.withProperties
withProperties(properties)
"A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code."
fn spec.forProvider.pigConfig.withPropertiesMixin
withPropertiesMixin(properties)
"A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code."
Note: This function appends passed data to existing values
fn spec.forProvider.pigConfig.withQueryFileUri
withQueryFileUri(queryFileUri)
"HCFS URI of file containing Hive script to execute as the job. Conflicts with query_list"
fn spec.forProvider.pigConfig.withQueryList
withQueryList(queryList)
"The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri"
fn spec.forProvider.pigConfig.withQueryListMixin
withQueryListMixin(queryList)
"The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri"
Note: This function appends passed data to existing values
fn spec.forProvider.pigConfig.withScriptVariables
withScriptVariables(scriptVariables)
"Mapping of query variable names to values (equivalent to the Pig command: name=[value])."
fn spec.forProvider.pigConfig.withScriptVariablesMixin
withScriptVariablesMixin(scriptVariables)
"Mapping of query variable names to values (equivalent to the Pig command: name=[value])."
Note: This function appends passed data to existing values
obj spec.forProvider.pigConfig.loggingConfig
fn spec.forProvider.pigConfig.loggingConfig.withDriverLogLevels
withDriverLogLevels(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
fn spec.forProvider.pigConfig.loggingConfig.withDriverLogLevelsMixin
withDriverLogLevelsMixin(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
Note: This function appends passed data to existing values
obj spec.forProvider.placement
fn spec.forProvider.placement.withClusterName
withClusterName(clusterName)
"The name of the cluster where the job will be submitted."
obj spec.forProvider.placement.clusterNameRef
"Reference to a Cluster in dataproc to populate clusterName."
fn spec.forProvider.placement.clusterNameRef.withName
withName(name)
"Name of the referenced object."
obj spec.forProvider.placement.clusterNameRef.policy
"Policies for referencing."
fn spec.forProvider.placement.clusterNameRef.policy.withResolution
withResolution(resolution)
"Resolution specifies whether resolution of this reference is required. The default is 'Required', which means the reconcile will fail if the reference cannot be resolved. 'Optional' means this reference will be a no-op if it cannot be resolved."
fn spec.forProvider.placement.clusterNameRef.policy.withResolve
withResolve(resolve)
"Resolve specifies when this reference should be resolved. The default is 'IfNotPresent', which will attempt to resolve the reference only when the corresponding field is not present. Use 'Always' to resolve the reference on every reconcile."
obj spec.forProvider.placement.clusterNameSelector
"Selector for a Cluster in dataproc to populate clusterName."
fn spec.forProvider.placement.clusterNameSelector.withMatchControllerRef
withMatchControllerRef(matchControllerRef)
"MatchControllerRef ensures an object with the same controller reference as the selecting object is selected."
fn spec.forProvider.placement.clusterNameSelector.withMatchLabels
withMatchLabels(matchLabels)
"MatchLabels ensures an object with matching labels is selected."
fn spec.forProvider.placement.clusterNameSelector.withMatchLabelsMixin
withMatchLabelsMixin(matchLabels)
"MatchLabels ensures an object with matching labels is selected."
Note: This function appends passed data to existing values
obj spec.forProvider.placement.clusterNameSelector.policy
"Policies for selection."
fn spec.forProvider.placement.clusterNameSelector.policy.withResolution
withResolution(resolution)
"Resolution specifies whether resolution of this reference is required. The default is 'Required', which means the reconcile will fail if the reference cannot be resolved. 'Optional' means this reference will be a no-op if it cannot be resolved."
fn spec.forProvider.placement.clusterNameSelector.policy.withResolve
withResolve(resolve)
"Resolve specifies when this reference should be resolved. The default is 'IfNotPresent', which will attempt to resolve the reference only when the corresponding field is not present. Use 'Always' to resolve the reference on every reconcile."
obj spec.forProvider.prestoConfig
fn spec.forProvider.prestoConfig.withClientTags
withClientTags(clientTags)
"Presto client tags to attach to this query."
fn spec.forProvider.prestoConfig.withClientTagsMixin
withClientTagsMixin(clientTags)
"Presto client tags to attach to this query."
Note: This function appends passed data to existing values
fn spec.forProvider.prestoConfig.withContinueOnFailure
withContinueOnFailure(continueOnFailure)
"Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false."
fn spec.forProvider.prestoConfig.withLoggingConfig
withLoggingConfig(loggingConfig)
fn spec.forProvider.prestoConfig.withLoggingConfigMixin
withLoggingConfigMixin(loggingConfig)
Note: This function appends passed data to existing values
fn spec.forProvider.prestoConfig.withOutputFormat
withOutputFormat(outputFormat)
"The format in which query output will be displayed. See the Presto documentation for supported output formats."
fn spec.forProvider.prestoConfig.withProperties
withProperties(properties)
"A mapping of property names to values. Used to set Presto session properties Equivalent to using the --session flag in the Presto CLI."
fn spec.forProvider.prestoConfig.withPropertiesMixin
withPropertiesMixin(properties)
"A mapping of property names to values. Used to set Presto session properties Equivalent to using the --session flag in the Presto CLI."
Note: This function appends passed data to existing values
fn spec.forProvider.prestoConfig.withQueryFileUri
withQueryFileUri(queryFileUri)
"The HCFS URI of the script that contains SQL queries. Conflicts with query_list"
fn spec.forProvider.prestoConfig.withQueryList
withQueryList(queryList)
"The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri"
fn spec.forProvider.prestoConfig.withQueryListMixin
withQueryListMixin(queryList)
"The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri"
Note: This function appends passed data to existing values
obj spec.forProvider.prestoConfig.loggingConfig
fn spec.forProvider.prestoConfig.loggingConfig.withDriverLogLevels
withDriverLogLevels(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
fn spec.forProvider.prestoConfig.loggingConfig.withDriverLogLevelsMixin
withDriverLogLevelsMixin(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
Note: This function appends passed data to existing values
obj spec.forProvider.pysparkConfig
fn spec.forProvider.pysparkConfig.withArchiveUris
withArchiveUris(archiveUris)
"HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip."
fn spec.forProvider.pysparkConfig.withArchiveUrisMixin
withArchiveUrisMixin(archiveUris)
"HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip."
Note: This function appends passed data to existing values
fn spec.forProvider.pysparkConfig.withArgs
withArgs(args)
"The arguments to pass to the driver."
fn spec.forProvider.pysparkConfig.withArgsMixin
withArgsMixin(args)
"The arguments to pass to the driver."
Note: This function appends passed data to existing values
fn spec.forProvider.pysparkConfig.withFileUris
withFileUris(fileUris)
"HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks."
fn spec.forProvider.pysparkConfig.withFileUrisMixin
withFileUrisMixin(fileUris)
"HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks."
Note: This function appends passed data to existing values
fn spec.forProvider.pysparkConfig.withJarFileUris
withJarFileUris(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks."
fn spec.forProvider.pysparkConfig.withJarFileUrisMixin
withJarFileUrisMixin(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks."
Note: This function appends passed data to existing values
fn spec.forProvider.pysparkConfig.withLoggingConfig
withLoggingConfig(loggingConfig)
fn spec.forProvider.pysparkConfig.withLoggingConfigMixin
withLoggingConfigMixin(loggingConfig)
Note: This function appends passed data to existing values
fn spec.forProvider.pysparkConfig.withMainPythonFileUri
withMainPythonFileUri(mainPythonFileUri)
"The HCFS URI of the main Python file to use as the driver. Must be a .py file."
fn spec.forProvider.pysparkConfig.withProperties
withProperties(properties)
"A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code."
fn spec.forProvider.pysparkConfig.withPropertiesMixin
withPropertiesMixin(properties)
"A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code."
Note: This function appends passed data to existing values
fn spec.forProvider.pysparkConfig.withPythonFileUris
withPythonFileUris(pythonFileUris)
"HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip."
fn spec.forProvider.pysparkConfig.withPythonFileUrisMixin
withPythonFileUrisMixin(pythonFileUris)
"HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip."
Note: This function appends passed data to existing values
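A sketch of a PySpark job; the bucket and file names are hypothetical:

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;  // illustrative path
local pyspark = job.spec.forProvider.pysparkConfig;

job.new('pyspark-etl')
+ pyspark.withMainPythonFileUri('gs://my-bucket/jobs/etl.py')
+ pyspark.withPythonFileUris(['gs://my-bucket/jobs/helpers.zip'])
+ pyspark.withProperties({ 'spark.executor.memory': '4g' })
```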
obj spec.forProvider.pysparkConfig.loggingConfig
fn spec.forProvider.pysparkConfig.loggingConfig.withDriverLogLevels
withDriverLogLevels(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
fn spec.forProvider.pysparkConfig.loggingConfig.withDriverLogLevelsMixin
withDriverLogLevelsMixin(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
Note: This function appends passed data to existing values
obj spec.forProvider.reference
fn spec.forProvider.reference.withJobId
withJobId(jobId)
obj spec.forProvider.regionRef
"Reference to a Cluster in dataproc to populate region."
fn spec.forProvider.regionRef.withName
withName(name)
"Name of the referenced object."
obj spec.forProvider.regionRef.policy
"Policies for referencing."
fn spec.forProvider.regionRef.policy.withResolution
withResolution(resolution)
"Resolution specifies whether resolution of this reference is required. The default is 'Required', which means the reconcile will fail if the reference cannot be resolved. 'Optional' means this reference will be a no-op if it cannot be resolved."
fn spec.forProvider.regionRef.policy.withResolve
withResolve(resolve)
"Resolve specifies when this reference should be resolved. The default is 'IfNotPresent', which will attempt to resolve the reference only when the corresponding field is not present. Use 'Always' to resolve the reference on every reconcile."
obj spec.forProvider.regionSelector
"Selector for a Cluster in dataproc to populate region."
fn spec.forProvider.regionSelector.withMatchControllerRef
withMatchControllerRef(matchControllerRef)
"MatchControllerRef ensures an object with the same controller reference as the selecting object is selected."
fn spec.forProvider.regionSelector.withMatchLabels
withMatchLabels(matchLabels)
"MatchLabels ensures an object with matching labels is selected."
fn spec.forProvider.regionSelector.withMatchLabelsMixin
withMatchLabelsMixin(matchLabels)
"MatchLabels ensures an object with matching labels is selected."
Note: This function appends passed data to existing values
obj spec.forProvider.regionSelector.policy
"Policies for selection."
fn spec.forProvider.regionSelector.policy.withResolution
withResolution(resolution)
"Resolution specifies whether resolution of this reference is required. The default is 'Required', which means the reconcile will fail if the reference cannot be resolved. 'Optional' means this reference will be a no-op if it cannot be resolved."
fn spec.forProvider.regionSelector.policy.withResolve
withResolve(resolve)
"Resolve specifies when this reference should be resolved. The default is 'IfNotPresent', which will attempt to resolve the reference only when the corresponding field is not present. Use 'Always' to resolve the reference on every reconcile."
obj spec.forProvider.scheduling
fn spec.forProvider.scheduling.withMaxFailuresPerHour
withMaxFailuresPerHour(maxFailuresPerHour)
"Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed."
fn spec.forProvider.scheduling.withMaxFailuresTotal
withMaxFailuresTotal(maxFailuresTotal)
"Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed."
obj spec.forProvider.sparkConfig
fn spec.forProvider.sparkConfig.withArchiveUris
withArchiveUris(archiveUris)
"HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip."
fn spec.forProvider.sparkConfig.withArchiveUrisMixin
withArchiveUrisMixin(archiveUris)
"HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip."
Note: This function appends passed data to existing values
fn spec.forProvider.sparkConfig.withArgs
withArgs(args)
"The arguments to pass to the driver."
fn spec.forProvider.sparkConfig.withArgsMixin
withArgsMixin(args)
"The arguments to pass to the driver."
Note: This function appends passed data to existing values
fn spec.forProvider.sparkConfig.withFileUris
withFileUris(fileUris)
"HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks."
fn spec.forProvider.sparkConfig.withFileUrisMixin
withFileUrisMixin(fileUris)
"HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks."
Note: This function appends passed data to existing values
fn spec.forProvider.sparkConfig.withJarFileUris
withJarFileUris(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks."
fn spec.forProvider.sparkConfig.withJarFileUrisMixin
withJarFileUrisMixin(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks."
Note: This function appends passed data to existing values
fn spec.forProvider.sparkConfig.withLoggingConfig
withLoggingConfig(loggingConfig)
fn spec.forProvider.sparkConfig.withLoggingConfigMixin
withLoggingConfigMixin(loggingConfig)
Note: This function appends passed data to existing values
fn spec.forProvider.sparkConfig.withMainClass
withMainClass(mainClass)
"The class containing the main method of the driver. Must be in a provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri"
fn spec.forProvider.sparkConfig.withMainJarFileUri
withMainJarFileUri(mainJarFileUri)
"The HCFS URI of jar file containing the driver jar. Conflicts with main_class"
fn spec.forProvider.sparkConfig.withProperties
withProperties(properties)
"A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code."
fn spec.forProvider.sparkConfig.withPropertiesMixin
withPropertiesMixin(properties)
"A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code."
Note: This function appends passed data to existing values
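A sketch of a Spark job that runs the stock SparkPi example; the jar path matches the usual Dataproc image layout but is an assumption here:

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;  // illustrative path
local spark = job.spec.forProvider.sparkConfig;

job.new('spark-pi')
+ spark.withMainClass('org.apache.spark.examples.SparkPi')
+ spark.withJarFileUris(['file:///usr/lib/spark/examples/jars/spark-examples.jar'])
+ spark.withArgs(['1000'])  // number of partitions to sample
```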
obj spec.forProvider.sparkConfig.loggingConfig
fn spec.forProvider.sparkConfig.loggingConfig.withDriverLogLevels
withDriverLogLevels(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
fn spec.forProvider.sparkConfig.loggingConfig.withDriverLogLevelsMixin
withDriverLogLevelsMixin(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
Note: This function appends passed data to existing values
obj spec.forProvider.sparksqlConfig
fn spec.forProvider.sparksqlConfig.withJarFileUris
withJarFileUris(jarFileUris)
"HCFS URIs of jar files to be added to the Spark CLASSPATH."
fn spec.forProvider.sparksqlConfig.withJarFileUrisMixin
withJarFileUrisMixin(jarFileUris)
"HCFS URIs of jar files to be added to the Spark CLASSPATH."
Note: This function appends passed data to existing values
fn spec.forProvider.sparksqlConfig.withLoggingConfig
withLoggingConfig(loggingConfig)
fn spec.forProvider.sparksqlConfig.withLoggingConfigMixin
withLoggingConfigMixin(loggingConfig)
Note: This function appends passed data to existing values
fn spec.forProvider.sparksqlConfig.withProperties
withProperties(properties)
"A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten."
fn spec.forProvider.sparksqlConfig.withPropertiesMixin
withPropertiesMixin(properties)
"A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten."
Note: This function appends passed data to existing values
fn spec.forProvider.sparksqlConfig.withQueryFileUri
withQueryFileUri(queryFileUri)
"The HCFS URI of the script that contains SQL queries. Conflicts with query_list"
fn spec.forProvider.sparksqlConfig.withQueryList
withQueryList(queryList)
"The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri"
fn spec.forProvider.sparksqlConfig.withQueryListMixin
withQueryListMixin(queryList)
"The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri"
Note: This function appends passed data to existing values
fn spec.forProvider.sparksqlConfig.withScriptVariables
withScriptVariables(scriptVariables)
"Mapping of query variable names to values (equivalent to the Spark SQL command: SET name=\"value\";)."
fn spec.forProvider.sparksqlConfig.withScriptVariablesMixin
withScriptVariablesMixin(scriptVariables)
"Mapping of query variable names to values (equivalent to the Spark SQL command: SET name=\"value\";)."
Note: This function appends passed data to existing values
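A sketch of an inline Spark SQL job; the table and UDF jar names are hypothetical:

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;  // illustrative path
local sparksql = job.spec.forProvider.sparksqlConfig;

job.new('sparksql-report')
+ sparksql.withQueryList(['SELECT word, COUNT(*) AS c FROM words GROUP BY word;'])
+ sparksql.withJarFileUris(['gs://my-bucket/udfs/my-udfs.jar'])  // hypothetical UDF jar
```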
obj spec.forProvider.sparksqlConfig.loggingConfig
fn spec.forProvider.sparksqlConfig.loggingConfig.withDriverLogLevels
withDriverLogLevels(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
fn spec.forProvider.sparksqlConfig.loggingConfig.withDriverLogLevelsMixin
withDriverLogLevelsMixin(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
Note: This function appends passed data to existing values
obj spec.initProvider
"THIS IS AN ALPHA FIELD. Do not use it in production. It is not honored unless the relevant Crossplane feature flag is enabled, and may be changed or removed without notice. InitProvider holds the same fields as ForProvider, with the exception of Identifier and other resource reference fields. The fields that are in InitProvider are merged into ForProvider when the resource is created. The same fields are also added to the terraform ignore_changes hook, to avoid updating them after creation. This is useful for fields that are required on creation, but we do not desire to update them after creation, for example because of an external controller is managing them, like an autoscaler."
fn spec.initProvider.withForceDelete
withForceDelete(forceDelete)
"By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete."
fn spec.initProvider.withHadoopConfig
withHadoopConfig(hadoopConfig)
fn spec.initProvider.withHadoopConfigMixin
withHadoopConfigMixin(hadoopConfig)
Note: This function appends passed data to existing values
fn spec.initProvider.withHiveConfig
withHiveConfig(hiveConfig)
fn spec.initProvider.withHiveConfigMixin
withHiveConfigMixin(hiveConfig)
Note: This function appends passed data to existing values
fn spec.initProvider.withLabels
withLabels(labels)
"The list of labels (key/value pairs) to add to the job."
fn spec.initProvider.withLabelsMixin
withLabelsMixin(labels)
"The list of labels (key/value pairs) to add to the job."
Note: This function appends passed data to existing values
fn spec.initProvider.withPigConfig
withPigConfig(pigConfig)
fn spec.initProvider.withPigConfigMixin
withPigConfigMixin(pigConfig)
Note: This function appends passed data to existing values
fn spec.initProvider.withPlacement
withPlacement(placement)
fn spec.initProvider.withPlacementMixin
withPlacementMixin(placement)
Note: This function appends passed data to existing values
fn spec.initProvider.withPrestoConfig
withPrestoConfig(prestoConfig)
fn spec.initProvider.withPrestoConfigMixin
withPrestoConfigMixin(prestoConfig)
Note: This function appends passed data to existing values
fn spec.initProvider.withProject
withProject(project)
"The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used."
fn spec.initProvider.withPysparkConfig
withPysparkConfig(pysparkConfig)
fn spec.initProvider.withPysparkConfigMixin
withPysparkConfigMixin(pysparkConfig)
Note: This function appends passed data to existing values
fn spec.initProvider.withReference
withReference(reference)
fn spec.initProvider.withReferenceMixin
withReferenceMixin(reference)
Note: This function appends passed data to existing values
fn spec.initProvider.withScheduling
withScheduling(scheduling)
fn spec.initProvider.withSchedulingMixin
withSchedulingMixin(scheduling)
Note: This function appends passed data to existing values
fn spec.initProvider.withSparkConfig
withSparkConfig(sparkConfig)
fn spec.initProvider.withSparkConfigMixin
withSparkConfigMixin(sparkConfig)
Note: This function appends passed data to existing values
fn spec.initProvider.withSparksqlConfig
withSparksqlConfig(sparksqlConfig)
fn spec.initProvider.withSparksqlConfigMixin
withSparksqlConfigMixin(sparksqlConfig)
Note: This function appends passed data to existing values
obj spec.initProvider.hadoopConfig
fn spec.initProvider.hadoopConfig.withArchiveUris
withArchiveUris(archiveUris)
"HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip."
fn spec.initProvider.hadoopConfig.withArchiveUrisMixin
withArchiveUrisMixin(archiveUris)
"HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip."
Note: This function appends passed data to existing values
fn spec.initProvider.hadoopConfig.withArgs
withArgs(args)
"The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission."
fn spec.initProvider.hadoopConfig.withArgsMixin
withArgsMixin(args)
"The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission."
Note: This function appends passed data to existing values
fn spec.initProvider.hadoopConfig.withFileUris
withFileUris(fileUris)
"HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks."
fn spec.initProvider.hadoopConfig.withFileUrisMixin
withFileUrisMixin(fileUris)
"HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks."
Note: This function appends passed data to existing values
fn spec.initProvider.hadoopConfig.withJarFileUris
withJarFileUris(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks."
fn spec.initProvider.hadoopConfig.withJarFileUrisMixin
withJarFileUrisMixin(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks."
Note: This function appends passed data to existing values
fn spec.initProvider.hadoopConfig.withLoggingConfig
withLoggingConfig(loggingConfig)
fn spec.initProvider.hadoopConfig.withLoggingConfigMixin
withLoggingConfigMixin(loggingConfig)
Note: This function appends passed data to existing values
fn spec.initProvider.hadoopConfig.withMainClass
withMainClass(mainClass)
"The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri"
fn spec.initProvider.hadoopConfig.withMainJarFileUri
withMainJarFileUri(mainJarFileUri)
"The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class"
fn spec.initProvider.hadoopConfig.withProperties
withProperties(properties)
"A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.."
fn spec.initProvider.hadoopConfig.withPropertiesMixin
withPropertiesMixin(properties)
"A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.."
Note: This function appends passed data to existing values
obj spec.initProvider.hadoopConfig.loggingConfig
fn spec.initProvider.hadoopConfig.loggingConfig.withDriverLogLevels
withDriverLogLevels(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
fn spec.initProvider.hadoopConfig.loggingConfig.withDriverLogLevelsMixin
withDriverLogLevelsMixin(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
Note: This function appends passed data to existing values
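Taken together, the hadoopConfig setters compose into a single mixin. A hedged sketch, where the import path, jar path, and bucket names are placeholders:

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;

// mainJarFileUri conflicts with mainClass; set only one entry point.
job.spec.initProvider.hadoopConfig.withMainJarFileUri('file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar')
+ job.spec.initProvider.hadoopConfig.withArgs(['wordcount', 'gs://my-bucket/in/', 'gs://my-bucket/out/'])
+ job.spec.initProvider.hadoopConfig.loggingConfig.withDriverLogLevels({ root: 'INFO' })
```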
obj spec.initProvider.hiveConfig
fn spec.initProvider.hiveConfig.withContinueOnFailure
withContinueOnFailure(continueOnFailure)
"Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false."
fn spec.initProvider.hiveConfig.withJarFileUris
withJarFileUris(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs."
fn spec.initProvider.hiveConfig.withJarFileUrisMixin
withJarFileUrisMixin(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs."
Note: This function appends passed data to existing values
fn spec.initProvider.hiveConfig.withProperties
withProperties(properties)
"A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.."
fn spec.initProvider.hiveConfig.withPropertiesMixin
withPropertiesMixin(properties)
"A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.."
Note: This function appends passed data to existing values
fn spec.initProvider.hiveConfig.withQueryFileUri
withQueryFileUri(queryFileUri)
"HCFS URI of file containing Hive script to execute as the job. Conflicts with query_list"
fn spec.initProvider.hiveConfig.withQueryList
withQueryList(queryList)
"The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri"
fn spec.initProvider.hiveConfig.withQueryListMixin
withQueryListMixin(queryList)
"The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri"
Note: This function appends passed data to existing values
fn spec.initProvider.hiveConfig.withScriptVariables
withScriptVariables(scriptVariables)
"Mapping of query variable names to values (equivalent to the Hive command: SET name=\"value\";)."
fn spec.initProvider.hiveConfig.withScriptVariablesMixin
withScriptVariablesMixin(scriptVariables)
"Mapping of query variable names to values (equivalent to the Hive command: SET name=\"value\";)."
Note: This function appends passed data to existing values
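A sketch of a Hive job config using an inline query list with script variables; all values are placeholders:

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;

// queryList conflicts with queryFileUri; set only one of the two.
job.spec.initProvider.hiveConfig.withQueryList(['SELECT * FROM ${tbl} LIMIT 10'])
+ job.spec.initProvider.hiveConfig.withScriptVariables({ tbl: 'access_logs' })
+ job.spec.initProvider.hiveConfig.withContinueOnFailure(true)
```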
obj spec.initProvider.pigConfig
fn spec.initProvider.pigConfig.withContinueOnFailure
withContinueOnFailure(continueOnFailure)
"Whether to continue executing queries if a query fails. The default value is false. Setting to true can be useful when executing independent parallel queries. Defaults to false."
fn spec.initProvider.pigConfig.withJarFileUris
withJarFileUris(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs."
fn spec.initProvider.pigConfig.withJarFileUrisMixin
withJarFileUrisMixin(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs."
Note: This function appends passed data to existing values
fn spec.initProvider.pigConfig.withLoggingConfig
withLoggingConfig(loggingConfig)
fn spec.initProvider.pigConfig.withLoggingConfigMixin
withLoggingConfigMixin(loggingConfig)
Note: This function appends passed data to existing values
fn spec.initProvider.pigConfig.withProperties
withProperties(properties)
"A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code."
fn spec.initProvider.pigConfig.withPropertiesMixin
withPropertiesMixin(properties)
"A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code."
Note: This function appends passed data to existing values
fn spec.initProvider.pigConfig.withQueryFileUri
withQueryFileUri(queryFileUri)
"HCFS URI of file containing Hive script to execute as the job. Conflicts with query_list"
fn spec.initProvider.pigConfig.withQueryList
withQueryList(queryList)
"The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri"
fn spec.initProvider.pigConfig.withQueryListMixin
withQueryListMixin(queryList)
"The list of Hive queries or statements to execute as part of the job. Conflicts with query_file_uri"
Note: This function appends passed data to existing values
fn spec.initProvider.pigConfig.withScriptVariables
withScriptVariables(scriptVariables)
"Mapping of query variable names to values (equivalent to the Pig command: name=[value])."
fn spec.initProvider.pigConfig.withScriptVariablesMixin
withScriptVariablesMixin(scriptVariables)
"Mapping of query variable names to values (equivalent to the Pig command: name=[value])."
Note: This function appends passed data to existing values
obj spec.initProvider.pigConfig.loggingConfig
fn spec.initProvider.pigConfig.loggingConfig.withDriverLogLevels
withDriverLogLevels(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
fn spec.initProvider.pigConfig.loggingConfig.withDriverLogLevelsMixin
withDriverLogLevelsMixin(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
Note: This function appends passed data to existing values
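Analogously for Pig, a hedged sketch pointing at a script file instead of an inline query list; all URIs are placeholders:

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;

job.spec.initProvider.pigConfig.withQueryFileUri('gs://my-bucket/scripts/cleanup.pig')
+ job.spec.initProvider.pigConfig.withScriptVariables({ input: 'gs://my-bucket/raw/' })
+ job.spec.initProvider.pigConfig.loggingConfig.withDriverLogLevels({ 'org.apache.pig': 'DEBUG' })
```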
obj spec.initProvider.prestoConfig
fn spec.initProvider.prestoConfig.withClientTags
withClientTags(clientTags)
"Presto client tags to attach to this query."
fn spec.initProvider.prestoConfig.withClientTagsMixin
withClientTagsMixin(clientTags)
"Presto client tags to attach to this query."
Note: This function appends passed data to existing values
fn spec.initProvider.prestoConfig.withContinueOnFailure
withContinueOnFailure(continueOnFailure)
"Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false."
fn spec.initProvider.prestoConfig.withLoggingConfig
withLoggingConfig(loggingConfig)
fn spec.initProvider.prestoConfig.withLoggingConfigMixin
withLoggingConfigMixin(loggingConfig)
Note: This function appends passed data to existing values
fn spec.initProvider.prestoConfig.withOutputFormat
withOutputFormat(outputFormat)
"The format in which query output will be displayed. See the Presto documentation for supported output formats."
fn spec.initProvider.prestoConfig.withProperties
withProperties(properties)
"A mapping of property names to values. Used to set Presto session properties Equivalent to using the --session flag in the Presto CLI."
fn spec.initProvider.prestoConfig.withPropertiesMixin
withPropertiesMixin(properties)
"A mapping of property names to values. Used to set Presto session properties Equivalent to using the --session flag in the Presto CLI."
Note: This function appends passed data to existing values
fn spec.initProvider.prestoConfig.withQueryFileUri
withQueryFileUri(queryFileUri)
"The HCFS URI of the script that contains SQL queries. Conflicts with query_list"
fn spec.initProvider.prestoConfig.withQueryList
withQueryList(queryList)
"The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri"
fn spec.initProvider.prestoConfig.withQueryListMixin
withQueryListMixin(queryList)
"The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri"
Note: This function appends passed data to existing values
obj spec.initProvider.prestoConfig.loggingConfig
fn spec.initProvider.prestoConfig.loggingConfig.withDriverLogLevels
withDriverLogLevels(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
fn spec.initProvider.prestoConfig.loggingConfig.withDriverLogLevelsMixin
withDriverLogLevelsMixin(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
Note: This function appends passed data to existing values
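A sketch of a Presto job config; the values are placeholders, and session property names depend on your Presto version:

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;

job.spec.initProvider.prestoConfig.withQueryList(['SHOW CATALOGS'])
+ job.spec.initProvider.prestoConfig.withOutputFormat('JSON')
// Session properties, equivalent to `--session` on the Presto CLI:
+ job.spec.initProvider.prestoConfig.withProperties({ query_max_run_time: '30m' })
```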
obj spec.initProvider.pysparkConfig
fn spec.initProvider.pysparkConfig.withArchiveUris
withArchiveUris(archiveUris)
"HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip."
fn spec.initProvider.pysparkConfig.withArchiveUrisMixin
withArchiveUrisMixin(archiveUris)
"HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip."
Note: This function appends passed data to existing values
fn spec.initProvider.pysparkConfig.withArgs
withArgs(args)
"The arguments to pass to the driver."
fn spec.initProvider.pysparkConfig.withArgsMixin
withArgsMixin(args)
"The arguments to pass to the driver."
Note: This function appends passed data to existing values
fn spec.initProvider.pysparkConfig.withFileUris
withFileUris(fileUris)
"HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks."
fn spec.initProvider.pysparkConfig.withFileUrisMixin
withFileUrisMixin(fileUris)
"HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks."
Note: This function appends passed data to existing values
fn spec.initProvider.pysparkConfig.withJarFileUris
withJarFileUris(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks."
fn spec.initProvider.pysparkConfig.withJarFileUrisMixin
withJarFileUrisMixin(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks."
Note: This function appends passed data to existing values
fn spec.initProvider.pysparkConfig.withLoggingConfig
withLoggingConfig(loggingConfig)
fn spec.initProvider.pysparkConfig.withLoggingConfigMixin
withLoggingConfigMixin(loggingConfig)
Note: This function appends passed data to existing values
fn spec.initProvider.pysparkConfig.withMainPythonFileUri
withMainPythonFileUri(mainPythonFileUri)
"The HCFS URI of the main Python file to use as the driver. Must be a .py file."
fn spec.initProvider.pysparkConfig.withProperties
withProperties(properties)
"A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code."
fn spec.initProvider.pysparkConfig.withPropertiesMixin
withPropertiesMixin(properties)
"A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code."
Note: This function appends passed data to existing values
fn spec.initProvider.pysparkConfig.withPythonFileUris
withPythonFileUris(pythonFileUris)
"HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip."
fn spec.initProvider.pysparkConfig.withPythonFileUrisMixin
withPythonFileUrisMixin(pythonFileUris)
"HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip."
Note: This function appends passed data to existing values
obj spec.initProvider.pysparkConfig.loggingConfig
fn spec.initProvider.pysparkConfig.loggingConfig.withDriverLogLevels
withDriverLogLevels(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
fn spec.initProvider.pysparkConfig.loggingConfig.withDriverLogLevelsMixin
withDriverLogLevelsMixin(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
Note: This function appends passed data to existing values
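A PySpark sketch wiring a main script, helper modules, and Spark properties; all URIs and values are placeholders:

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;

job.spec.initProvider.pysparkConfig.withMainPythonFileUri('gs://my-bucket/jobs/etl.py')
+ job.spec.initProvider.pysparkConfig.withPythonFileUris(['gs://my-bucket/jobs/helpers.zip'])
+ job.spec.initProvider.pysparkConfig.withProperties({ 'spark.executor.memory': '4g' })
```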
obj spec.initProvider.reference
fn spec.initProvider.reference.withJobId
withJobId(jobId)
obj spec.initProvider.scheduling
fn spec.initProvider.scheduling.withMaxFailuresPerHour
withMaxFailuresPerHour(maxFailuresPerHour)
"Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed."
fn spec.initProvider.scheduling.withMaxFailuresTotal
withMaxFailuresTotal(maxFailuresTotal)
"Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed."
obj spec.initProvider.sparkConfig
fn spec.initProvider.sparkConfig.withArchiveUris
withArchiveUris(archiveUris)
"HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip."
fn spec.initProvider.sparkConfig.withArchiveUrisMixin
withArchiveUrisMixin(archiveUris)
"HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip."
Note: This function appends passed data to existing values
fn spec.initProvider.sparkConfig.withArgs
withArgs(args)
"The arguments to pass to the driver."
fn spec.initProvider.sparkConfig.withArgsMixin
withArgsMixin(args)
"The arguments to pass to the driver."
Note: This function appends passed data to existing values
fn spec.initProvider.sparkConfig.withFileUris
withFileUris(fileUris)
"HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks."
fn spec.initProvider.sparkConfig.withFileUrisMixin
withFileUrisMixin(fileUris)
"HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks."
Note: This function appends passed data to existing values
fn spec.initProvider.sparkConfig.withJarFileUris
withJarFileUris(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks."
fn spec.initProvider.sparkConfig.withJarFileUrisMixin
withJarFileUrisMixin(jarFileUris)
"HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks."
Note: This function appends passed data to existing values
fn spec.initProvider.sparkConfig.withLoggingConfig
withLoggingConfig(loggingConfig)
fn spec.initProvider.sparkConfig.withLoggingConfigMixin
withLoggingConfigMixin(loggingConfig)
Note: This function appends passed data to existing values
fn spec.initProvider.sparkConfig.withMainClass
withMainClass(mainClass)
"The class containing the main method of the driver. Must be in a provided jar or jar that is already on the classpath. Conflicts with main_jar_file_uri"
fn spec.initProvider.sparkConfig.withMainJarFileUri
withMainJarFileUri(mainJarFileUri)
"The HCFS URI of jar file containing the driver jar. Conflicts with main_class"
fn spec.initProvider.sparkConfig.withProperties
withProperties(properties)
"A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code."
fn spec.initProvider.sparkConfig.withPropertiesMixin
withPropertiesMixin(properties)
"A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code."
Note: This function appends passed data to existing values
obj spec.initProvider.sparkConfig.loggingConfig
fn spec.initProvider.sparkConfig.loggingConfig.withDriverLogLevels
withDriverLogLevels(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
fn spec.initProvider.sparkConfig.loggingConfig.withDriverLogLevelsMixin
withDriverLogLevelsMixin(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
Note: This function appends passed data to existing values
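A Spark sketch using a main class resolved from an extra jar; the paths are placeholders:

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;

// mainClass conflicts with mainJarFileUri; pick one entry point.
job.spec.initProvider.sparkConfig.withMainClass('org.apache.spark.examples.SparkPi')
+ job.spec.initProvider.sparkConfig.withJarFileUris(['file:///usr/lib/spark/examples/jars/spark-examples.jar'])
+ job.spec.initProvider.sparkConfig.withArgs(['1000'])
```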
obj spec.initProvider.sparksqlConfig
fn spec.initProvider.sparksqlConfig.withJarFileUris
withJarFileUris(jarFileUris)
"HCFS URIs of jar files to be added to the Spark CLASSPATH."
fn spec.initProvider.sparksqlConfig.withJarFileUrisMixin
withJarFileUrisMixin(jarFileUris)
"HCFS URIs of jar files to be added to the Spark CLASSPATH."
Note: This function appends passed data to existing values
fn spec.initProvider.sparksqlConfig.withLoggingConfig
withLoggingConfig(loggingConfig)
fn spec.initProvider.sparksqlConfig.withLoggingConfigMixin
withLoggingConfigMixin(loggingConfig)
Note: This function appends passed data to existing values
fn spec.initProvider.sparksqlConfig.withProperties
withProperties(properties)
"A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten."
fn spec.initProvider.sparksqlConfig.withPropertiesMixin
withPropertiesMixin(properties)
"A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten."
Note: This function appends passed data to existing values
fn spec.initProvider.sparksqlConfig.withQueryFileUri
withQueryFileUri(queryFileUri)
"The HCFS URI of the script that contains SQL queries. Conflicts with query_list"
fn spec.initProvider.sparksqlConfig.withQueryList
withQueryList(queryList)
"The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri"
fn spec.initProvider.sparksqlConfig.withQueryListMixin
withQueryListMixin(queryList)
"The list of SQL queries or statements to execute as part of the job. Conflicts with query_file_uri"
Note: This function appends passed data to existing values
fn spec.initProvider.sparksqlConfig.withScriptVariables
withScriptVariables(scriptVariables)
"Mapping of query variable names to values (equivalent to the Spark SQL command: SET name=\"value\";)."
fn spec.initProvider.sparksqlConfig.withScriptVariablesMixin
withScriptVariablesMixin(scriptVariables)
"Mapping of query variable names to values (equivalent to the Spark SQL command: SET name=\"value\";)."
Note: This function appends passed data to existing values
obj spec.initProvider.sparksqlConfig.loggingConfig
fn spec.initProvider.sparksqlConfig.loggingConfig.withDriverLogLevels
withDriverLogLevels(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
fn spec.initProvider.sparksqlConfig.loggingConfig.withDriverLogLevelsMixin
withDriverLogLevelsMixin(driverLogLevels)
"The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'"
Note: This function appends passed data to existing values
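A Spark SQL sketch running inline statements with tuned shuffle parallelism; values are placeholders:

```jsonnet
local job = (import 'provider-gcp/main.libsonnet').dataproc.v1beta1.job;

job.spec.initProvider.sparksqlConfig.withQueryList(['SHOW TABLES'])
+ job.spec.initProvider.sparksqlConfig.withProperties({ 'spark.sql.shuffle.partitions': '64' })
```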
obj spec.providerConfigRef
"ProviderConfigReference specifies how the provider that will be used to create, observe, update, and delete this managed resource should be configured."
fn spec.providerConfigRef.withName
withName(name)
"Name of the referenced object."
obj spec.providerConfigRef.policy
"Policies for referencing."
fn spec.providerConfigRef.policy.withResolution
withResolution(resolution)
"Resolution specifies whether resolution of this reference is required. The default is 'Required', which means the reconcile will fail if the reference cannot be resolved. 'Optional' means this reference will be a no-op if it cannot be resolved."
fn spec.providerConfigRef.policy.withResolve
withResolve(resolve)
"Resolve specifies when this reference should be resolved. The default is 'IfNotPresent', which will attempt to resolve the reference only when the corresponding field is not present. Use 'Always' to resolve the reference on every reconcile."
obj spec.providerRef
"ProviderReference specifies the provider that will be used to create, observe, update, and delete this managed resource. Deprecated: Please use ProviderConfigReference, i.e. providerConfigRef
"
fn spec.providerRef.withName
withName(name)
"Name of the referenced object."
obj spec.providerRef.policy
"Policies for referencing."
fn spec.providerRef.policy.withResolution
withResolution(resolution)
"Resolution specifies whether resolution of this reference is required. The default is 'Required', which means the reconcile will fail if the reference cannot be resolved. 'Optional' means this reference will be a no-op if it cannot be resolved."
fn spec.providerRef.policy.withResolve
withResolve(resolve)
"Resolve specifies when this reference should be resolved. The default is 'IfNotPresent', which will attempt to resolve the reference only when the corresponding field is not present. Use 'Always' to resolve the reference on every reconcile."
obj spec.publishConnectionDetailsTo
"PublishConnectionDetailsTo specifies the connection secret config which contains a name, metadata and a reference to secret store config to which any connection details for this managed resource should be written. Connection details frequently include the endpoint, username, and password required to connect to the managed resource."
fn spec.publishConnectionDetailsTo.withName
withName(name)
"Name is the name of the connection secret."
obj spec.publishConnectionDetailsTo.configRef
"SecretStoreConfigRef specifies which secret store config should be used for this ConnectionSecret."
fn spec.publishConnectionDetailsTo.configRef.withName
withName(name)
"Name of the referenced object."
obj spec.publishConnectionDetailsTo.configRef.policy
"Policies for referencing."
fn spec.publishConnectionDetailsTo.configRef.policy.withResolution
withResolution(resolution)
"Resolution specifies whether resolution of this reference is required. The default is 'Required', which means the reconcile will fail if the reference cannot be resolved. 'Optional' means this reference will be a no-op if it cannot be resolved."
fn spec.publishConnectionDetailsTo.configRef.policy.withResolve
withResolve(resolve)
"Resolve specifies when this reference should be resolved. The default is 'IfNotPresent', which will attempt to resolve the reference only when the corresponding field is not present. Use 'Always' to resolve the reference on every reconcile."
obj spec.publishConnectionDetailsTo.metadata
"Metadata is the metadata for connection secret."
fn spec.publishConnectionDetailsTo.metadata.withAnnotations
withAnnotations(annotations)
"Annotations are the annotations to be added to connection secret. - For Kubernetes secrets, this will be used as \"metadata.annotations\". - It is up to Secret Store implementation for others store types."
fn spec.publishConnectionDetailsTo.metadata.withAnnotationsMixin
withAnnotationsMixin(annotations)
"Annotations are the annotations to be added to connection secret. - For Kubernetes secrets, this will be used as \"metadata.annotations\". - It is up to Secret Store implementation for others store types."
Note: This function appends passed data to existing values
fn spec.publishConnectionDetailsTo.metadata.withLabels
withLabels(labels)
"Labels are the labels/tags to be added to connection secret. - For Kubernetes secrets, this will be used as \"metadata.labels\". - It is up to Secret Store implementation for others store types."
fn spec.publishConnectionDetailsTo.metadata.withLabelsMixin
withLabelsMixin(labels)
"Labels are the labels/tags to be added to connection secret. - For Kubernetes secrets, this will be used as \"metadata.labels\". - It is up to Secret Store implementation for others store types."
Note: This function appends passed data to existing values
fn spec.publishConnectionDetailsTo.metadata.withType
withType(type)
"Type is the SecretType for the connection secret. - Only valid for Kubernetes Secret Stores."
obj spec.writeConnectionSecretToRef
"WriteConnectionSecretToReference specifies the namespace and name of a Secret to which any connection details for this managed resource should be written. Connection details frequently include the endpoint, username, and password required to connect to the managed resource. This field is planned to be replaced in a future release in favor of PublishConnectionDetailsTo. Currently, both could be set independently and connection details would be published to both without affecting each other."
fn spec.writeConnectionSecretToRef.withName
withName(name)
"Name of the secret."
fn spec.writeConnectionSecretToRef.withNamespace
withNamespace(namespace)
"Namespace of the secret."