public static final class PySparkBatch.Builder extends GeneratedMessageV3.Builder<PySparkBatch.Builder> implements PySparkBatchOrBuilder
A configuration for running an [Apache PySpark](https://spark.apache.org/docs/latest/api/python/getting_started/quickstart.html) batch workload.
Protobuf type google.cloud.dataproc.v1.PySparkBatch
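For orientation, here is a minimal sketch that assembles a PySparkBatch with this builder, assuming the standard generated PySparkBatch.newBuilder() factory; all gs:// URIs and argument strings are placeholders rather than values taken from this reference.

```java
import com.google.cloud.dataproc.v1.PySparkBatch;

public class PySparkBatchBuilderExample {
  public static void main(String[] args) {
    PySparkBatch batch =
        PySparkBatch.newBuilder()
            // Required: the HCFS URI of the driver script, which must be a .py file.
            .setMainPythonFileUri("gs://example-bucket/scripts/word_count.py")
            // Optional driver arguments; --conf-style settings belong in batch properties instead.
            .addArgs("--input=gs://example-bucket/data/input.txt")
            .addArgs("--output=gs://example-bucket/data/output/")
            // Optional Python and jar dependencies distributed with the workload.
            .addPythonFileUris("gs://example-bucket/deps/helpers.zip")
            .addJarFileUris("gs://example-bucket/jars/spark-bigquery-connector.jar")
            .build();

    System.out.println(batch);
  }
}
```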
Methods inherited from class GeneratedMessageV3.Builder:
getAllFields, getField, getFieldBuilder, getOneofFieldDescriptor, getParentForChildren, getRepeatedField, getRepeatedFieldBuilder, getRepeatedFieldCount, getUnknownFields, getUnknownFieldSetBuilder, hasField, hasOneof, internalGetMapField, internalGetMutableMapField, isClean, markClean, mergeUnknownLengthDelimitedField, mergeUnknownVarintField, newBuilderForField, onBuilt, onChanged, parseUnknownField, setUnknownFieldSetBuilder, setUnknownFieldsProto3
Methods inherited from class AbstractMessage.Builder:
findInitializationErrors, getInitializationErrorString, internalMergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, newUninitializedMessageException, toString
Methods inherited from class AbstractMessageLite.Builder:
addAll, addAll, mergeDelimitedFrom, mergeDelimitedFrom, mergeFrom, newUninitializedMessageException
Methods inherited from class java.lang.Object:
equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait
Methods inherited from interface MessageOrBuilder:
findInitializationErrors, getAllFields, getField, getInitializationErrorString, getOneofFieldDescriptor, getRepeatedField, getRepeatedFieldCount, getUnknownFields, hasField, hasOneof
Methods inherited from interface MessageLite.Builder:
mergeDelimitedFrom, mergeDelimitedFrom
Methods inherited from interface Message.Builder:
mergeFrom
public static final Descriptors.Descriptor getDescriptor()
protected GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
Overrides: internalGetFieldAccessorTable in class GeneratedMessageV3.Builder<PySparkBatch.Builder>
public PySparkBatch.Builder clear()
Specified by: clear in interface Message.Builder
Specified by: clear in interface MessageLite.Builder
Overrides: clear in class GeneratedMessageV3.Builder<PySparkBatch.Builder>
public Descriptors.Descriptor getDescriptorForType()
Specified by: getDescriptorForType in interface Message.Builder
Specified by: getDescriptorForType in interface MessageOrBuilder
Overrides: getDescriptorForType in class GeneratedMessageV3.Builder<PySparkBatch.Builder>
public PySparkBatch getDefaultInstanceForType()
Specified by: getDefaultInstanceForType in interface MessageLiteOrBuilder
Specified by: getDefaultInstanceForType in interface MessageOrBuilder
public PySparkBatch build()
Specified by: build in interface Message.Builder
Specified by: build in interface MessageLite.Builder
public PySparkBatch buildPartial()
Specified by: buildPartial in interface Message.Builder
Specified by: buildPartial in interface MessageLite.Builder
public PySparkBatch.Builder clone()
Specified by: clone in interface Message.Builder
Specified by: clone in interface MessageLite.Builder
Overrides: clone in class GeneratedMessageV3.Builder<PySparkBatch.Builder>
public PySparkBatch.Builder setField(Descriptors.FieldDescriptor field, Object value)
Specified by: setField in interface Message.Builder
Overrides: setField in class GeneratedMessageV3.Builder<PySparkBatch.Builder>
public PySparkBatch.Builder clearField(Descriptors.FieldDescriptor field)
Specified by: clearField in interface Message.Builder
Overrides: clearField in class GeneratedMessageV3.Builder<PySparkBatch.Builder>
public PySparkBatch.Builder clearOneof(Descriptors.OneofDescriptor oneof)
Specified by: clearOneof in interface Message.Builder
Overrides: clearOneof in class GeneratedMessageV3.Builder<PySparkBatch.Builder>
public PySparkBatch.Builder setRepeatedField(Descriptors.FieldDescriptor field, int index, Object value)
Specified by: setRepeatedField in interface Message.Builder
Overrides: setRepeatedField in class GeneratedMessageV3.Builder<PySparkBatch.Builder>
public PySparkBatch.Builder addRepeatedField(Descriptors.FieldDescriptor field, Object value)
Specified by: addRepeatedField in interface Message.Builder
Overrides: addRepeatedField in class GeneratedMessageV3.Builder<PySparkBatch.Builder>
public PySparkBatch.Builder mergeFrom(Message other)
Specified by: mergeFrom in interface Message.Builder
Overrides: mergeFrom in class AbstractMessage.Builder<PySparkBatch.Builder>
public PySparkBatch.Builder mergeFrom(PySparkBatch other)
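A sketch of how mergeFrom(PySparkBatch other) can layer a shared configuration over a job-specific one, assuming standard protobuf merge semantics (singular fields that are set in other overwrite the builder's values; repeated fields are appended). The URIs and arguments are placeholders, and the imports match the first example above.

```java
// Fragment; assumes the PySparkBatch import shown in the first example.
PySparkBatch baseConfig =
    PySparkBatch.newBuilder()
        .setMainPythonFileUri("gs://example-bucket/scripts/etl.py")
        .addJarFileUris("gs://example-bucket/jars/common.jar")
        .build();

// Repeated fields from baseConfig are appended to this builder's lists;
// its main_python_file_uri overwrites any value set here.
PySparkBatch nightlyRun =
    PySparkBatch.newBuilder()
        .addArgs("--date=2022-01-01")
        .mergeFrom(baseConfig)
        .build();
```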
public final boolean isInitialized()
Specified by: isInitialized in interface MessageLiteOrBuilder
Overrides: isInitialized in class GeneratedMessageV3.Builder<PySparkBatch.Builder>
public PySparkBatch.Builder mergeFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry) throws IOException
Specified by: mergeFrom in interface Message.Builder
Specified by: mergeFrom in interface MessageLite.Builder
Overrides: mergeFrom in class AbstractMessage.Builder<PySparkBatch.Builder>
Throws: IOException
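For the stream-based overload, a sketch that re-parses previously serialized bytes into a fresh builder; existingBatch is an assumed, already-built PySparkBatch, and an empty extension registry is passed because no extensions are involved.

```java
// Fragment; assumes imports of com.google.protobuf.CodedInputStream,
// com.google.protobuf.ExtensionRegistryLite, and java.io.IOException.
byte[] serialized = existingBatch.toByteArray();

PySparkBatch.Builder builder = PySparkBatch.newBuilder();
try {
  builder.mergeFrom(
      CodedInputStream.newInstance(serialized), ExtensionRegistryLite.getEmptyRegistry());
} catch (IOException e) {
  // Thrown when the input is malformed or truncated.
  throw new IllegalStateException("Could not parse PySparkBatch bytes", e);
}
PySparkBatch reparsed = builder.build();
```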
public String getMainPythonFileUri()
Required. The HCFS URI of the main Python file to use as the Spark driver. Must be a .py file.
string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
Specified by: getMainPythonFileUri in interface PySparkBatchOrBuilder
public ByteString getMainPythonFileUriBytes()
Required. The HCFS URI of the main Python file to use as the Spark driver. Must be a .py file.
string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
Specified by: getMainPythonFileUriBytes in interface PySparkBatchOrBuilder
public PySparkBatch.Builder setMainPythonFileUri(String value)
Required. The HCFS URI of the main Python file to use as the Spark driver. Must be a .py file.
string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
Parameters: value - The mainPythonFileUri to set.
public PySparkBatch.Builder clearMainPythonFileUri()
Required. The HCFS URI of the main Python file to use as the Spark driver. Must be a .py file.
string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
public PySparkBatch.Builder setMainPythonFileUriBytes(ByteString value)
Required. The HCFS URI of the main Python file to use as the Spark driver. Must be a .py file.
string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
Parameters: value - The bytes for mainPythonFileUri to set.
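The *Bytes accessors read and write the same main_python_file_uri field as UTF-8 bytes through com.google.protobuf.ByteString; a small sketch with a placeholder URI:

```java
// Fragment; assumes an import of com.google.protobuf.ByteString.
PySparkBatch.Builder builder =
    PySparkBatch.newBuilder()
        .setMainPythonFileUriBytes(ByteString.copyFromUtf8("gs://example-bucket/scripts/job.py"));

String uri = builder.getMainPythonFileUri();               // "gs://example-bucket/scripts/job.py"
ByteString uriBytes = builder.getMainPythonFileUriBytes(); // the same value as UTF-8 bytes

builder.clearMainPythonFileUri();                          // resets the field to its default (empty) value
```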
public ProtocolStringList getArgsList()
Optional. The arguments to pass to the driver. Do not include arguments that can be set as batch properties, such as `--conf`, since a collision can occur that causes an incorrect batch submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getArgsList in interface PySparkBatchOrBuilder
public int getArgsCount()
Optional. The arguments to pass to the driver. Do not include arguments that can be set as batch properties, such as `--conf`, since a collision can occur that causes an incorrect batch submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getArgsCount in interface PySparkBatchOrBuilder
public String getArgs(int index)
Optional. The arguments to pass to the driver. Do not include arguments that can be set as batch properties, such as `--conf`, since a collision can occur that causes an incorrect batch submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getArgs in interface PySparkBatchOrBuilder
Parameters: index - The index of the element to return.
public ByteString getArgsBytes(int index)
Optional. The arguments to pass to the driver. Do not include arguments that can be set as batch properties, such as `--conf`, since a collision can occur that causes an incorrect batch submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getArgsBytes in interface PySparkBatchOrBuilder
Parameters: index - The index of the value to return.
public PySparkBatch.Builder setArgs(int index, String value)
Optional. The arguments to pass to the driver. Do not include arguments that can be set as batch properties, such as `--conf`, since a collision can occur that causes an incorrect batch submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
Parameters: index - The index to set the value at. value - The args to set.
public PySparkBatch.Builder addArgs(String value)
Optional. The arguments to pass to the driver. Do not include arguments that can be set as batch properties, such as `--conf`, since a collision can occur that causes an incorrect batch submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
Parameters: value - The args to add.
public PySparkBatch.Builder addAllArgs(Iterable<String> values)
Optional. The arguments to pass to the driver. Do not include arguments that can be set as batch properties, such as `--conf`, since a collision can occur that causes an incorrect batch submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
Parameters: values - The args to add.
public PySparkBatch.Builder clearArgs()
Optional. The arguments to pass to the driver. Do not include arguments that can be set as batch properties, such as `--conf`, since a collision can occur that causes an incorrect batch submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
public PySparkBatch.Builder addArgsBytes(ByteString value)
Optional. The arguments to pass to the driver. Do not include arguments that can be set as batch properties, such as `--conf`, since a collision can occur that causes an incorrect batch submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
Parameters: value - The bytes of the args to add.
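The repeated args field follows the usual protobuf repeated-string accessor pattern, and the same shape applies to pythonFileUris, jarFileUris, fileUris, and archiveUris below. The argument strings here are placeholders; as the field description notes, --conf-style settings should be supplied as batch properties rather than passed as driver arguments.

```java
// Fragment; assumes an import of java.util.Arrays.
PySparkBatch.Builder builder =
    PySparkBatch.newBuilder().setMainPythonFileUri("gs://example-bucket/scripts/job.py");

builder.addArgs("--input=gs://example-bucket/in");                                   // append one element
builder.addAllArgs(Arrays.asList("--output=gs://example-bucket/out", "--verbose"));  // append several
builder.setArgs(2, "--quiet");                                                       // replace the element at index 2

int count = builder.getArgsCount();   // 3
String first = builder.getArgs(0);    // "--input=gs://example-bucket/in"

builder.clearArgs();                  // remove all elements
```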
public ProtocolStringList getPythonFileUrisList()
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: `.py`, `.egg`, and `.zip`.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getPythonFileUrisList in interface PySparkBatchOrBuilder
public int getPythonFileUrisCount()
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: `.py`, `.egg`, and `.zip`.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getPythonFileUrisCount in interface PySparkBatchOrBuilder
public String getPythonFileUris(int index)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: `.py`, `.egg`, and `.zip`.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getPythonFileUris in interface PySparkBatchOrBuilder
Parameters: index - The index of the element to return.
public ByteString getPythonFileUrisBytes(int index)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: `.py`, `.egg`, and `.zip`.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getPythonFileUrisBytes in interface PySparkBatchOrBuilder
Parameters: index - The index of the value to return.
public PySparkBatch.Builder setPythonFileUris(int index, String value)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: `.py`, `.egg`, and `.zip`.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
Parameters: index - The index to set the value at. value - The pythonFileUris to set.
public PySparkBatch.Builder addPythonFileUris(String value)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: `.py`, `.egg`, and `.zip`.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
Parameters: value - The pythonFileUris to add.
public PySparkBatch.Builder addAllPythonFileUris(Iterable<String> values)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: `.py`, `.egg`, and `.zip`.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
Parameters: values - The pythonFileUris to add.
public PySparkBatch.Builder clearPythonFileUris()
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: `.py`, `.egg`, and `.zip`.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
public PySparkBatch.Builder addPythonFileUrisBytes(ByteString value)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: `.py`, `.egg`, and `.zip`.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
Parameters: value - The bytes of the pythonFileUris to add.
public ProtocolStringList getJarFileUrisList()
Optional. HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getJarFileUrisList in interface PySparkBatchOrBuilder
public int getJarFileUrisCount()
Optional. HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getJarFileUrisCount in interface PySparkBatchOrBuilder
public String getJarFileUris(int index)
Optional. HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getJarFileUris in interface PySparkBatchOrBuilder
Parameters: index - The index of the element to return.
public ByteString getJarFileUrisBytes(int index)
Optional. HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getJarFileUrisBytes in interface PySparkBatchOrBuilder
Parameters: index - The index of the value to return.
public PySparkBatch.Builder setJarFileUris(int index, String value)
Optional. HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
Parameters: index - The index to set the value at. value - The jarFileUris to set.
public PySparkBatch.Builder addJarFileUris(String value)
Optional. HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
Parameters: value - The jarFileUris to add.
public PySparkBatch.Builder addAllJarFileUris(Iterable<String> values)
Optional. HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
Parameters: values - The jarFileUris to add.
public PySparkBatch.Builder clearJarFileUris()
Optional. HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
public PySparkBatch.Builder addJarFileUrisBytes(ByteString value)
Optional. HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
Parameters: value - The bytes of the jarFileUris to add.
public ProtocolStringList getFileUrisList()
Optional. HCFS URIs of files to be placed in the working directory of each executor.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getFileUrisList in interface PySparkBatchOrBuilder
public int getFileUrisCount()
Optional. HCFS URIs of files to be placed in the working directory of each executor.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getFileUrisCount in interface PySparkBatchOrBuilder
public String getFileUris(int index)
Optional. HCFS URIs of files to be placed in the working directory of each executor.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getFileUris in interface PySparkBatchOrBuilder
Parameters: index - The index of the element to return.
public ByteString getFileUrisBytes(int index)
Optional. HCFS URIs of files to be placed in the working directory of each executor.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getFileUrisBytes in interface PySparkBatchOrBuilder
Parameters: index - The index of the value to return.
public PySparkBatch.Builder setFileUris(int index, String value)
Optional. HCFS URIs of files to be placed in the working directory of each executor.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
Parameters: index - The index to set the value at. value - The fileUris to set.
public PySparkBatch.Builder addFileUris(String value)
Optional. HCFS URIs of files to be placed in the working directory of each executor.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
Parameters: value - The fileUris to add.
public PySparkBatch.Builder addAllFileUris(Iterable<String> values)
Optional. HCFS URIs of files to be placed in the working directory of each executor.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
Parameters: values - The fileUris to add.
public PySparkBatch.Builder clearFileUris()
Optional. HCFS URIs of files to be placed in the working directory of each executor.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
public PySparkBatch.Builder addFileUrisBytes(ByteString value)
Optional. HCFS URIs of files to be placed in the working directory of each executor.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
Parameters: value - The bytes of the fileUris to add.
public ProtocolStringList getArchiveUrisList()
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getArchiveUrisList in interface PySparkBatchOrBuilder
public int getArchiveUrisCount()
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getArchiveUrisCount in interface PySparkBatchOrBuilder
public String getArchiveUris(int index)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getArchiveUris in interface PySparkBatchOrBuilder
Parameters: index - The index of the element to return.
public ByteString getArchiveUrisBytes(int index)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
Specified by: getArchiveUrisBytes in interface PySparkBatchOrBuilder
Parameters: index - The index of the value to return.
public PySparkBatch.Builder setArchiveUris(int index, String value)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
Parameters: index - The index to set the value at. value - The archiveUris to set.
public PySparkBatch.Builder addArchiveUris(String value)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
Parameters: value - The archiveUris to add.
public PySparkBatch.Builder addAllArchiveUris(Iterable<String> values)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
Parameters: values - The archiveUris to add.
public PySparkBatch.Builder clearArchiveUris()
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
public PySparkBatch.Builder addArchiveUrisBytes(ByteString value)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
Parameters: value - The bytes of the archiveUris to add.
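Bringing the dependency-oriented fields together, a sketch of a batch that stages Python modules, jars, plain files, and an archive for the executors; every gs:// URI is a placeholder.

```java
// Fragment; assumes the PySparkBatch import shown in the first example.
PySparkBatch batch =
    PySparkBatch.newBuilder()
        .setMainPythonFileUri("gs://example-bucket/scripts/train.py")
        // Python dependencies passed to the PySpark framework (.py, .egg, or .zip).
        .addPythonFileUris("gs://example-bucket/deps/feature_lib.zip")
        // Jars added to the classpath of the Spark driver and tasks.
        .addJarFileUris("gs://example-bucket/jars/gcs-connector.jar")
        // Files placed in the working directory of each executor.
        .addFileUris("gs://example-bucket/config/model_params.yaml")
        // Archives extracted into each executor's working directory (.jar, .tar, .tar.gz, .tgz, .zip).
        .addArchiveUris("gs://example-bucket/envs/pyspark_env.tar.gz")
        .build();
```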
public final PySparkBatch.Builder setUnknownFields(UnknownFieldSet unknownFields)
Specified by: setUnknownFields in interface Message.Builder
Overrides: setUnknownFields in class GeneratedMessageV3.Builder<PySparkBatch.Builder>
public final PySparkBatch.Builder mergeUnknownFields(UnknownFieldSet unknownFields)
Specified by: mergeUnknownFields in interface Message.Builder
Overrides: mergeUnknownFields in class GeneratedMessageV3.Builder<PySparkBatch.Builder>
Copyright © 2022 Google LLC. All rights reserved.