Package com.google.cloud.dataproc.v1
Class PySparkJob.Builder
java.lang.Object
  com.google.protobuf.AbstractMessageLite.Builder
    com.google.protobuf.AbstractMessage.Builder<BuilderT>
      com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
        com.google.cloud.dataproc.v1.PySparkJob.Builder
All Implemented Interfaces:
PySparkJobOrBuilder, com.google.protobuf.Message.Builder, com.google.protobuf.MessageLite.Builder, com.google.protobuf.MessageLiteOrBuilder, com.google.protobuf.MessageOrBuilder, Cloneable
Enclosing class: PySparkJob
public static final class PySparkJob.Builder extends com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder> implements PySparkJobOrBuilder
A Dataproc job for running [Apache PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html) applications on YARN.
Protobuf type: google.cloud.dataproc.v1.PySparkJob
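A minimal usage sketch of this builder follows; the bucket paths and property values are illustrative placeholders, not values taken from this reference.

    import com.google.cloud.dataproc.v1.PySparkJob;

    public class PySparkJobBuilderExample {
      public static void main(String[] args) {
        PySparkJob job =
            PySparkJob.newBuilder()
                .setMainPythonFileUri("gs://example-bucket/word_count.py") // required driver file
                .addArgs("gs://example-bucket/input.txt")                  // optional driver arguments
                .putProperties("spark.executor.memory", "4g")              // optional Spark properties
                .build();
        System.out.println(job.getMainPythonFileUri());
      }
    }

All setters return the builder, so calls can be chained before build() produces the immutable PySparkJob message.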
-
-
Method Summary
PySparkJob.Builder addAllArchiveUris(Iterable<String> values) - Optional.
PySparkJob.Builder addAllArgs(Iterable<String> values) - Optional.
PySparkJob.Builder addAllFileUris(Iterable<String> values) - Optional.
PySparkJob.Builder addAllJarFileUris(Iterable<String> values) - Optional.
PySparkJob.Builder addAllPythonFileUris(Iterable<String> values) - Optional.
PySparkJob.Builder addArchiveUris(String value) - Optional.
PySparkJob.Builder addArchiveUrisBytes(com.google.protobuf.ByteString value) - Optional.
PySparkJob.Builder addArgs(String value) - Optional.
PySparkJob.Builder addArgsBytes(com.google.protobuf.ByteString value) - Optional.
PySparkJob.Builder addFileUris(String value) - Optional.
PySparkJob.Builder addFileUrisBytes(com.google.protobuf.ByteString value) - Optional.
PySparkJob.Builder addJarFileUris(String value) - Optional.
PySparkJob.Builder addJarFileUrisBytes(com.google.protobuf.ByteString value) - Optional.
PySparkJob.Builder addPythonFileUris(String value) - Optional.
PySparkJob.Builder addPythonFileUrisBytes(com.google.protobuf.ByteString value) - Optional.
PySparkJob.Builder addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value)
PySparkJob build()
PySparkJob buildPartial()
PySparkJob.Builder clear()
PySparkJob.Builder clearArchiveUris() - Optional.
PySparkJob.Builder clearArgs() - Optional.
PySparkJob.Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field)
PySparkJob.Builder clearFileUris() - Optional.
PySparkJob.Builder clearJarFileUris() - Optional.
PySparkJob.Builder clearLoggingConfig() - Optional.
PySparkJob.Builder clearMainPythonFileUri() - Required.
PySparkJob.Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof)
PySparkJob.Builder clearProperties()
PySparkJob.Builder clearPythonFileUris() - Optional.
PySparkJob.Builder clone()
boolean containsProperties(String key) - Optional.
String getArchiveUris(int index) - Optional.
com.google.protobuf.ByteString getArchiveUrisBytes(int index) - Optional.
int getArchiveUrisCount() - Optional.
com.google.protobuf.ProtocolStringList getArchiveUrisList() - Optional.
String getArgs(int index) - Optional.
com.google.protobuf.ByteString getArgsBytes(int index) - Optional.
int getArgsCount() - Optional.
com.google.protobuf.ProtocolStringList getArgsList() - Optional.
PySparkJob getDefaultInstanceForType()
static com.google.protobuf.Descriptors.Descriptor getDescriptor()
com.google.protobuf.Descriptors.Descriptor getDescriptorForType()
String getFileUris(int index) - Optional.
com.google.protobuf.ByteString getFileUrisBytes(int index) - Optional.
int getFileUrisCount() - Optional.
com.google.protobuf.ProtocolStringList getFileUrisList() - Optional.
String getJarFileUris(int index) - Optional.
com.google.protobuf.ByteString getJarFileUrisBytes(int index) - Optional.
int getJarFileUrisCount() - Optional.
com.google.protobuf.ProtocolStringList getJarFileUrisList() - Optional.
LoggingConfig getLoggingConfig() - Optional.
LoggingConfig.Builder getLoggingConfigBuilder() - Optional.
LoggingConfigOrBuilder getLoggingConfigOrBuilder() - Optional.
String getMainPythonFileUri() - Required.
com.google.protobuf.ByteString getMainPythonFileUriBytes() - Required.
Map<String,String> getMutableProperties() - Deprecated.
Map<String,String> getProperties() - Deprecated.
int getPropertiesCount() - Optional.
Map<String,String> getPropertiesMap() - Optional.
String getPropertiesOrDefault(String key, String defaultValue) - Optional.
String getPropertiesOrThrow(String key) - Optional.
String getPythonFileUris(int index) - Optional.
com.google.protobuf.ByteString getPythonFileUrisBytes(int index) - Optional.
int getPythonFileUrisCount() - Optional.
com.google.protobuf.ProtocolStringList getPythonFileUrisList() - Optional.
boolean hasLoggingConfig() - Optional.
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
protected com.google.protobuf.MapField internalGetMapField(int number)
protected com.google.protobuf.MapField internalGetMutableMapField(int number)
boolean isInitialized()
PySparkJob.Builder mergeFrom(PySparkJob other)
PySparkJob.Builder mergeFrom(com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
PySparkJob.Builder mergeFrom(com.google.protobuf.Message other)
PySparkJob.Builder mergeLoggingConfig(LoggingConfig value) - Optional.
PySparkJob.Builder mergeUnknownFields(com.google.protobuf.UnknownFieldSet unknownFields)
PySparkJob.Builder putAllProperties(Map<String,String> values) - Optional.
PySparkJob.Builder putProperties(String key, String value) - Optional.
PySparkJob.Builder removeProperties(String key) - Optional.
PySparkJob.Builder setArchiveUris(int index, String value) - Optional.
PySparkJob.Builder setArgs(int index, String value) - Optional.
PySparkJob.Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value)
PySparkJob.Builder setFileUris(int index, String value) - Optional.
PySparkJob.Builder setJarFileUris(int index, String value) - Optional.
PySparkJob.Builder setLoggingConfig(LoggingConfig value) - Optional.
PySparkJob.Builder setLoggingConfig(LoggingConfig.Builder builderForValue) - Optional.
PySparkJob.Builder setMainPythonFileUri(String value) - Required.
PySparkJob.Builder setMainPythonFileUriBytes(com.google.protobuf.ByteString value) - Required.
PySparkJob.Builder setPythonFileUris(int index, String value) - Optional.
PySparkJob.Builder setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value)
PySparkJob.Builder setUnknownFields(com.google.protobuf.UnknownFieldSet unknownFields)
-
Methods inherited from class com.google.protobuf.GeneratedMessageV3.Builder
getAllFields, getField, getFieldBuilder, getOneofFieldDescriptor, getParentForChildren, getRepeatedField, getRepeatedFieldBuilder, getRepeatedFieldCount, getUnknownFields, getUnknownFieldSetBuilder, hasField, hasOneof, isClean, markClean, mergeUnknownLengthDelimitedField, mergeUnknownVarintField, newBuilderForField, onBuilt, onChanged, parseUnknownField, setUnknownFieldSetBuilder, setUnknownFieldsProto3
-
Methods inherited from class com.google.protobuf.AbstractMessage.Builder
findInitializationErrors, getInitializationErrorString, internalMergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, mergeFrom, newUninitializedMessageException, toString
-
Methods inherited from class com.google.protobuf.AbstractMessageLite.Builder
addAll, addAll, mergeDelimitedFrom, mergeDelimitedFrom, mergeFrom, newUninitializedMessageException
-
Methods inherited from class java.lang.Object
equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait
-
Method Detail
-
getDescriptor
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()
-
internalGetMapField
protected com.google.protobuf.MapField internalGetMapField(int number)
- Overrides: internalGetMapField in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
-
internalGetMutableMapField
protected com.google.protobuf.MapField internalGetMutableMapField(int number)
- Overrides: internalGetMutableMapField in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
-
internalGetFieldAccessorTable
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
- Specified by: internalGetFieldAccessorTable in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
-
clear
public PySparkJob.Builder clear()
- Specified by: clear in interface com.google.protobuf.Message.Builder
- Specified by: clear in interface com.google.protobuf.MessageLite.Builder
- Overrides: clear in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
-
getDescriptorForType
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()
- Specified by: getDescriptorForType in interface com.google.protobuf.Message.Builder
- Specified by: getDescriptorForType in interface com.google.protobuf.MessageOrBuilder
- Overrides: getDescriptorForType in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
-
getDefaultInstanceForType
public PySparkJob getDefaultInstanceForType()
- Specified by: getDefaultInstanceForType in interface com.google.protobuf.MessageLiteOrBuilder
- Specified by: getDefaultInstanceForType in interface com.google.protobuf.MessageOrBuilder
-
build
public PySparkJob build()
- Specified by: build in interface com.google.protobuf.Message.Builder
- Specified by: build in interface com.google.protobuf.MessageLite.Builder
-
buildPartial
public PySparkJob buildPartial()
- Specified by: buildPartial in interface com.google.protobuf.Message.Builder
- Specified by: buildPartial in interface com.google.protobuf.MessageLite.Builder
-
clone
public PySparkJob.Builder clone()
- Specified by: clone in interface com.google.protobuf.Message.Builder
- Specified by: clone in interface com.google.protobuf.MessageLite.Builder
- Overrides: clone in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
-
setField
public PySparkJob.Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value)
- Specified by: setField in interface com.google.protobuf.Message.Builder
- Overrides: setField in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
-
clearField
public PySparkJob.Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field)
- Specified by: clearField in interface com.google.protobuf.Message.Builder
- Overrides: clearField in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
-
clearOneof
public PySparkJob.Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof)
- Specified by: clearOneof in interface com.google.protobuf.Message.Builder
- Overrides: clearOneof in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
-
setRepeatedField
public PySparkJob.Builder setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value)
- Specified by: setRepeatedField in interface com.google.protobuf.Message.Builder
- Overrides: setRepeatedField in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
-
addRepeatedField
public PySparkJob.Builder addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, Object value)
- Specified by: addRepeatedField in interface com.google.protobuf.Message.Builder
- Overrides: addRepeatedField in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
-
mergeFrom
public PySparkJob.Builder mergeFrom(com.google.protobuf.Message other)
- Specified by: mergeFrom in interface com.google.protobuf.Message.Builder
- Overrides: mergeFrom in class com.google.protobuf.AbstractMessage.Builder<PySparkJob.Builder>
-
mergeFrom
public PySparkJob.Builder mergeFrom(PySparkJob other)
-
isInitialized
public final boolean isInitialized()
- Specified by: isInitialized in interface com.google.protobuf.MessageLiteOrBuilder
- Overrides: isInitialized in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
-
mergeFrom
public PySparkJob.Builder mergeFrom(com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws IOException
- Specified by: mergeFrom in interface com.google.protobuf.Message.Builder
- Specified by: mergeFrom in interface com.google.protobuf.MessageLite.Builder
- Overrides: mergeFrom in class com.google.protobuf.AbstractMessage.Builder<PySparkJob.Builder>
- Throws: IOException
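A sketch of a wire-format round trip through this overload; the job contents below are placeholders and the registry is the empty default.

    import com.google.protobuf.CodedInputStream;
    import com.google.protobuf.ExtensionRegistryLite;

    byte[] wire = PySparkJob.newBuilder()
        .setMainPythonFileUri("gs://example-bucket/app.py")
        .build()
        .toByteArray();

    PySparkJob.Builder builder = PySparkJob.newBuilder();
    builder.mergeFrom(CodedInputStream.newInstance(wire),
        ExtensionRegistryLite.getEmptyRegistry());          // declares IOException on malformed input
    PySparkJob parsed = builder.build();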
-
getMainPythonFileUri
public String getMainPythonFileUri()
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
- Specified by: getMainPythonFileUri in interface PySparkJobOrBuilder
- Returns:
- The mainPythonFileUri.
-
getMainPythonFileUriBytes
public com.google.protobuf.ByteString getMainPythonFileUriBytes()
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
- Specified by: getMainPythonFileUriBytes in interface PySparkJobOrBuilder
- Returns:
- The bytes for mainPythonFileUri.
-
setMainPythonFileUri
public PySparkJob.Builder setMainPythonFileUri(String value)
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
- Parameters:
value - The mainPythonFileUri to set.
- Returns:
- This builder for chaining.
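For illustration (placeholder URI), this is the one field of the message marked Required:

    PySparkJob.Builder builder = PySparkJob.newBuilder();
    builder.setMainPythonFileUri("gs://example-bucket/pyspark/driver.py"); // must be a .py file
    // clearMainPythonFileUri() resets the field to the empty string.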
-
clearMainPythonFileUri
public PySparkJob.Builder clearMainPythonFileUri()
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
- Returns:
- This builder for chaining.
-
setMainPythonFileUriBytes
public PySparkJob.Builder setMainPythonFileUriBytes(com.google.protobuf.ByteString value)
Required. The HCFS URI of the main Python file to use as the driver. Must be a .py file.
string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
- Parameters:
value - The bytes for mainPythonFileUri to set.
- Returns:
- This builder for chaining.
-
getArgsList
public com.google.protobuf.ProtocolStringList getArgsList()
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getArgsList in interface PySparkJobOrBuilder
- Returns:
- A list containing the args.
-
getArgsCount
public int getArgsCount()
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getArgsCount in interface PySparkJobOrBuilder
- Returns:
- The count of args.
-
getArgs
public String getArgs(int index)
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getArgs in interface PySparkJobOrBuilder
- Parameters:
index - The index of the element to return.
- Returns:
- The args at the given index.
-
getArgsBytes
public com.google.protobuf.ByteString getArgsBytes(int index)
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getArgsBytes in interface PySparkJobOrBuilder
- Parameters:
index - The index of the value to return.
- Returns:
- The bytes of the args at the given index.
-
setArgs
public PySparkJob.Builder setArgs(int index, String value)
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
index - The index to set the value at.
value - The args to set.
- Returns:
- This builder for chaining.
-
addArgs
public PySparkJob.Builder addArgs(String value)
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
value - The args to add.
- Returns:
- This builder for chaining.
-
addAllArgs
public PySparkJob.Builder addAllArgs(Iterable<String> values)
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
values - The args to add.
- Returns:
- This builder for chaining.
-
clearArgs
public PySparkJob.Builder clearArgs()
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
- Returns:
- This builder for chaining.
-
addArgsBytes
public PySparkJob.Builder addArgsBytes(com.google.protobuf.ByteString value)
Optional. The arguments to pass to the driver. Do not include arguments, such as `--conf`, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
value - The bytes of the args to add.
- Returns:
- This builder for chaining.
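A short sketch of populating args with placeholder values; per the note above, flags such as --conf belong in job properties, not here.

    PySparkJob.Builder builder = PySparkJob.newBuilder()
        .addArgs("--input")
        .addArgs("gs://example-bucket/data/part-0.csv")
        .addAllArgs(java.util.Arrays.asList("--output", "gs://example-bucket/results/"));
    // builder.getArgsCount() is now 4 and builder.getArgs(0) returns "--input".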
-
getPythonFileUrisList
public com.google.protobuf.ProtocolStringList getPythonFileUrisList()
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getPythonFileUrisList in interface PySparkJobOrBuilder
- Returns:
- A list containing the pythonFileUris.
-
getPythonFileUrisCount
public int getPythonFileUrisCount()
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getPythonFileUrisCount in interface PySparkJobOrBuilder
- Returns:
- The count of pythonFileUris.
-
getPythonFileUris
public String getPythonFileUris(int index)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getPythonFileUris in interface PySparkJobOrBuilder
- Parameters:
index - The index of the element to return.
- Returns:
- The pythonFileUris at the given index.
-
getPythonFileUrisBytes
public com.google.protobuf.ByteString getPythonFileUrisBytes(int index)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getPythonFileUrisBytes in interface PySparkJobOrBuilder
- Parameters:
index - The index of the value to return.
- Returns:
- The bytes of the pythonFileUris at the given index.
-
setPythonFileUris
public PySparkJob.Builder setPythonFileUris(int index, String value)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
index - The index to set the value at.
value - The pythonFileUris to set.
- Returns:
- This builder for chaining.
-
addPythonFileUris
public PySparkJob.Builder addPythonFileUris(String value)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
value - The pythonFileUris to add.
- Returns:
- This builder for chaining.
-
addAllPythonFileUris
public PySparkJob.Builder addAllPythonFileUris(Iterable<String> values)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
values - The pythonFileUris to add.
- Returns:
- This builder for chaining.
-
clearPythonFileUris
public PySparkJob.Builder clearPythonFileUris()
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
- Returns:
- This builder for chaining.
-
addPythonFileUrisBytes
public PySparkJob.Builder addPythonFileUrisBytes(com.google.protobuf.ByteString value)
Optional. HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
value - The bytes of the pythonFileUris to add.
- Returns:
- This builder for chaining.
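A sketch with placeholder URIs showing how extra Python dependencies ride alongside the main driver file:

    PySparkJob.Builder builder = PySparkJob.newBuilder()
        .setMainPythonFileUri("gs://example-bucket/main.py")
        .addPythonFileUris("gs://example-bucket/deps/utils.py")
        .addPythonFileUris("gs://example-bucket/deps/helpers.zip"); // .py, .egg, and .zip are supported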
-
getJarFileUrisList
public com.google.protobuf.ProtocolStringList getJarFileUrisList()
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getJarFileUrisList in interface PySparkJobOrBuilder
- Returns:
- A list containing the jarFileUris.
-
getJarFileUrisCount
public int getJarFileUrisCount()
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getJarFileUrisCount in interface PySparkJobOrBuilder
- Returns:
- The count of jarFileUris.
-
getJarFileUris
public String getJarFileUris(int index)
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getJarFileUris in interface PySparkJobOrBuilder
- Parameters:
index - The index of the element to return.
- Returns:
- The jarFileUris at the given index.
-
getJarFileUrisBytes
public com.google.protobuf.ByteString getJarFileUrisBytes(int index)
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getJarFileUrisBytes in interface PySparkJobOrBuilder
- Parameters:
index - The index of the value to return.
- Returns:
- The bytes of the jarFileUris at the given index.
-
setJarFileUris
public PySparkJob.Builder setJarFileUris(int index, String value)
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
index - The index to set the value at.
value - The jarFileUris to set.
- Returns:
- This builder for chaining.
-
addJarFileUris
public PySparkJob.Builder addJarFileUris(String value)
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
value - The jarFileUris to add.
- Returns:
- This builder for chaining.
-
addAllJarFileUris
public PySparkJob.Builder addAllJarFileUris(Iterable<String> values)
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
values - The jarFileUris to add.
- Returns:
- This builder for chaining.
-
clearJarFileUris
public PySparkJob.Builder clearJarFileUris()
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
- Returns:
- This builder for chaining.
-
addJarFileUrisBytes
public PySparkJob.Builder addJarFileUrisBytes(com.google.protobuf.ByteString value)
Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
value - The bytes of the jarFileUris to add.
- Returns:
- This builder for chaining.
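A placeholder sketch, assuming an existing PySparkJob.Builder named builder, of adding jars to the driver and executor CLASSPATHs:

    builder.addJarFileUris("gs://example-bucket/jars/custom-udfs.jar");
    builder.addAllJarFileUris(java.util.Arrays.asList(
        "gs://example-bucket/jars/connector-a.jar",
        "gs://example-bucket/jars/connector-b.jar"));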
-
getFileUrisList
public com.google.protobuf.ProtocolStringList getFileUrisList()
Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getFileUrisList in interface PySparkJobOrBuilder
- Returns:
- A list containing the fileUris.
-
getFileUrisCount
public int getFileUrisCount()
Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getFileUrisCount in interface PySparkJobOrBuilder
- Returns:
- The count of fileUris.
-
getFileUris
public String getFileUris(int index)
Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getFileUris in interface PySparkJobOrBuilder
- Parameters:
index - The index of the element to return.
- Returns:
- The fileUris at the given index.
-
getFileUrisBytes
public com.google.protobuf.ByteString getFileUrisBytes(int index)
Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getFileUrisBytes in interface PySparkJobOrBuilder
- Parameters:
index - The index of the value to return.
- Returns:
- The bytes of the fileUris at the given index.
-
setFileUris
public PySparkJob.Builder setFileUris(int index, String value)
Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
index - The index to set the value at.
value - The fileUris to set.
- Returns:
- This builder for chaining.
-
addFileUris
public PySparkJob.Builder addFileUris(String value)
Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
value - The fileUris to add.
- Returns:
- This builder for chaining.
-
addAllFileUris
public PySparkJob.Builder addAllFileUris(Iterable<String> values)
Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
values - The fileUris to add.
- Returns:
- This builder for chaining.
-
clearFileUris
public PySparkJob.Builder clearFileUris()
Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
- Returns:
- This builder for chaining.
-
addFileUrisBytes
public PySparkJob.Builder addFileUrisBytes(com.google.protobuf.ByteString value)
Optional. HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
value - The bytes of the fileUris to add.
- Returns:
- This builder for chaining.
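A placeholder sketch, again assuming a PySparkJob.Builder named builder; these files land in each executor's working directory rather than on the CLASSPATH:

    builder.addFileUris("gs://example-bucket/config/settings.json");
    builder.addFileUris("gs://example-bucket/data/lookup-table.csv");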
-
getArchiveUrisList
public com.google.protobuf.ProtocolStringList getArchiveUrisList()
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getArchiveUrisList in interface PySparkJobOrBuilder
- Returns:
- A list containing the archiveUris.
-
getArchiveUrisCount
public int getArchiveUrisCount()
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getArchiveUrisCount in interface PySparkJobOrBuilder
- Returns:
- The count of archiveUris.
-
getArchiveUris
public String getArchiveUris(int index)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getArchiveUris in interface PySparkJobOrBuilder
- Parameters:
index - The index of the element to return.
- Returns:
- The archiveUris at the given index.
-
getArchiveUrisBytes
public com.google.protobuf.ByteString getArchiveUrisBytes(int index)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getArchiveUrisBytes in interface PySparkJobOrBuilder
- Parameters:
index - The index of the value to return.
- Returns:
- The bytes of the archiveUris at the given index.
-
setArchiveUris
public PySparkJob.Builder setArchiveUris(int index, String value)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
index - The index to set the value at.
value - The archiveUris to set.
- Returns:
- This builder for chaining.
-
addArchiveUris
public PySparkJob.Builder addArchiveUris(String value)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
value - The archiveUris to add.
- Returns:
- This builder for chaining.
-
addAllArchiveUris
public PySparkJob.Builder addAllArchiveUris(Iterable<String> values)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
values - The archiveUris to add.
- Returns:
- This builder for chaining.
-
clearArchiveUris
public PySparkJob.Builder clearArchiveUris()
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
- Returns:
- This builder for chaining.
-
addArchiveUrisBytes
public PySparkJob.Builder addArchiveUrisBytes(com.google.protobuf.ByteString value)
Optional. HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
- Parameters:
value - The bytes of the archiveUris to add.
- Returns:
- This builder for chaining.
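A placeholder sketch, assuming a PySparkJob.Builder named builder; unlike file_uris, these archives are extracted after being copied:

    builder.addArchiveUris("gs://example-bucket/envs/python-env.tar.gz");        // extracted in each executor's working directory
    builder.addArchiveUris("gs://example-bucket/resources/site-packages.zip");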
-
getPropertiesCount
public int getPropertiesCount()
Description copied from interface: PySparkJobOrBuilder
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getPropertiesCount in interface PySparkJobOrBuilder
-
containsProperties
public boolean containsProperties(String key)
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: containsProperties in interface PySparkJobOrBuilder
-
getProperties
@Deprecated public Map<String,String> getProperties()
Deprecated. Use getPropertiesMap() instead.
- Specified by: getProperties in interface PySparkJobOrBuilder
-
getPropertiesMap
public Map<String,String> getPropertiesMap()
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getPropertiesMap in interface PySparkJobOrBuilder
-
getPropertiesOrDefault
public String getPropertiesOrDefault(String key, String defaultValue)
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getPropertiesOrDefault in interface PySparkJobOrBuilder
-
getPropertiesOrThrow
public String getPropertiesOrThrow(String key)
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getPropertiesOrThrow in interface PySparkJobOrBuilder
-
clearProperties
public PySparkJob.Builder clearProperties()
-
removeProperties
public PySparkJob.Builder removeProperties(String key)
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
-
getMutableProperties
@Deprecated public Map<String,String> getMutableProperties()
Deprecated. Use alternate mutation accessors instead.
-
putProperties
public PySparkJob.Builder putProperties(String key, String value)
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
-
putAllProperties
public PySparkJob.Builder putAllProperties(Map<String,String> values)
Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
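A sketch of the map accessors with placeholder Spark property names and values:

    java.util.Map<String, String> sparkProps = new java.util.HashMap<>();
    sparkProps.put("spark.executor.memory", "4g");
    sparkProps.put("spark.dynamicAllocation.enabled", "true");

    PySparkJob.Builder builder = PySparkJob.newBuilder()
        .putProperties("spark.driver.memory", "2g")
        .putAllProperties(sparkProps);
    String memory = builder.getPropertiesOrDefault("spark.executor.memory", "1g"); // "4g"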
-
hasLoggingConfig
public boolean hasLoggingConfig()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: hasLoggingConfig in interface PySparkJobOrBuilder
- Returns:
- Whether the loggingConfig field is set.
-
getLoggingConfig
public LoggingConfig getLoggingConfig()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getLoggingConfig in interface PySparkJobOrBuilder
- Returns:
- The loggingConfig.
-
setLoggingConfig
public PySparkJob.Builder setLoggingConfig(LoggingConfig value)
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
-
setLoggingConfig
public PySparkJob.Builder setLoggingConfig(LoggingConfig.Builder builderForValue)
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
-
mergeLoggingConfig
public PySparkJob.Builder mergeLoggingConfig(LoggingConfig value)
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
-
clearLoggingConfig
public PySparkJob.Builder clearLoggingConfig()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
-
getLoggingConfigBuilder
public LoggingConfig.Builder getLoggingConfigBuilder()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
-
getLoggingConfigOrBuilder
public LoggingConfigOrBuilder getLoggingConfigOrBuilder()
Optional. The runtime log config for job execution.
.google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
- Specified by: getLoggingConfigOrBuilder in interface PySparkJobOrBuilder
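A sketch of attaching a logging config; putDriverLogLevels and LoggingConfig.Level are assumed from the LoggingConfig message and are not documented on this page.

    import com.google.cloud.dataproc.v1.LoggingConfig;

    LoggingConfig logging = LoggingConfig.newBuilder()
        .putDriverLogLevels("org.apache.spark", LoggingConfig.Level.WARN) // assumed LoggingConfig.Builder accessor
        .build();

    PySparkJob.Builder builder = PySparkJob.newBuilder().setLoggingConfig(logging);
    // builder.hasLoggingConfig() is now true; mergeLoggingConfig(...) merges into any existing config.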
-
setUnknownFields
public final PySparkJob.Builder setUnknownFields(com.google.protobuf.UnknownFieldSet unknownFields)
- Specified by: setUnknownFields in interface com.google.protobuf.Message.Builder
- Overrides: setUnknownFields in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
-
mergeUnknownFields
public final PySparkJob.Builder mergeUnknownFields(com.google.protobuf.UnknownFieldSet unknownFields)
- Specified by: mergeUnknownFields in interface com.google.protobuf.Message.Builder
- Overrides: mergeUnknownFields in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
-
-