Class PySparkJob.Builder

  • All Implemented Interfaces:
    PySparkJobOrBuilder, com.google.protobuf.Message.Builder, com.google.protobuf.MessageLite.Builder, com.google.protobuf.MessageLiteOrBuilder, com.google.protobuf.MessageOrBuilder, Cloneable
    Enclosing class:
    PySparkJob

    public static final class PySparkJob.Builder
    extends com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
    implements PySparkJobOrBuilder
     A Dataproc job for running
     [Apache
     PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html)
     applications on YARN.
     
    Protobuf type google.cloud.dataproc.v1.PySparkJob
    • Method Detail

      • getDescriptor

        public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()
      • internalGetMapField

        protected com.google.protobuf.MapField internalGetMapField(int number)
        Overrides:
        internalGetMapField in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
      • internalGetMutableMapField

        protected com.google.protobuf.MapField internalGetMutableMapField(int number)
        Overrides:
        internalGetMutableMapField in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
      • internalGetFieldAccessorTable

        protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
        Specified by:
        internalGetFieldAccessorTable in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
      • clear

        public PySparkJob.Builder clear()
        Specified by:
        clear in interface com.google.protobuf.Message.Builder
        Specified by:
        clear in interface com.google.protobuf.MessageLite.Builder
        Overrides:
        clear in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
      • getDescriptorForType

        public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()
        Specified by:
        getDescriptorForType in interface com.google.protobuf.Message.Builder
        Specified by:
        getDescriptorForType in interface com.google.protobuf.MessageOrBuilder
        Overrides:
        getDescriptorForType in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
      • getDefaultInstanceForType

        public PySparkJob getDefaultInstanceForType()
        Specified by:
        getDefaultInstanceForType in interface com.google.protobuf.MessageLiteOrBuilder
        Specified by:
        getDefaultInstanceForType in interface com.google.protobuf.MessageOrBuilder
      • build

        public PySparkJob build()
        Specified by:
        build in interface com.google.protobuf.Message.Builder
        Specified by:
        build in interface com.google.protobuf.MessageLite.Builder
      • buildPartial

        public PySparkJob buildPartial()
        Specified by:
        buildPartial in interface com.google.protobuf.Message.Builder
        Specified by:
        buildPartial in interface com.google.protobuf.MessageLite.Builder
      • clone

        public PySparkJob.Builder clone()
        Specified by:
        clone in interface com.google.protobuf.Message.Builder
        Specified by:
        clone in interface com.google.protobuf.MessageLite.Builder
        Overrides:
        clone in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
      • setField

        public PySparkJob.Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field,
                                           Object value)
        Specified by:
        setField in interface com.google.protobuf.Message.Builder
        Overrides:
        setField in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
      • clearField

        public PySparkJob.Builder clearField​(com.google.protobuf.Descriptors.FieldDescriptor field)
        Specified by:
        clearField in interface com.google.protobuf.Message.Builder
        Overrides:
        clearField in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
      • clearOneof

        public PySparkJob.Builder clearOneof​(com.google.protobuf.Descriptors.OneofDescriptor oneof)
        Specified by:
        clearOneof in interface com.google.protobuf.Message.Builder
        Overrides:
        clearOneof in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
      • setRepeatedField

        public PySparkJob.Builder setRepeatedField​(com.google.protobuf.Descriptors.FieldDescriptor field,
                                                   int index,
                                                   Object value)
        Specified by:
        setRepeatedField in interface com.google.protobuf.Message.Builder
        Overrides:
        setRepeatedField in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
      • addRepeatedField

        public PySparkJob.Builder addRepeatedField​(com.google.protobuf.Descriptors.FieldDescriptor field,
                                                   Object value)
        Specified by:
        addRepeatedField in interface com.google.protobuf.Message.Builder
        Overrides:
        addRepeatedField in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
      • mergeFrom

        public PySparkJob.Builder mergeFrom​(com.google.protobuf.Message other)
        Specified by:
        mergeFrom in interface com.google.protobuf.Message.Builder
        Overrides:
        mergeFrom in class com.google.protobuf.AbstractMessage.Builder<PySparkJob.Builder>
      • isInitialized

        public final boolean isInitialized()
        Specified by:
        isInitialized in interface com.google.protobuf.MessageLiteOrBuilder
        Overrides:
        isInitialized in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
      • mergeFrom

        public PySparkJob.Builder mergeFrom​(com.google.protobuf.CodedInputStream input,
                                            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                                     throws IOException
        Specified by:
        mergeFrom in interface com.google.protobuf.Message.Builder
        Specified by:
        mergeFrom in interface com.google.protobuf.MessageLite.Builder
        Overrides:
        mergeFrom in class com.google.protobuf.AbstractMessage.Builder<PySparkJob.Builder>
        Throws:
        IOException
      • getMainPythonFileUri

        public String getMainPythonFileUri()
         Required. The HCFS URI of the main Python file to use as the driver. Must
         be a .py file.
         
        string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
        Specified by:
        getMainPythonFileUri in interface PySparkJobOrBuilder
        Returns:
        The mainPythonFileUri.
      • getMainPythonFileUriBytes

        public com.google.protobuf.ByteString getMainPythonFileUriBytes()
         Required. The HCFS URI of the main Python file to use as the driver. Must
         be a .py file.
         
        string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
        Specified by:
        getMainPythonFileUriBytes in interface PySparkJobOrBuilder
        Returns:
        The bytes for mainPythonFileUri.
      • setMainPythonFileUri

        public PySparkJob.Builder setMainPythonFileUri​(String value)
         Required. The HCFS URI of the main Python file to use as the driver. Must
         be a .py file.
         
        string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
        Parameters:
        value - The mainPythonFileUri to set.
        Returns:
        This builder for chaining.
      • clearMainPythonFileUri

        public PySparkJob.Builder clearMainPythonFileUri()
         Required. The HCFS URI of the main Python file to use as the driver. Must
         be a .py file.
         
        string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
        Returns:
        This builder for chaining.
      • setMainPythonFileUriBytes

        public PySparkJob.Builder setMainPythonFileUriBytes​(com.google.protobuf.ByteString value)
         Required. The HCFS URI of the main Python file to use as the driver. Must
         be a .py file.
         
        string main_python_file_uri = 1 [(.google.api.field_behavior) = REQUIRED];
        Parameters:
        value - The bytes for mainPythonFileUri to set.
        Returns:
        This builder for chaining.
      • getArgsList

        public com.google.protobuf.ProtocolStringList getArgsList()
         Optional. The arguments to pass to the driver.  Do not include arguments,
         such as `--conf`, that can be set as job properties, since a collision may
         occur that causes an incorrect job submission.
         
        repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getArgsList in interface PySparkJobOrBuilder
        Returns:
        A list containing the args.
      • getArgsCount

        public int getArgsCount()
         Optional. The arguments to pass to the driver.  Do not include arguments,
         such as `--conf`, that can be set as job properties, since a collision may
         occur that causes an incorrect job submission.
         
        repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getArgsCount in interface PySparkJobOrBuilder
        Returns:
        The count of args.
      • getArgs

        public String getArgs​(int index)
         Optional. The arguments to pass to the driver.  Do not include arguments,
         such as `--conf`, that can be set as job properties, since a collision may
         occur that causes an incorrect job submission.
         
        repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getArgs in interface PySparkJobOrBuilder
        Parameters:
        index - The index of the element to return.
        Returns:
        The args at the given index.
      • getArgsBytes

        public com.google.protobuf.ByteString getArgsBytes​(int index)
         Optional. The arguments to pass to the driver.  Do not include arguments,
         such as `--conf`, that can be set as job properties, since a collision may
         occur that causes an incorrect job submission.
         
        repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getArgsBytes in interface PySparkJobOrBuilder
        Parameters:
        index - The index of the value to return.
        Returns:
        The bytes of the args at the given index.
      • setArgs

        public PySparkJob.Builder setArgs​(int index,
                                          String value)
         Optional. The arguments to pass to the driver.  Do not include arguments,
         such as `--conf`, that can be set as job properties, since a collision may
         occur that causes an incorrect job submission.
         
        repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        index - The index to set the value at.
        value - The args to set.
        Returns:
        This builder for chaining.
      • addArgs

        public PySparkJob.Builder addArgs​(String value)
         Optional. The arguments to pass to the driver.  Do not include arguments,
         such as `--conf`, that can be set as job properties, since a collision may
         occur that causes an incorrect job submission.
         
        repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        value - The args to add.
        Returns:
        This builder for chaining.
      • addAllArgs

        public PySparkJob.Builder addAllArgs​(Iterable<String> values)
         Optional. The arguments to pass to the driver.  Do not include arguments,
         such as `--conf`, that can be set as job properties, since a collision may
         occur that causes an incorrect job submission.
         
        repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        values - The args to add.
        Returns:
        This builder for chaining.
      • clearArgs

        public PySparkJob.Builder clearArgs()
         Optional. The arguments to pass to the driver.  Do not include arguments,
         such as `--conf`, that can be set as job properties, since a collision may
         occur that causes an incorrect job submission.
         
        repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
        Returns:
        This builder for chaining.
      • addArgsBytes

        public PySparkJob.Builder addArgsBytes​(com.google.protobuf.ByteString value)
         Optional. The arguments to pass to the driver.  Do not include arguments,
         such as `--conf`, that can be set as job properties, since a collision may
         occur that causes an incorrect job submission.
         
        repeated string args = 2 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        value - The bytes of the args to add.
        Returns:
        This builder for chaining.
      • getPythonFileUrisList

        public com.google.protobuf.ProtocolStringList getPythonFileUrisList()
         Optional. HCFS file URIs of Python files to pass to the PySpark
         framework. Supported file types: .py, .egg, and .zip.
         
        repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getPythonFileUrisList in interface PySparkJobOrBuilder
        Returns:
        A list containing the pythonFileUris.
      • getPythonFileUrisCount

        public int getPythonFileUrisCount()
         Optional. HCFS file URIs of Python files to pass to the PySpark
         framework. Supported file types: .py, .egg, and .zip.
         
        repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getPythonFileUrisCount in interface PySparkJobOrBuilder
        Returns:
        The count of pythonFileUris.
      • getPythonFileUris

        public String getPythonFileUris​(int index)
         Optional. HCFS file URIs of Python files to pass to the PySpark
         framework. Supported file types: .py, .egg, and .zip.
         
        repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getPythonFileUris in interface PySparkJobOrBuilder
        Parameters:
        index - The index of the element to return.
        Returns:
        The pythonFileUris at the given index.
      • getPythonFileUrisBytes

        public com.google.protobuf.ByteString getPythonFileUrisBytes​(int index)
         Optional. HCFS file URIs of Python files to pass to the PySpark
         framework. Supported file types: .py, .egg, and .zip.
         
        repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getPythonFileUrisBytes in interface PySparkJobOrBuilder
        Parameters:
        index - The index of the value to return.
        Returns:
        The bytes of the pythonFileUris at the given index.
      • setPythonFileUris

        public PySparkJob.Builder setPythonFileUris​(int index,
                                                    String value)
         Optional. HCFS file URIs of Python files to pass to the PySpark
         framework. Supported file types: .py, .egg, and .zip.
         
        repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        index - The index to set the value at.
        value - The pythonFileUris to set.
        Returns:
        This builder for chaining.
      • addPythonFileUris

        public PySparkJob.Builder addPythonFileUris​(String value)
         Optional. HCFS file URIs of Python files to pass to the PySpark
         framework. Supported file types: .py, .egg, and .zip.
         
        repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        value - The pythonFileUris to add.
        Returns:
        This builder for chaining.
      • addAllPythonFileUris

        public PySparkJob.Builder addAllPythonFileUris​(Iterable<String> values)
         Optional. HCFS file URIs of Python files to pass to the PySpark
         framework. Supported file types: .py, .egg, and .zip.
         
        repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        values - The pythonFileUris to add.
        Returns:
        This builder for chaining.
      • clearPythonFileUris

        public PySparkJob.Builder clearPythonFileUris()
         Optional. HCFS file URIs of Python files to pass to the PySpark
         framework. Supported file types: .py, .egg, and .zip.
         
        repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
        Returns:
        This builder for chaining.
      • addPythonFileUrisBytes

        public PySparkJob.Builder addPythonFileUrisBytes​(com.google.protobuf.ByteString value)
         Optional. HCFS file URIs of Python files to pass to the PySpark
         framework. Supported file types: .py, .egg, and .zip.
         
        repeated string python_file_uris = 3 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        value - The bytes of the pythonFileUris to add.
        Returns:
        This builder for chaining.
      • getJarFileUrisList

        public com.google.protobuf.ProtocolStringList getJarFileUrisList()
         Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
         Python driver and tasks.
         
        repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getJarFileUrisList in interface PySparkJobOrBuilder
        Returns:
        A list containing the jarFileUris.
      • getJarFileUrisCount

        public int getJarFileUrisCount()
         Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
         Python driver and tasks.
         
        repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getJarFileUrisCount in interface PySparkJobOrBuilder
        Returns:
        The count of jarFileUris.
      • getJarFileUris

        public String getJarFileUris​(int index)
         Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
         Python driver and tasks.
         
        repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getJarFileUris in interface PySparkJobOrBuilder
        Parameters:
        index - The index of the element to return.
        Returns:
        The jarFileUris at the given index.
      • getJarFileUrisBytes

        public com.google.protobuf.ByteString getJarFileUrisBytes​(int index)
         Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
         Python driver and tasks.
         
        repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getJarFileUrisBytes in interface PySparkJobOrBuilder
        Parameters:
        index - The index of the value to return.
        Returns:
        The bytes of the jarFileUris at the given index.
      • setJarFileUris

        public PySparkJob.Builder setJarFileUris​(int index,
                                                 String value)
         Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
         Python driver and tasks.
         
        repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        index - The index to set the value at.
        value - The jarFileUris to set.
        Returns:
        This builder for chaining.
      • addJarFileUris

        public PySparkJob.Builder addJarFileUris​(String value)
         Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
         Python driver and tasks.
         
        repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        value - The jarFileUris to add.
        Returns:
        This builder for chaining.
      • addAllJarFileUris

        public PySparkJob.Builder addAllJarFileUris​(Iterable<String> values)
         Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
         Python driver and tasks.
         
        repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        values - The jarFileUris to add.
        Returns:
        This builder for chaining.
      • clearJarFileUris

        public PySparkJob.Builder clearJarFileUris()
         Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
         Python driver and tasks.
         
        repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
        Returns:
        This builder for chaining.
      • addJarFileUrisBytes

        public PySparkJob.Builder addJarFileUrisBytes​(com.google.protobuf.ByteString value)
         Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
         Python driver and tasks.
         
        repeated string jar_file_uris = 4 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        value - The bytes of the jarFileUris to add.
        Returns:
        This builder for chaining.
      • getFileUrisList

        public com.google.protobuf.ProtocolStringList getFileUrisList()
         Optional. HCFS URIs of files to be placed in the working directory of
         each executor. Useful for naively parallel tasks.
         
        repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getFileUrisList in interface PySparkJobOrBuilder
        Returns:
        A list containing the fileUris.
      • getFileUrisCount

        public int getFileUrisCount()
         Optional. HCFS URIs of files to be placed in the working directory of
         each executor. Useful for naively parallel tasks.
         
        repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getFileUrisCount in interface PySparkJobOrBuilder
        Returns:
        The count of fileUris.
      • getFileUris

        public String getFileUris​(int index)
         Optional. HCFS URIs of files to be placed in the working directory of
         each executor. Useful for naively parallel tasks.
         
        repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getFileUris in interface PySparkJobOrBuilder
        Parameters:
        index - The index of the element to return.
        Returns:
        The fileUris at the given index.
      • getFileUrisBytes

        public com.google.protobuf.ByteString getFileUrisBytes​(int index)
         Optional. HCFS URIs of files to be placed in the working directory of
         each executor. Useful for naively parallel tasks.
         
        repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getFileUrisBytes in interface PySparkJobOrBuilder
        Parameters:
        index - The index of the value to return.
        Returns:
        The bytes of the fileUris at the given index.
      • setFileUris

        public PySparkJob.Builder setFileUris​(int index,
                                              String value)
         Optional. HCFS URIs of files to be placed in the working directory of
         each executor. Useful for naively parallel tasks.
         
        repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        index - The index to set the value at.
        value - The fileUris to set.
        Returns:
        This builder for chaining.
      • addFileUris

        public PySparkJob.Builder addFileUris​(String value)
         Optional. HCFS URIs of files to be placed in the working directory of
         each executor. Useful for naively parallel tasks.
         
        repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        value - The fileUris to add.
        Returns:
        This builder for chaining.
      • addAllFileUris

        public PySparkJob.Builder addAllFileUris​(Iterable<String> values)
         Optional. HCFS URIs of files to be placed in the working directory of
         each executor. Useful for naively parallel tasks.
         
        repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        values - The fileUris to add.
        Returns:
        This builder for chaining.
      • clearFileUris

        public PySparkJob.Builder clearFileUris()
         Optional. HCFS URIs of files to be placed in the working directory of
         each executor. Useful for naively parallel tasks.
         
        repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
        Returns:
        This builder for chaining.
      • addFileUrisBytes

        public PySparkJob.Builder addFileUrisBytes​(com.google.protobuf.ByteString value)
         Optional. HCFS URIs of files to be placed in the working directory of
         each executor. Useful for naively parallel tasks.
         
        repeated string file_uris = 5 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        value - The bytes of the fileUris to add.
        Returns:
        This builder for chaining.
      • getArchiveUrisList

        public com.google.protobuf.ProtocolStringList getArchiveUrisList()
         Optional. HCFS URIs of archives to be extracted into the working directory
         of each executor. Supported file types:
         .jar, .tar, .tar.gz, .tgz, and .zip.
         
        repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getArchiveUrisList in interface PySparkJobOrBuilder
        Returns:
        A list containing the archiveUris.
      • getArchiveUrisCount

        public int getArchiveUrisCount()
         Optional. HCFS URIs of archives to be extracted into the working directory
         of each executor. Supported file types:
         .jar, .tar, .tar.gz, .tgz, and .zip.
         
        repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getArchiveUrisCount in interface PySparkJobOrBuilder
        Returns:
        The count of archiveUris.
      • getArchiveUris

        public String getArchiveUris​(int index)
         Optional. HCFS URIs of archives to be extracted into the working directory
         of each executor. Supported file types:
         .jar, .tar, .tar.gz, .tgz, and .zip.
         
        repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getArchiveUris in interface PySparkJobOrBuilder
        Parameters:
        index - The index of the element to return.
        Returns:
        The archiveUris at the given index.
      • getArchiveUrisBytes

        public com.google.protobuf.ByteString getArchiveUrisBytes​(int index)
         Optional. HCFS URIs of archives to be extracted into the working directory
         of each executor. Supported file types:
         .jar, .tar, .tar.gz, .tgz, and .zip.
         
        repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getArchiveUrisBytes in interface PySparkJobOrBuilder
        Parameters:
        index - The index of the value to return.
        Returns:
        The bytes of the archiveUris at the given index.
      • setArchiveUris

        public PySparkJob.Builder setArchiveUris​(int index,
                                                 String value)
         Optional. HCFS URIs of archives to be extracted into the working directory
         of each executor. Supported file types:
         .jar, .tar, .tar.gz, .tgz, and .zip.
         
        repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        index - The index to set the value at.
        value - The archiveUris to set.
        Returns:
        This builder for chaining.
      • addArchiveUris

        public PySparkJob.Builder addArchiveUris​(String value)
         Optional. HCFS URIs of archives to be extracted into the working directory
         of each executor. Supported file types:
         .jar, .tar, .tar.gz, .tgz, and .zip.
         
        repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        value - The archiveUris to add.
        Returns:
        This builder for chaining.
      • addAllArchiveUris

        public PySparkJob.Builder addAllArchiveUris​(Iterable<String> values)
         Optional. HCFS URIs of archives to be extracted into the working directory
         of each executor. Supported file types:
         .jar, .tar, .tar.gz, .tgz, and .zip.
         
        repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        values - The archiveUris to add.
        Returns:
        This builder for chaining.
      • clearArchiveUris

        public PySparkJob.Builder clearArchiveUris()
         Optional. HCFS URIs of archives to be extracted into the working directory
         of each executor. Supported file types:
         .jar, .tar, .tar.gz, .tgz, and .zip.
         
        repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
        Returns:
        This builder for chaining.
      • addArchiveUrisBytes

        public PySparkJob.Builder addArchiveUrisBytes​(com.google.protobuf.ByteString value)
         Optional. HCFS URIs of archives to be extracted into the working directory
         of each executor. Supported file types:
         .jar, .tar, .tar.gz, .tgz, and .zip.
         
        repeated string archive_uris = 6 [(.google.api.field_behavior) = OPTIONAL];
        Parameters:
        value - The bytes of the archiveUris to add.
        Returns:
        This builder for chaining.
      • getPropertiesCount

        public int getPropertiesCount()
        Description copied from interface: PySparkJobOrBuilder
         Optional. A mapping of property names to values, used to configure PySpark.
         Properties that conflict with values set by the Dataproc API may be
         overwritten. Can include properties set in
         /etc/spark/conf/spark-defaults.conf and classes in user code.
         
        map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getPropertiesCount in interface PySparkJobOrBuilder
      • containsProperties

        public boolean containsProperties​(String key)
         Optional. A mapping of property names to values, used to configure PySpark.
         Properties that conflict with values set by the Dataproc API may be
         overwritten. Can include properties set in
         /etc/spark/conf/spark-defaults.conf and classes in user code.
         
        map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        containsProperties in interface PySparkJobOrBuilder
      • getPropertiesMap

        public Map<String, String> getPropertiesMap()
         Optional. A mapping of property names to values, used to configure PySpark.
         Properties that conflict with values set by the Dataproc API may be
         overwritten. Can include properties set in
         /etc/spark/conf/spark-defaults.conf and classes in user code.
         
        map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getPropertiesMap in interface PySparkJobOrBuilder
      • getPropertiesOrDefault

        public String getPropertiesOrDefault(String key,
                                             String defaultValue)
         Optional. A mapping of property names to values, used to configure PySpark.
         Properties that conflict with values set by the Dataproc API may be
         overwritten. Can include properties set in
         /etc/spark/conf/spark-defaults.conf and classes in user code.
         
        map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getPropertiesOrDefault in interface PySparkJobOrBuilder
      • getPropertiesOrThrow

        public String getPropertiesOrThrow(String key)
         Optional. A mapping of property names to values, used to configure PySpark.
         Properties that conflict with values set by the Dataproc API may be
         overwritten. Can include properties set in
         /etc/spark/conf/spark-defaults.conf and classes in user code.
         
        map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getPropertiesOrThrow in interface PySparkJobOrBuilder
      • removeProperties

        public PySparkJob.Builder removeProperties(String key)
         Optional. A mapping of property names to values, used to configure PySpark.
         Properties that conflict with values set by the Dataproc API may be
         overwritten. Can include properties set in
         /etc/spark/conf/spark-defaults.conf and classes in user code.
         
        map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
      • getMutableProperties

        @Deprecated
        public Map<String,String> getMutableProperties()
        Deprecated.
        Use the mutation accessors putProperties, putAllProperties, and removeProperties instead.
      • putProperties

        public PySparkJob.Builder putProperties(String key,
                                                String value)
         Optional. A mapping of property names to values, used to configure PySpark.
         Properties that conflict with values set by the Dataproc API may be
         overwritten. Can include properties set in
         /etc/spark/conf/spark-defaults.conf and classes in user code.
         
        map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
      • putAllProperties

        public PySparkJob.Builder putAllProperties(Map<String,String> values)
         Optional. A mapping of property names to values, used to configure PySpark.
         Properties that conflict with values set by the Dataproc API may be
         overwritten. Can include properties set in
         /etc/spark/conf/spark-defaults.conf and classes in user code.
         
        map<string, string> properties = 7 [(.google.api.field_behavior) = OPTIONAL];
      • hasLoggingConfig

        public boolean hasLoggingConfig()
         Optional. The runtime log config for job execution.
         
        .google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        hasLoggingConfig in interface PySparkJobOrBuilder
        Returns:
        Whether the loggingConfig field is set.
      • getLoggingConfig

        public LoggingConfig getLoggingConfig()
         Optional. The runtime log config for job execution.
         
        .google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
        Specified by:
        getLoggingConfig in interface PySparkJobOrBuilder
        Returns:
        The loggingConfig.
      • setLoggingConfig

        public PySparkJob.Builder setLoggingConfig(LoggingConfig value)
         Optional. The runtime log config for job execution.
         
        .google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
      • setLoggingConfig

        public PySparkJob.Builder setLoggingConfig(LoggingConfig.Builder builderForValue)
         Optional. The runtime log config for job execution.
         
        .google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
      • mergeLoggingConfig

        public PySparkJob.Builder mergeLoggingConfig(LoggingConfig value)
         Optional. The runtime log config for job execution.
         
        .google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
      • clearLoggingConfig

        public PySparkJob.Builder clearLoggingConfig()
         Optional. The runtime log config for job execution.
         
        .google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
      • getLoggingConfigBuilder

        public LoggingConfig.Builder getLoggingConfigBuilder()
         Optional. The runtime log config for job execution.
         
        .google.cloud.dataproc.v1.LoggingConfig logging_config = 8 [(.google.api.field_behavior) = OPTIONAL];
      • setUnknownFields

        public final PySparkJob.Builder setUnknownFields(com.google.protobuf.UnknownFieldSet unknownFields)
        Specified by:
        setUnknownFields in interface com.google.protobuf.Message.Builder
        Overrides:
        setUnknownFields in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>
      • mergeUnknownFields

        public final PySparkJob.Builder mergeUnknownFields(com.google.protobuf.UnknownFieldSet unknownFields)
        Specified by:
        mergeUnknownFields in interface com.google.protobuf.Message.Builder
        Overrides:
        mergeUnknownFields in class com.google.protobuf.GeneratedMessageV3.Builder<PySparkJob.Builder>