Class InputDataConfig

  • All Implemented Interfaces:
    InputDataConfigOrBuilder, com.google.protobuf.Message, com.google.protobuf.MessageLite, com.google.protobuf.MessageLiteOrBuilder, com.google.protobuf.MessageOrBuilder, Serializable

    public final class InputDataConfig
    extends com.google.protobuf.GeneratedMessageV3
    implements InputDataConfigOrBuilder
     Specifies Vertex AI owned input data to be used for training, and
     possibly evaluating, the Model.
     
    Protobuf type google.cloud.aiplatform.v1beta1.InputDataConfig
    See Also:
    Serialized Form
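    For illustration, a minimal sketch of assembling an InputDataConfig with the
    generated builder. FractionSplit and GcsDestination come from the same package;
    their setters are assumed from those messages' standard fields, and the dataset
    ID, fractions, and bucket path are placeholder values, not defaults of this API.

        InputDataConfig config =
            InputDataConfig.newBuilder()
                .setDatasetId("1234567890")                 // placeholder Dataset ID
                .setFractionSplit(
                    FractionSplit.newBuilder()
                        .setTrainingFraction(0.8)
                        .setValidationFraction(0.1)
                        .setTestFraction(0.1))
                .setGcsDestination(
                    GcsDestination.newBuilder()
                        .setOutputUriPrefix("gs://my-bucket/training-output/"))  // placeholder bucket
                .build();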
    • Field Detail

      • FRACTION_SPLIT_FIELD_NUMBER

        public static final int FRACTION_SPLIT_FIELD_NUMBER
        See Also:
        Constant Field Values
      • FILTER_SPLIT_FIELD_NUMBER

        public static final int FILTER_SPLIT_FIELD_NUMBER
        See Also:
        Constant Field Values
      • PREDEFINED_SPLIT_FIELD_NUMBER

        public static final int PREDEFINED_SPLIT_FIELD_NUMBER
        See Also:
        Constant Field Values
      • TIMESTAMP_SPLIT_FIELD_NUMBER

        public static final int TIMESTAMP_SPLIT_FIELD_NUMBER
        See Also:
        Constant Field Values
      • STRATIFIED_SPLIT_FIELD_NUMBER

        public static final int STRATIFIED_SPLIT_FIELD_NUMBER
        See Also:
        Constant Field Values
      • GCS_DESTINATION_FIELD_NUMBER

        public static final int GCS_DESTINATION_FIELD_NUMBER
        See Also:
        Constant Field Values
      • BIGQUERY_DESTINATION_FIELD_NUMBER

        public static final int BIGQUERY_DESTINATION_FIELD_NUMBER
        See Also:
        Constant Field Values
      • DATASET_ID_FIELD_NUMBER

        public static final int DATASET_ID_FIELD_NUMBER
        See Also:
        Constant Field Values
      • ANNOTATIONS_FILTER_FIELD_NUMBER

        public static final int ANNOTATIONS_FILTER_FIELD_NUMBER
        See Also:
        Constant Field Values
      • ANNOTATION_SCHEMA_URI_FIELD_NUMBER

        public static final int ANNOTATION_SCHEMA_URI_FIELD_NUMBER
        See Also:
        Constant Field Values
      • SAVED_QUERY_ID_FIELD_NUMBER

        public static final int SAVED_QUERY_ID_FIELD_NUMBER
        See Also:
        Constant Field Values
      • PERSIST_ML_USE_ASSIGNMENT_FIELD_NUMBER

        public static final int PERSIST_ML_USE_ASSIGNMENT_FIELD_NUMBER
        See Also:
        Constant Field Values
    • Method Detail

      • newInstance

        protected Object newInstance​(com.google.protobuf.GeneratedMessageV3.UnusedPrivateParameter unused)
        Overrides:
        newInstance in class com.google.protobuf.GeneratedMessageV3
      • getDescriptor

        public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()
      • internalGetFieldAccessorTable

        protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable()
        Specified by:
        internalGetFieldAccessorTable in class com.google.protobuf.GeneratedMessageV3
      • hasFractionSplit

        public boolean hasFractionSplit()
         Split based on fractions defining the size of each set.
         
        .google.cloud.aiplatform.v1beta1.FractionSplit fraction_split = 2;
        Specified by:
        hasFractionSplit in interface InputDataConfigOrBuilder
        Returns:
        Whether the fractionSplit field is set.
      • getFractionSplit

        public FractionSplit getFractionSplit()
         Split based on fractions defining the size of each set.
         
        .google.cloud.aiplatform.v1beta1.FractionSplit fraction_split = 2;
        Specified by:
        getFractionSplit in interface InputDataConfigOrBuilder
        Returns:
        The fractionSplit.
      • hasFilterSplit

        public boolean hasFilterSplit()
         Split based on the provided filters for each set.
         
        .google.cloud.aiplatform.v1beta1.FilterSplit filter_split = 3;
        Specified by:
        hasFilterSplit in interface InputDataConfigOrBuilder
        Returns:
        Whether the filterSplit field is set.
      • getFilterSplit

        public FilterSplit getFilterSplit()
         Split based on the provided filters for each set.
         
        .google.cloud.aiplatform.v1beta1.FilterSplit filter_split = 3;
        Specified by:
        getFilterSplit in interface InputDataConfigOrBuilder
        Returns:
        The filterSplit.
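        For illustration, a hedged sketch of configuring a filter-based split; the
        filter expressions and dataset ID are placeholders, and the FilterSplit
        setters are assumed from that message's training/validation/test filter
        fields, which are not documented on this page.

          FilterSplit filters =
              FilterSplit.newBuilder()
                  .setTrainingFilter("labels.ml_use=\"training\"")      // placeholder filter expressions
                  .setValidationFilter("labels.ml_use=\"validation\"")
                  .setTestFilter("labels.ml_use=\"test\"")
                  .build();
          InputDataConfig config =
              InputDataConfig.newBuilder()
                  .setDatasetId("1234567890")                           // placeholder
                  .setFilterSplit(filters)
                  .build();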
      • hasPredefinedSplit

        public boolean hasPredefinedSplit()
         Supported only for tabular Datasets.
        
         Split based on a predefined key.
         
        .google.cloud.aiplatform.v1beta1.PredefinedSplit predefined_split = 4;
        Specified by:
        hasPredefinedSplit in interface InputDataConfigOrBuilder
        Returns:
        Whether the predefinedSplit field is set.
      • getPredefinedSplit

        public PredefinedSplit getPredefinedSplit()
         Supported only for tabular Datasets.
        
         Split based on a predefined key.
         
        .google.cloud.aiplatform.v1beta1.PredefinedSplit predefined_split = 4;
        Specified by:
        getPredefinedSplit in interface InputDataConfigOrBuilder
        Returns:
        The predefinedSplit.
      • hasTimestampSplit

        public boolean hasTimestampSplit()
         Supported only for tabular Datasets.
        
         Split based on the timestamp of the input data pieces.
         
        .google.cloud.aiplatform.v1beta1.TimestampSplit timestamp_split = 5;
        Specified by:
        hasTimestampSplit in interface InputDataConfigOrBuilder
        Returns:
        Whether the timestampSplit field is set.
      • getTimestampSplit

        public TimestampSplit getTimestampSplit()
         Supported only for tabular Datasets.
        
         Split based on the timestamp of the input data pieces.
         
        .google.cloud.aiplatform.v1beta1.TimestampSplit timestamp_split = 5;
        Specified by:
        getTimestampSplit in interface InputDataConfigOrBuilder
        Returns:
        The timestampSplit.
      • hasStratifiedSplit

        public boolean hasStratifiedSplit()
         Supported only for tabular Datasets.
        
         Split based on the distribution of the specified column.
         
        .google.cloud.aiplatform.v1beta1.StratifiedSplit stratified_split = 12;
        Specified by:
        hasStratifiedSplit in interface InputDataConfigOrBuilder
        Returns:
        Whether the stratifiedSplit field is set.
      • getStratifiedSplit

        public StratifiedSplit getStratifiedSplit()
         Supported only for tabular Datasets.
        
         Split based on the distribution of the specified column.
         
        .google.cloud.aiplatform.v1beta1.StratifiedSplit stratified_split = 12;
        Specified by:
        getStratifiedSplit in interface InputDataConfigOrBuilder
        Returns:
        The stratifiedSplit.
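        The has* accessors above indicate which split configuration, if any, was
        supplied. A minimal sketch, assuming config is an existing InputDataConfig:

          if (config.hasFractionSplit()) {
            FractionSplit split = config.getFractionSplit();
            System.out.printf("fractions: %.2f / %.2f / %.2f%n",
                split.getTrainingFraction(),
                split.getValidationFraction(),
                split.getTestFraction());
          } else if (config.hasPredefinedSplit()) {
            System.out.println("predefined split key: " + config.getPredefinedSplit().getKey());
          } else if (config.hasFilterSplit() || config.hasTimestampSplit() || config.hasStratifiedSplit()) {
            System.out.println("a filter, timestamp, or stratified split is configured");
          }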
      • hasGcsDestination

        public boolean hasGcsDestination()
          The Cloud Storage location to which the training data is to be
          written. In the given directory, a new directory is created with
         name:
         `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
         where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
         All training input data is written into that directory.
        
          The Vertex AI environment variables representing Cloud Storage
          data URIs are expressed in the Cloud Storage wildcard
          format to support sharded data, e.g. "gs://.../training-*.jsonl"
        
         * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
         * AIP_TRAINING_DATA_URI =
         "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
        
         * AIP_VALIDATION_DATA_URI =
         "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
        
         * AIP_TEST_DATA_URI =
         "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/test-*.${AIP_DATA_FORMAT}"
         
        .google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 8;
        Specified by:
        hasGcsDestination in interface InputDataConfigOrBuilder
        Returns:
        Whether the gcsDestination field is set.
      • getGcsDestination

        public GcsDestination getGcsDestination()
          The Cloud Storage location to which the training data is to be
          written. In the given directory, a new directory is created with
         name:
         `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
         where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
         All training input data is written into that directory.
        
          The Vertex AI environment variables representing Cloud Storage
          data URIs are expressed in the Cloud Storage wildcard
          format to support sharded data, e.g. "gs://.../training-*.jsonl"
        
         * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
         * AIP_TRAINING_DATA_URI =
         "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
        
         * AIP_VALIDATION_DATA_URI =
         "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
        
         * AIP_TEST_DATA_URI =
         "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/test-*.${AIP_DATA_FORMAT}"
         
        .google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 8;
        Specified by:
        getGcsDestination in interface InputDataConfigOrBuilder
        Returns:
        The gcsDestination.
      • getGcsDestinationOrBuilder

        public GcsDestinationOrBuilder getGcsDestinationOrBuilder()
          The Cloud Storage location to which the training data is to be
          written. In the given directory, a new directory is created with
         name:
         `dataset-<dataset-id>-<annotation-type>-<timestamp-of-training-call>`
         where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format.
         All training input data is written into that directory.
        
          The Vertex AI environment variables representing Cloud Storage
          data URIs are expressed in the Cloud Storage wildcard
          format to support sharded data, e.g. "gs://.../training-*.jsonl"
        
         * AIP_DATA_FORMAT = "jsonl" for non-tabular data, "csv" for tabular data
         * AIP_TRAINING_DATA_URI =
         "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/training-*.${AIP_DATA_FORMAT}"
        
         * AIP_VALIDATION_DATA_URI =
         "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/validation-*.${AIP_DATA_FORMAT}"
        
         * AIP_TEST_DATA_URI =
         "gcs_destination/dataset-<dataset-id>-<annotation-type>-<time>/test-*.${AIP_DATA_FORMAT}"
         
        .google.cloud.aiplatform.v1beta1.GcsDestination gcs_destination = 8;
        Specified by:
        getGcsDestinationOrBuilder in interface InputDataConfigOrBuilder
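        Inside a custom training container, the environment variables listed above can
        be read to locate the exported data. A hedged sketch; the variable names are
        taken from the field description, and the error handling is illustrative only.

          String dataFormat    = System.getenv("AIP_DATA_FORMAT");        // "jsonl" or "csv", per the description above
          String trainingUri   = System.getenv("AIP_TRAINING_DATA_URI");  // wildcard URI, e.g. gs://.../training-*.jsonl
          String validationUri = System.getenv("AIP_VALIDATION_DATA_URI");
          String testUri       = System.getenv("AIP_TEST_DATA_URI");
          if (trainingUri == null) {
            throw new IllegalStateException(
                "AIP_TRAINING_DATA_URI is not set; not running in a managed training job?");
          }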
      • hasBigqueryDestination

        public boolean hasBigqueryDestination()
          Only applicable to custom training with a tabular Dataset that has a
          BigQuery source.
        
          The BigQuery project location to which the training data is to be written.
          In the given project, a new dataset is created with the name
         `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
         where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
         input data is written into that dataset. In the dataset three
         tables are created, `training`, `validation` and `test`.
        
         * AIP_DATA_FORMAT = "bigquery".
         * AIP_TRAINING_DATA_URI  =
         "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
        
         * AIP_VALIDATION_DATA_URI =
         "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.validation"
        
         * AIP_TEST_DATA_URI =
         "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.test"
         
        .google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 10;
        Specified by:
        hasBigqueryDestination in interface InputDataConfigOrBuilder
        Returns:
        Whether the bigqueryDestination field is set.
      • getBigqueryDestination

        public BigQueryDestination getBigqueryDestination()
          Only applicable to custom training with a tabular Dataset that has a
          BigQuery source.
        
          The BigQuery project location to which the training data is to be written.
          In the given project, a new dataset is created with the name
         `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
         where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
         input data is written into that dataset. In the dataset three
         tables are created, `training`, `validation` and `test`.
        
         * AIP_DATA_FORMAT = "bigquery".
         * AIP_TRAINING_DATA_URI  =
         "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
        
         * AIP_VALIDATION_DATA_URI =
         "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.validation"
        
         * AIP_TEST_DATA_URI =
         "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.test"
         
        .google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 10;
        Specified by:
        getBigqueryDestination in interface InputDataConfigOrBuilder
        Returns:
        The bigqueryDestination.
      • getBigqueryDestinationOrBuilder

        public BigQueryDestinationOrBuilder getBigqueryDestinationOrBuilder()
          Only applicable to custom training with a tabular Dataset that has a
          BigQuery source.
        
          The BigQuery project location to which the training data is to be written.
          In the given project, a new dataset is created with the name
         `dataset_<dataset-id>_<annotation-type>_<timestamp-of-training-call>`
         where timestamp is in YYYY_MM_DDThh_mm_ss_sssZ format. All training
         input data is written into that dataset. In the dataset three
         tables are created, `training`, `validation` and `test`.
        
         * AIP_DATA_FORMAT = "bigquery".
         * AIP_TRAINING_DATA_URI  =
         "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.training"
        
         * AIP_VALIDATION_DATA_URI =
         "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.validation"
        
         * AIP_TEST_DATA_URI =
         "bigquery_destination.dataset_<dataset-id>_<annotation-type>_<time>.test"
         
        .google.cloud.aiplatform.v1beta1.BigQueryDestination bigquery_destination = 10;
        Specified by:
        getBigqueryDestinationOrBuilder in interface InputDataConfigOrBuilder
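        A hedged sketch of directing the exported tabular data to BigQuery; the output
        URI is a placeholder, and setOutputUri is assumed to be the generated setter for
        BigQueryDestination's output_uri field (that message is not documented on this page).

          InputDataConfig config =
              InputDataConfig.newBuilder()
                  .setDatasetId("1234567890")                // placeholder
                  .setBigqueryDestination(
                      BigQueryDestination.newBuilder()
                          .setOutputUri("bq://my-project"))  // placeholder project URI (assumed setter)
                  .build();
          // At training time the created tables are exposed through AIP_TRAINING_DATA_URI,
          // AIP_VALIDATION_DATA_URI, and AIP_TEST_DATA_URI, as described above.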
      • getDatasetId

        public String getDatasetId()
          Required. The ID of the Dataset in the same Project and Location whose data
          will be used to train the Model. The Dataset must use a schema compatible
          with the Model being trained; the compatible schemas are described in the
          TrainingPipeline's [training_task_definition]
          [google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition].
          For tabular Datasets, all of their data is exported for training, to pick
          and choose from.
         
        string dataset_id = 1 [(.google.api.field_behavior) = REQUIRED];
        Specified by:
        getDatasetId in interface InputDataConfigOrBuilder
        Returns:
        The datasetId.
      • getDatasetIdBytes

        public com.google.protobuf.ByteString getDatasetIdBytes()
          Required. The ID of the Dataset in the same Project and Location whose data
          will be used to train the Model. The Dataset must use a schema compatible
          with the Model being trained; the compatible schemas are described in the
          TrainingPipeline's [training_task_definition]
          [google.cloud.aiplatform.v1beta1.TrainingPipeline.training_task_definition].
          For tabular Datasets, all of their data is exported for training, to pick
          and choose from.
         
        string dataset_id = 1 [(.google.api.field_behavior) = REQUIRED];
        Specified by:
        getDatasetIdBytes in interface InputDataConfigOrBuilder
        Returns:
        The bytes for datasetId.
      • getAnnotationsFilter

        public String getAnnotationsFilter()
         Applicable only to Datasets that have DataItems and Annotations.
        
         A filter on Annotations of the Dataset. Only Annotations that both
         match this filter and belong to DataItems not ignored by the split method
          are used in the training, validation, or test role, respectively, depending on
          the role of the DataItem they are on (for auto-assigned DataItems that role is
          decided by Vertex AI). A filter with the same syntax as the one used in
          [ListAnnotations][google.cloud.aiplatform.v1beta1.DatasetService.ListAnnotations]
          may be used, but note that here it filters across all Annotations of the
          Dataset, not just within a single DataItem.
         
        string annotations_filter = 6;
        Specified by:
        getAnnotationsFilter in interface InputDataConfigOrBuilder
        Returns:
        The annotationsFilter.
      • getAnnotationsFilterBytes

        public com.google.protobuf.ByteString getAnnotationsFilterBytes()
         Applicable only to Datasets that have DataItems and Annotations.
        
         A filter on Annotations of the Dataset. Only Annotations that both
         match this filter and belong to DataItems not ignored by the split method
          are used in the training, validation, or test role, respectively, depending on
          the role of the DataItem they are on (for auto-assigned DataItems that role is
          decided by Vertex AI). A filter with the same syntax as the one used in
          [ListAnnotations][google.cloud.aiplatform.v1beta1.DatasetService.ListAnnotations]
          may be used, but note that here it filters across all Annotations of the
          Dataset, not just within a single DataItem.
         
        string annotations_filter = 6;
        Specified by:
        getAnnotationsFilterBytes in interface InputDataConfigOrBuilder
        Returns:
        The bytes for annotationsFilter.
      • getAnnotationSchemaUri

        public String getAnnotationSchemaUri()
         Applicable only to custom training with Datasets that have DataItems and
         Annotations.
        
         Cloud Storage URI that points to a YAML file describing the annotation
         schema. The schema is defined as an OpenAPI 3.0.2 [Schema
         Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject).
         The schema files that can be used here are found in
          gs://google-cloud-aiplatform/schema/dataset/annotation/. Note that the
         chosen schema must be consistent with
         [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of
         the Dataset specified by
         [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
        
         Only Annotations that both match this schema and belong to DataItems not
          ignored by the split method are used in the training, validation, or test
          role, respectively, depending on the role of the DataItem they are on.
        
         When used in conjunction with
         [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter],
         the Annotations used for training are filtered by both
         [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter]
         and
         [annotation_schema_uri][google.cloud.aiplatform.v1beta1.InputDataConfig.annotation_schema_uri].
         
        string annotation_schema_uri = 9;
        Specified by:
        getAnnotationSchemaUri in interface InputDataConfigOrBuilder
        Returns:
        The annotationSchemaUri.
      • getAnnotationSchemaUriBytes

        public com.google.protobuf.ByteString getAnnotationSchemaUriBytes()
         Applicable only to custom training with Datasets that have DataItems and
         Annotations.
        
         Cloud Storage URI that points to a YAML file describing the annotation
         schema. The schema is defined as an OpenAPI 3.0.2 [Schema
         Object](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject).
         The schema files that can be used here are found in
          gs://google-cloud-aiplatform/schema/dataset/annotation/. Note that the
         chosen schema must be consistent with
         [metadata][google.cloud.aiplatform.v1beta1.Dataset.metadata_schema_uri] of
         the Dataset specified by
         [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id].
        
         Only Annotations that both match this schema and belong to DataItems not
          ignored by the split method are used in the training, validation, or test
          role, respectively, depending on the role of the DataItem they are on.
        
         When used in conjunction with
         [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter],
         the Annotations used for training are filtered by both
         [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter]
         and
         [annotation_schema_uri][google.cloud.aiplatform.v1beta1.InputDataConfig.annotation_schema_uri].
         
        string annotation_schema_uri = 9;
        Specified by:
        getAnnotationSchemaUriBytes in interface InputDataConfigOrBuilder
        Returns:
        The bytes for annotationSchemaUri.
      • getSavedQueryId

        public String getSavedQueryId()
         Only applicable to Datasets that have SavedQueries.
        
         The ID of a SavedQuery (annotation set) under the Dataset specified by
         [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id]
         used for filtering Annotations for training.
        
          Only Annotations that are associated with this SavedQuery are used for
          training. When used in conjunction with
         [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter],
         the Annotations used for training are filtered by both
         [saved_query_id][google.cloud.aiplatform.v1beta1.InputDataConfig.saved_query_id]
         and
         [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter].
        
         Only one of
         [saved_query_id][google.cloud.aiplatform.v1beta1.InputDataConfig.saved_query_id]
         and
         [annotation_schema_uri][google.cloud.aiplatform.v1beta1.InputDataConfig.annotation_schema_uri]
          should be specified, as both of them represent the same thing: the problem type.
         
        string saved_query_id = 7;
        Specified by:
        getSavedQueryId in interface InputDataConfigOrBuilder
        Returns:
        The savedQueryId.
      • getSavedQueryIdBytes

        public com.google.protobuf.ByteString getSavedQueryIdBytes()
         Only applicable to Datasets that have SavedQueries.
        
         The ID of a SavedQuery (annotation set) under the Dataset specified by
         [dataset_id][google.cloud.aiplatform.v1beta1.InputDataConfig.dataset_id]
         used for filtering Annotations for training.
        
          Only Annotations that are associated with this SavedQuery are used for
          training. When used in conjunction with
         [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter],
         the Annotations used for training are filtered by both
         [saved_query_id][google.cloud.aiplatform.v1beta1.InputDataConfig.saved_query_id]
         and
         [annotations_filter][google.cloud.aiplatform.v1beta1.InputDataConfig.annotations_filter].
        
         Only one of
         [saved_query_id][google.cloud.aiplatform.v1beta1.InputDataConfig.saved_query_id]
         and
         [annotation_schema_uri][google.cloud.aiplatform.v1beta1.InputDataConfig.annotation_schema_uri]
          should be specified, as both of them represent the same thing: the problem type.
         
        string saved_query_id = 7;
        Specified by:
        getSavedQueryIdBytes in interface InputDataConfigOrBuilder
        Returns:
        The bytes for savedQueryId.
      • getPersistMlUseAssignment

        public boolean getPersistMlUseAssignment()
         Whether to persist the ML use assignment to data item system labels.
         
        bool persist_ml_use_assignment = 11;
        Specified by:
        getPersistMlUseAssignment in interface InputDataConfigOrBuilder
        Returns:
        The persistMlUseAssignment.
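        A hedged sketch combining the annotation-related fields above; the IDs and the
        filter expression are placeholders, and, per the descriptions, only one of
        saved_query_id and annotation_schema_uri should be set.

          InputDataConfig config =
              InputDataConfig.newBuilder()
                  .setDatasetId("1234567890")                    // placeholder
                  .setSavedQueryId("987654321")                  // placeholder annotation set ID
                  .setAnnotationsFilter("labels.approved=true")  // placeholder filter expression
                  .setPersistMlUseAssignment(true)               // persist the train/validation/test assignment
                  .build();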
      • isInitialized

        public final boolean isInitialized()
        Specified by:
        isInitialized in interface com.google.protobuf.MessageLiteOrBuilder
        Overrides:
        isInitialized in class com.google.protobuf.GeneratedMessageV3
      • writeTo

        public void writeTo​(com.google.protobuf.CodedOutputStream output)
                     throws IOException
        Specified by:
        writeTo in interface com.google.protobuf.MessageLite
        Overrides:
        writeTo in class com.google.protobuf.GeneratedMessageV3
        Throws:
        IOException
      • getSerializedSize

        public int getSerializedSize()
        Specified by:
        getSerializedSize in interface com.google.protobuf.MessageLite
        Overrides:
        getSerializedSize in class com.google.protobuf.GeneratedMessageV3
      • equals

        public boolean equals​(Object obj)
        Specified by:
        equals in interface com.google.protobuf.Message
        Overrides:
        equals in class com.google.protobuf.AbstractMessage
      • hashCode

        public int hashCode()
        Specified by:
        hashCode in interface com.google.protobuf.Message
        Overrides:
        hashCode in class com.google.protobuf.AbstractMessage
      • parseFrom

        public static InputDataConfig parseFrom​(ByteBuffer data)
                                         throws com.google.protobuf.InvalidProtocolBufferException
        Throws:
        com.google.protobuf.InvalidProtocolBufferException
      • parseFrom

        public static InputDataConfig parseFrom​(ByteBuffer data,
                                                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                                         throws com.google.protobuf.InvalidProtocolBufferException
        Throws:
        com.google.protobuf.InvalidProtocolBufferException
      • parseFrom

        public static InputDataConfig parseFrom​(com.google.protobuf.ByteString data)
                                         throws com.google.protobuf.InvalidProtocolBufferException
        Throws:
        com.google.protobuf.InvalidProtocolBufferException
      • parseFrom

        public static InputDataConfig parseFrom​(com.google.protobuf.ByteString data,
                                                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                                         throws com.google.protobuf.InvalidProtocolBufferException
        Throws:
        com.google.protobuf.InvalidProtocolBufferException
      • parseFrom

        public static InputDataConfig parseFrom​(byte[] data)
                                         throws com.google.protobuf.InvalidProtocolBufferException
        Throws:
        com.google.protobuf.InvalidProtocolBufferException
      • parseFrom

        public static InputDataConfig parseFrom​(byte[] data,
                                                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                                         throws com.google.protobuf.InvalidProtocolBufferException
        Throws:
        com.google.protobuf.InvalidProtocolBufferException
      • parseFrom

        public static InputDataConfig parseFrom​(com.google.protobuf.CodedInputStream input,
                                                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                                         throws IOException
        Throws:
        IOException
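        The parseFrom overloads above rebuild a message from its wire form. A minimal
        round-trip sketch, assuming config is an existing InputDataConfig:

          byte[] wireBytes = config.toByteArray();  // serialization inherited from GeneratedMessageV3
          try {
            InputDataConfig parsed = InputDataConfig.parseFrom(wireBytes);
            // parsed.equals(config) holds after a successful round trip
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // thrown when the bytes do not form a valid InputDataConfig
            throw new RuntimeException(e);
          }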
      • newBuilderForType

        public InputDataConfig.Builder newBuilderForType()
        Specified by:
        newBuilderForType in interface com.google.protobuf.Message
        Specified by:
        newBuilderForType in interface com.google.protobuf.MessageLite
      • toBuilder

        public InputDataConfig.Builder toBuilder()
        Specified by:
        toBuilder in interface com.google.protobuf.Message
        Specified by:
        toBuilder in interface com.google.protobuf.MessageLite
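        toBuilder() returns a builder pre-populated from an existing message, which is
        the usual way to derive a modified copy. A brief sketch:

          InputDataConfig updated =
              config.toBuilder()
                  .setPersistMlUseAssignment(true)  // change one field, keep the rest
                  .build();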
      • newBuilderForType

        protected InputDataConfig.Builder newBuilderForType​(com.google.protobuf.GeneratedMessageV3.BuilderParent parent)
        Specified by:
        newBuilderForType in class com.google.protobuf.GeneratedMessageV3
      • parser

        public static com.google.protobuf.Parser<InputDataConfig> parser()
      • getParserForType

        public com.google.protobuf.Parser<InputDataConfig> getParserForType()
        Specified by:
        getParserForType in interface com.google.protobuf.Message
        Specified by:
        getParserForType in interface com.google.protobuf.MessageLite
        Overrides:
        getParserForType in class com.google.protobuf.GeneratedMessageV3
      • getDefaultInstanceForType

        public InputDataConfig getDefaultInstanceForType()
        Specified by:
        getDefaultInstanceForType in interface com.google.protobuf.MessageLiteOrBuilder
        Specified by:
        getDefaultInstanceForType in interface com.google.protobuf.MessageOrBuilder