From a53df0d4600c1666cabf1818d550232f04b265c8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 12 Sep 2024 10:47:44 -0700 Subject: [PATCH] feat: [dataproc] add support for new Dataproc features (#5666) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add support for new Dataproc features 1. Allow flink job support for jobs 2. Add unreachable output field for LIST jobs API PiperOrigin-RevId: 672705294 Source-Link: https://github.com/googleapis/googleapis/commit/32bc03653260356351854429bd7e2dfbf670d352 Source-Link: https://github.com/googleapis/googleapis-gen/commit/46e7728c9908d9793ebce1061b0d1c6c4bad925b Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwcm9jLy5Pd2xCb3QueWFtbCIsImgiOiI0NmU3NzI4Yzk5MDhkOTc5M2ViY2UxMDYxYjBkMWM2YzRiYWQ5MjViIn0= * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: add support for new Dataproc features 1. Allow flink and trino job support for workflow templates API 2. Add unreachable output field for LIST workflow template API 3. Add unreachable output field for LIST batch templates API 4. Add kms key input for create cluster API 5. Add FLINK metric source for Dataproc Metric Source PiperOrigin-RevId: 673000575 Source-Link: https://github.com/googleapis/googleapis/commit/02f62c8e241a9f95f0d183785354e90e35388cbd Source-Link: https://github.com/googleapis/googleapis-gen/commit/7726f478ac19d91e914ed3ae546cf24453d000b5 Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFwcm9jLy5Pd2xCb3QueWFtbCIsImgiOiI3NzI2ZjQ3OGFjMTlkOTFlOTE0ZWQzYWU1NDZjZjI0NDUzZDAwMGI1In0= * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: sofisl <55454395+sofisl@users.noreply.github.com> --- .../google/cloud/dataproc/v1/batches.proto | 5 + .../google/cloud/dataproc/v1/clusters.proto | 31 + .../google/cloud/dataproc/v1/jobs.proto | 50 + .../dataproc/v1/workflow_templates.proto | 49 + .../google-cloud-dataproc/protos/protos.d.ts | 291 +++++- .../google-cloud-dataproc/protos/protos.js | 945 +++++++++++++++++- .../google-cloud-dataproc/protos/protos.json | 140 ++- 7 files changed, 1497 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/batches.proto b/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/batches.proto index b7411d2cada..e44af90c026 100644 --- a/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/batches.proto +++ b/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/batches.proto @@ -170,6 +170,11 @@ message ListBatchesResponse { // A token, which can be sent as `page_token` to retrieve the next page. // If this field is omitted, there are no subsequent pages. string next_page_token = 2; + + // Output only. List of Batches that could not be included in the response. + // Attempting to get one of these resources may indicate why it was not + // included in the list response. + repeated string unreachable = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to delete a batch workload. 
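Usage sketch (not part of the generated diff): the snippet below shows how the new surface in this change might be exercised from the Node.js client. It assumes the published @google-cloud/dataproc v1 JobControllerClient and the usual GAPIC pagination tuple (with autoPaginate disabled, the third element is the raw ListJobsResponse, which carries the new `unreachable` field); the project, region, cluster, bucket, and jar values are placeholders, not anything defined by this patch.

// Sketch only: exercises the new FlinkJob oneof member and the `unreachable`
// list field added in this change; values marked "placeholder" are illustrative.
import {v1, protos} from '@google-cloud/dataproc';

async function demo(): Promise<void> {
  const client = new v1.JobControllerClient({
    apiEndpoint: 'us-central1-dataproc.googleapis.com', // regional endpoint (placeholder region)
  });

  // Submit a Flink job via the new `flinkJob` member of the Job `typeJob` oneof.
  const job: protos.google.cloud.dataproc.v1.IJob = {
    placement: {clusterName: 'example-cluster'}, // placeholder cluster
    flinkJob: {
      mainClass: 'org.apache.flink.examples.java.wordcount.WordCount',
      jarFileUris: ['file:///usr/lib/flink/examples/batch/WordCount.jar'], // placeholder path
      args: ['--output', 'gs://example-bucket/wordcount/'],                // placeholder bucket
      properties: {'parallelism.default': '2'},
    },
  };
  const [submitted] = await client.submitJob({
    projectId: 'example-project', // placeholder
    region: 'us-central1',        // placeholder
    job,
  });
  console.log('submitted job', submitted.reference?.jobId);

  // List jobs one page at a time so the raw ListJobsResponse (and its new
  // `unreachable` field) is available as the third tuple element.
  const [jobs, , raw] = await client.listJobs(
    {projectId: 'example-project', region: 'us-central1'},
    {autoPaginate: false}
  );
  console.log(`listed ${jobs.length} jobs`);
  if (raw?.unreachable?.length) {
    console.warn('jobs that could not be returned:', raw.unreachable);
  }
}

demo().catch(console.error);

The same `unreachable` handling applies to ListBatchesResponse and ListWorkflowTemplatesResponse; on the cluster side, the CMEK addition surfaces as `config.encryptionConfig.kmsKey` on the cluster passed to createCluster, and workflow templates gain an analogous `encryptionConfig.kmsKey`.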
diff --git a/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/clusters.proto b/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/clusters.proto index 57a79bea912..92868618bb4 100644 --- a/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/clusters.proto +++ b/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/clusters.proto @@ -382,6 +382,34 @@ message EncryptionConfig { // Optional. The Cloud KMS key name to use for PD disk encryption for all // instances in the cluster. string gce_pd_kms_key_name = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud KMS key resource name to use for cluster persistent + // disk and job argument encryption. See [Use CMEK with cluster data] + // (https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/customer-managed-encryption#use_cmek_with_cluster_data) + // for more information. + // + // When this key resource name is provided, the following job arguments of + // jobs submitted to the cluster are encrypted using CMEK: + // + // * [FlinkJob + // args](https://cloud.google.com/dataproc/docs/reference/rest/v1/FlinkJob) + // * [HadoopJob + // args](https://cloud.google.com/dataproc/docs/reference/rest/v1/HadoopJob) + // * [SparkJob + // args](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkJob) + // * [SparkRJob + // args](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkRJob) + // * [PySparkJob + // args](https://cloud.google.com/dataproc/docs/reference/rest/v1/PySparkJob) + // * [SparkSqlJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkSqlJob) + // scriptVariables and queryList.queries + // * [HiveJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HiveJob) + // scriptVariables and queryList.queries + // * [PigJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PigJob) + // scriptVariables and queryList.queries + // * [PrestoJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PrestoJob) + // scriptVariables and queryList.queries + string kms_key = 2 [(google.api.field_behavior) = OPTIONAL]; } // Common config settings for resources of Compute Engine cluster @@ -1211,6 +1239,9 @@ message DataprocMetricConfig { // hivemetastore metric source HIVEMETASTORE = 7; + + // flink metric source + FLINK = 8; } // A Dataproc custom metric. diff --git a/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/jobs.proto b/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/jobs.proto index 656a42787ab..5decf10ea02 100644 --- a/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/jobs.proto +++ b/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/jobs.proto @@ -515,6 +515,47 @@ message TrinoJob { LoggingConfig logging_config = 7 [(google.api.field_behavior) = OPTIONAL]; } +// A Dataproc job for running Apache Flink applications on YARN. +message FlinkJob { + // Required. The specification of the main method to call to drive the job. + // Specify either the jar file that contains the main class or the main class + // name. To pass both a main jar and a main class in the jar, add the jar to + // [jarFileUris][google.cloud.dataproc.v1.FlinkJob.jar_file_uris], and then + // specify the main class name in + // [mainClass][google.cloud.dataproc.v1.FlinkJob.main_class]. + oneof driver { + // The HCFS URI of the jar file that contains the main class. + string main_jar_file_uri = 1; + + // The name of the driver's main class.
The jar file that contains the class + // must be in the default CLASSPATH or specified in + // [jarFileUris][google.cloud.dataproc.v1.FlinkJob.jar_file_uris]. + string main_class = 2; + } + + // Optional. The arguments to pass to the driver. Do not include arguments, + // such as `--conf`, that can be set as job properties, since a collision + // might occur that causes an incorrect job submission. + repeated string args = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the + // Flink driver and tasks. + repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. HCFS URI of the savepoint, which contains the last saved progress + // for starting the current job. + string savepoint_uri = 9 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A mapping of property names to values, used to configure Flink. + // Properties that conflict with values set by the Dataproc API might be + // overwritten. Can include properties set in + // /etc/flink/conf/flink-defaults.conf and classes in user code. + map<string, string> properties = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The runtime log config for job execution. + LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; +} + // Dataproc job config. message JobPlacement { // Required. The name of the cluster where the job will be submitted. @@ -722,6 +763,9 @@ message Job { // Optional. Job is a Trino job. TrinoJob trino_job = 28 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a Flink job. + FlinkJob flink_job = 29 [(google.api.field_behavior) = OPTIONAL]; } // Output only. The job status. Additional application-specific @@ -964,6 +1008,12 @@ message ListJobsResponse { // to fetch. To fetch additional results, provide this value as the // `page_token` in a subsequent ListJobsRequest. string next_page_token = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Output only. List of jobs with + // [kms_key][google.cloud.dataproc.v1.EncryptionConfig.kms_key]-encrypted + // parameters that could not be decrypted. A response to a `jobs.get` request + // may indicate the reason for the decryption failure for a specific job. + repeated string unreachable = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to cancel a job. diff --git a/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/workflow_templates.proto b/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/workflow_templates.proto index 8a3a15a0d94..c4671acd39d 100644 --- a/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/workflow_templates.proto +++ b/packages/google-cloud-dataproc/protos/google/cloud/dataproc/v1/workflow_templates.proto @@ -198,6 +198,39 @@ message WorkflowTemplate { history: ORIGINALLY_SINGLE_PATTERN }; + // Encryption settings for encrypting workflow template job arguments. + message EncryptionConfig { + // Optional. The Cloud KMS key name to use for encrypting + // workflow template job arguments.
+ // + // When this key is provided, the following workflow template + // [job arguments] + // (https://cloud.google.com/dataproc/docs/concepts/workflows/use-workflows#adding_jobs_to_a_template), + // if present, are + // [CMEK + // encrypted](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/customer-managed-encryption#use_cmek_with_workflow_template_data): + // + // * [FlinkJob + // args](https://cloud.google.com/dataproc/docs/reference/rest/v1/FlinkJob) + // * [HadoopJob + // args](https://cloud.google.com/dataproc/docs/reference/rest/v1/HadoopJob) + // * [SparkJob + // args](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkJob) + // * [SparkRJob + // args](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkRJob) + // * [PySparkJob + // args](https://cloud.google.com/dataproc/docs/reference/rest/v1/PySparkJob) + // * [SparkSqlJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkSqlJob) + // scriptVariables and queryList.queries + // * [HiveJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HiveJob) + // scriptVariables and queryList.queries + // * [PigJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PigJob) + // scriptVariables and queryList.queries + // * [PrestoJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PrestoJob) + // scriptVariables and queryList.queries + string kms_key = 1 [(google.api.field_behavior) = OPTIONAL]; + } + string id = 2 [(google.api.field_behavior) = REQUIRED]; // Output only. The resource name of the workflow template, as described @@ -270,6 +303,11 @@ message WorkflowTemplate { // the cluster is deleted. google.protobuf.Duration dag_timeout = 10 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Encryption settings for encrypting workflow template job + // arguments. + EncryptionConfig encryption_config = 11 + [(google.api.field_behavior) = OPTIONAL]; } // Specifies workflow execution target. @@ -371,6 +409,12 @@ message OrderedJob { // Optional. Job is a Presto job. PrestoJob presto_job = 12 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a Trino job. + TrinoJob trino_job = 13 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a Flink job. + FlinkJob flink_job = 14 [(google.api.field_behavior) = OPTIONAL]; } // Optional. The labels to associate with this job. @@ -806,6 +850,11 @@ message ListWorkflowTemplatesResponse { // results to fetch. To fetch additional results, provide this value as the // page_token in a subsequent ListWorkflowTemplatesRequest. string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. List of workflow templates that could not be included in the + // response. Attempting to get one of these resources may indicate why it was + // not included in the list response. + repeated string unreachable = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to delete a workflow template. diff --git a/packages/google-cloud-dataproc/protos/protos.d.ts b/packages/google-cloud-dataproc/protos/protos.d.ts index 0f9fd74b7bb..79e15480693 100644 --- a/packages/google-cloud-dataproc/protos/protos.d.ts +++ b/packages/google-cloud-dataproc/protos/protos.d.ts @@ -1676,6 +1676,9 @@ export namespace google { /** ListBatchesResponse nextPageToken */ nextPageToken?: (string|null); + + /** ListBatchesResponse unreachable */ + unreachable?: (string[]|null); } /** Represents a ListBatchesResponse.
*/ @@ -1693,6 +1696,9 @@ export namespace google { /** ListBatchesResponse nextPageToken. */ public nextPageToken: string; + /** ListBatchesResponse unreachable. */ + public unreachable: string[]; + /** * Creates a new ListBatchesResponse instance using the specified properties. * @param [properties] Properties to set @@ -5777,6 +5783,9 @@ export namespace google { /** EncryptionConfig gcePdKmsKeyName */ gcePdKmsKeyName?: (string|null); + + /** EncryptionConfig kmsKey */ + kmsKey?: (string|null); } /** Represents an EncryptionConfig. */ @@ -5791,6 +5800,9 @@ export namespace google { /** EncryptionConfig gcePdKmsKeyName. */ public gcePdKmsKeyName: string; + /** EncryptionConfig kmsKey. */ + public kmsKey: string; + /** * Creates a new EncryptionConfig instance using the specified properties. * @param [properties] Properties to set @@ -8796,7 +8808,8 @@ export namespace google { YARN = 4, SPARK_HISTORY_SERVER = 5, HIVESERVER2 = 6, - HIVEMETASTORE = 7 + HIVEMETASTORE = 7, + FLINK = 8 } /** Properties of a Metric. */ @@ -12538,6 +12551,142 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } + /** Properties of a FlinkJob. */ + interface IFlinkJob { + + /** FlinkJob mainJarFileUri */ + mainJarFileUri?: (string|null); + + /** FlinkJob mainClass */ + mainClass?: (string|null); + + /** FlinkJob args */ + args?: (string[]|null); + + /** FlinkJob jarFileUris */ + jarFileUris?: (string[]|null); + + /** FlinkJob savepointUri */ + savepointUri?: (string|null); + + /** FlinkJob properties */ + properties?: ({ [k: string]: string }|null); + + /** FlinkJob loggingConfig */ + loggingConfig?: (google.cloud.dataproc.v1.ILoggingConfig|null); + } + + /** Represents a FlinkJob. */ + class FlinkJob implements IFlinkJob { + + /** + * Constructs a new FlinkJob. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.dataproc.v1.IFlinkJob); + + /** FlinkJob mainJarFileUri. */ + public mainJarFileUri?: (string|null); + + /** FlinkJob mainClass. */ + public mainClass?: (string|null); + + /** FlinkJob args. */ + public args: string[]; + + /** FlinkJob jarFileUris. */ + public jarFileUris: string[]; + + /** FlinkJob savepointUri. */ + public savepointUri: string; + + /** FlinkJob properties. */ + public properties: { [k: string]: string }; + + /** FlinkJob loggingConfig. */ + public loggingConfig?: (google.cloud.dataproc.v1.ILoggingConfig|null); + + /** FlinkJob driver. */ + public driver?: ("mainJarFileUri"|"mainClass"); + + /** + * Creates a new FlinkJob instance using the specified properties. + * @param [properties] Properties to set + * @returns FlinkJob instance + */ + public static create(properties?: google.cloud.dataproc.v1.IFlinkJob): google.cloud.dataproc.v1.FlinkJob; + + /** + * Encodes the specified FlinkJob message. Does not implicitly {@link google.cloud.dataproc.v1.FlinkJob.verify|verify} messages. + * @param message FlinkJob message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.dataproc.v1.IFlinkJob, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified FlinkJob message, length delimited. Does not implicitly {@link google.cloud.dataproc.v1.FlinkJob.verify|verify} messages. 
+ * @param message FlinkJob message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.dataproc.v1.IFlinkJob, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a FlinkJob message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns FlinkJob + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.dataproc.v1.FlinkJob; + + /** + * Decodes a FlinkJob message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns FlinkJob + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.dataproc.v1.FlinkJob; + + /** + * Verifies a FlinkJob message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates a FlinkJob message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns FlinkJob + */ + public static fromObject(object: { [k: string]: any }): google.cloud.dataproc.v1.FlinkJob; + + /** + * Creates a plain object from a FlinkJob message. Also converts values to other types if specified. + * @param message FlinkJob + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.dataproc.v1.FlinkJob, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this FlinkJob to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for FlinkJob + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + /** Properties of a JobPlacement. */ interface IJobPlacement { @@ -13057,6 +13206,9 @@ export namespace google { /** Job trinoJob */ trinoJob?: (google.cloud.dataproc.v1.ITrinoJob|null); + /** Job flinkJob */ + flinkJob?: (google.cloud.dataproc.v1.IFlinkJob|null); + /** Job status */ status?: (google.cloud.dataproc.v1.IJobStatus|null); @@ -13130,6 +13282,9 @@ export namespace google { /** Job trinoJob. */ public trinoJob?: (google.cloud.dataproc.v1.ITrinoJob|null); + /** Job flinkJob. */ + public flinkJob?: (google.cloud.dataproc.v1.IFlinkJob|null); + /** Job status. */ public status?: (google.cloud.dataproc.v1.IJobStatus|null); @@ -13161,7 +13316,7 @@ export namespace google { public driverSchedulingConfig?: (google.cloud.dataproc.v1.IDriverSchedulingConfig|null); /** Job typeJob. */ - public typeJob?: ("hadoopJob"|"sparkJob"|"pysparkJob"|"hiveJob"|"pigJob"|"sparkRJob"|"sparkSqlJob"|"prestoJob"|"trinoJob"); + public typeJob?: ("hadoopJob"|"sparkJob"|"pysparkJob"|"hiveJob"|"pigJob"|"sparkRJob"|"sparkSqlJob"|"prestoJob"|"trinoJob"|"flinkJob"); /** * Creates a new Job instance using the specified properties. 
@@ -14058,6 +14213,9 @@ export namespace google { /** ListJobsResponse nextPageToken */ nextPageToken?: (string|null); + + /** ListJobsResponse unreachable */ + unreachable?: (string[]|null); } /** Represents a ListJobsResponse. */ @@ -14075,6 +14233,9 @@ export namespace google { /** ListJobsResponse nextPageToken. */ public nextPageToken: string; + /** ListJobsResponse unreachable. */ + public unreachable: string[]; + /** * Creates a new ListJobsResponse instance using the specified properties. * @param [properties] Properties to set @@ -17067,6 +17228,9 @@ export namespace google { /** WorkflowTemplate dagTimeout */ dagTimeout?: (google.protobuf.IDuration|null); + + /** WorkflowTemplate encryptionConfig */ + encryptionConfig?: (google.cloud.dataproc.v1.WorkflowTemplate.IEncryptionConfig|null); } /** Represents a WorkflowTemplate. */ @@ -17108,6 +17272,9 @@ export namespace google { /** WorkflowTemplate dagTimeout. */ public dagTimeout?: (google.protobuf.IDuration|null); + /** WorkflowTemplate encryptionConfig. */ + public encryptionConfig?: (google.cloud.dataproc.v1.WorkflowTemplate.IEncryptionConfig|null); + /** * Creates a new WorkflowTemplate instance using the specified properties. * @param [properties] Properties to set @@ -17186,6 +17353,106 @@ export namespace google { public static getTypeUrl(typeUrlPrefix?: string): string; } + namespace WorkflowTemplate { + + /** Properties of an EncryptionConfig. */ + interface IEncryptionConfig { + + /** EncryptionConfig kmsKey */ + kmsKey?: (string|null); + } + + /** Represents an EncryptionConfig. */ + class EncryptionConfig implements IEncryptionConfig { + + /** + * Constructs a new EncryptionConfig. + * @param [properties] Properties to set + */ + constructor(properties?: google.cloud.dataproc.v1.WorkflowTemplate.IEncryptionConfig); + + /** EncryptionConfig kmsKey. */ + public kmsKey: string; + + /** + * Creates a new EncryptionConfig instance using the specified properties. + * @param [properties] Properties to set + * @returns EncryptionConfig instance + */ + public static create(properties?: google.cloud.dataproc.v1.WorkflowTemplate.IEncryptionConfig): google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig; + + /** + * Encodes the specified EncryptionConfig message. Does not implicitly {@link google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig.verify|verify} messages. + * @param message EncryptionConfig message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: google.cloud.dataproc.v1.WorkflowTemplate.IEncryptionConfig, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Encodes the specified EncryptionConfig message, length delimited. Does not implicitly {@link google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig.verify|verify} messages. + * @param message EncryptionConfig message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encodeDelimited(message: google.cloud.dataproc.v1.WorkflowTemplate.IEncryptionConfig, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an EncryptionConfig message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns EncryptionConfig + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig; + + /** + * Decodes an EncryptionConfig message from the specified reader or buffer, length delimited. + * @param reader Reader or buffer to decode from + * @returns EncryptionConfig + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decodeDelimited(reader: ($protobuf.Reader|Uint8Array)): google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig; + + /** + * Verifies an EncryptionConfig message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + + /** + * Creates an EncryptionConfig message from a plain object. Also converts values to their respective internal types. + * @param object Plain object + * @returns EncryptionConfig + */ + public static fromObject(object: { [k: string]: any }): google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig; + + /** + * Creates a plain object from an EncryptionConfig message. Also converts values to other types if specified. + * @param message EncryptionConfig + * @param [options] Conversion options + * @returns Plain object + */ + public static toObject(message: google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig, options?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this EncryptionConfig to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + + /** + * Gets the default type url for EncryptionConfig + * @param [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns The default type url + */ + public static getTypeUrl(typeUrlPrefix?: string): string; + } + } + /** Properties of a WorkflowTemplatePlacement. */ interface IWorkflowTemplatePlacement { @@ -17534,6 +17801,12 @@ export namespace google { /** OrderedJob prestoJob */ prestoJob?: (google.cloud.dataproc.v1.IPrestoJob|null); + /** OrderedJob trinoJob */ + trinoJob?: (google.cloud.dataproc.v1.ITrinoJob|null); + + /** OrderedJob flinkJob */ + flinkJob?: (google.cloud.dataproc.v1.IFlinkJob|null); + /** OrderedJob labels */ labels?: ({ [k: string]: string }|null); @@ -17580,6 +17853,12 @@ export namespace google { /** OrderedJob prestoJob. */ public prestoJob?: (google.cloud.dataproc.v1.IPrestoJob|null); + /** OrderedJob trinoJob. */ + public trinoJob?: (google.cloud.dataproc.v1.ITrinoJob|null); + + /** OrderedJob flinkJob. */ + public flinkJob?: (google.cloud.dataproc.v1.IFlinkJob|null); + /** OrderedJob labels. */ public labels: { [k: string]: string }; @@ -17590,7 +17869,7 @@ export namespace google { public prerequisiteStepIds: string[]; /** OrderedJob jobType. */ - public jobType?: ("hadoopJob"|"sparkJob"|"pysparkJob"|"hiveJob"|"pigJob"|"sparkRJob"|"sparkSqlJob"|"prestoJob"); + public jobType?: ("hadoopJob"|"sparkJob"|"pysparkJob"|"hiveJob"|"pigJob"|"sparkRJob"|"sparkSqlJob"|"prestoJob"|"trinoJob"|"flinkJob"); /** * Creates a new OrderedJob instance using the specified properties. 
@@ -19255,6 +19534,9 @@ export namespace google { /** ListWorkflowTemplatesResponse nextPageToken */ nextPageToken?: (string|null); + + /** ListWorkflowTemplatesResponse unreachable */ + unreachable?: (string[]|null); } /** Represents a ListWorkflowTemplatesResponse. */ @@ -19272,6 +19554,9 @@ export namespace google { /** ListWorkflowTemplatesResponse nextPageToken. */ public nextPageToken: string; + /** ListWorkflowTemplatesResponse unreachable. */ + public unreachable: string[]; + /** * Creates a new ListWorkflowTemplatesResponse instance using the specified properties. * @param [properties] Properties to set diff --git a/packages/google-cloud-dataproc/protos/protos.js b/packages/google-cloud-dataproc/protos/protos.js index d2205061fdb..56b6a30027d 100644 --- a/packages/google-cloud-dataproc/protos/protos.js +++ b/packages/google-cloud-dataproc/protos/protos.js @@ -3759,6 +3759,7 @@ * @interface IListBatchesResponse * @property {Array.|null} [batches] ListBatchesResponse batches * @property {string|null} [nextPageToken] ListBatchesResponse nextPageToken + * @property {Array.|null} [unreachable] ListBatchesResponse unreachable */ /** @@ -3771,6 +3772,7 @@ */ function ListBatchesResponse(properties) { this.batches = []; + this.unreachable = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -3793,6 +3795,14 @@ */ ListBatchesResponse.prototype.nextPageToken = ""; + /** + * ListBatchesResponse unreachable. + * @member {Array.} unreachable + * @memberof google.cloud.dataproc.v1.ListBatchesResponse + * @instance + */ + ListBatchesResponse.prototype.unreachable = $util.emptyArray; + /** * Creates a new ListBatchesResponse instance using the specified properties. * @function create @@ -3822,6 +3832,9 @@ $root.google.cloud.dataproc.v1.Batch.encode(message.batches[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); if (message.nextPageToken != null && Object.hasOwnProperty.call(message, "nextPageToken")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.nextPageToken); + if (message.unreachable != null && message.unreachable.length) + for (var i = 0; i < message.unreachable.length; ++i) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.unreachable[i]); return writer; }; @@ -3866,6 +3879,12 @@ message.nextPageToken = reader.string(); break; } + case 3: { + if (!(message.unreachable && message.unreachable.length)) + message.unreachable = []; + message.unreachable.push(reader.string()); + break; + } default: reader.skipType(tag & 7); break; @@ -3913,6 +3932,13 @@ if (message.nextPageToken != null && message.hasOwnProperty("nextPageToken")) if (!$util.isString(message.nextPageToken)) return "nextPageToken: string expected"; + if (message.unreachable != null && message.hasOwnProperty("unreachable")) { + if (!Array.isArray(message.unreachable)) + return "unreachable: array expected"; + for (var i = 0; i < message.unreachable.length; ++i) + if (!$util.isString(message.unreachable[i])) + return "unreachable: string[] expected"; + } return null; }; @@ -3940,6 +3966,13 @@ } if (object.nextPageToken != null) message.nextPageToken = String(object.nextPageToken); + if (object.unreachable) { + if (!Array.isArray(object.unreachable)) + throw TypeError(".google.cloud.dataproc.v1.ListBatchesResponse.unreachable: array expected"); + message.unreachable = []; + for (var i = 0; i < object.unreachable.length; ++i) + message.unreachable[i] = String(object.unreachable[i]); + } return message; }; @@ -3956,8 +3989,10 @@ if 
(!options) options = {}; var object = {}; - if (options.arrays || options.defaults) + if (options.arrays || options.defaults) { object.batches = []; + object.unreachable = []; + } if (options.defaults) object.nextPageToken = ""; if (message.batches && message.batches.length) { @@ -3967,6 +4002,11 @@ } if (message.nextPageToken != null && message.hasOwnProperty("nextPageToken")) object.nextPageToken = message.nextPageToken; + if (message.unreachable && message.unreachable.length) { + object.unreachable = []; + for (var j = 0; j < message.unreachable.length; ++j) + object.unreachable[j] = message.unreachable[j]; + } return object; }; @@ -14683,6 +14723,7 @@ * @memberof google.cloud.dataproc.v1 * @interface IEncryptionConfig * @property {string|null} [gcePdKmsKeyName] EncryptionConfig gcePdKmsKeyName + * @property {string|null} [kmsKey] EncryptionConfig kmsKey */ /** @@ -14708,6 +14749,14 @@ */ EncryptionConfig.prototype.gcePdKmsKeyName = ""; + /** + * EncryptionConfig kmsKey. + * @member {string} kmsKey + * @memberof google.cloud.dataproc.v1.EncryptionConfig + * @instance + */ + EncryptionConfig.prototype.kmsKey = ""; + /** * Creates a new EncryptionConfig instance using the specified properties. * @function create @@ -14734,6 +14783,8 @@ writer = $Writer.create(); if (message.gcePdKmsKeyName != null && Object.hasOwnProperty.call(message, "gcePdKmsKeyName")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.gcePdKmsKeyName); + if (message.kmsKey != null && Object.hasOwnProperty.call(message, "kmsKey")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.kmsKey); return writer; }; @@ -14772,6 +14823,10 @@ message.gcePdKmsKeyName = reader.string(); break; } + case 2: { + message.kmsKey = reader.string(); + break; + } default: reader.skipType(tag & 7); break; @@ -14810,6 +14865,9 @@ if (message.gcePdKmsKeyName != null && message.hasOwnProperty("gcePdKmsKeyName")) if (!$util.isString(message.gcePdKmsKeyName)) return "gcePdKmsKeyName: string expected"; + if (message.kmsKey != null && message.hasOwnProperty("kmsKey")) + if (!$util.isString(message.kmsKey)) + return "kmsKey: string expected"; return null; }; @@ -14827,6 +14885,8 @@ var message = new $root.google.cloud.dataproc.v1.EncryptionConfig(); if (object.gcePdKmsKeyName != null) message.gcePdKmsKeyName = String(object.gcePdKmsKeyName); + if (object.kmsKey != null) + message.kmsKey = String(object.kmsKey); return message; }; @@ -14843,10 +14903,14 @@ if (!options) options = {}; var object = {}; - if (options.defaults) + if (options.defaults) { object.gcePdKmsKeyName = ""; + object.kmsKey = ""; + } if (message.gcePdKmsKeyName != null && message.hasOwnProperty("gcePdKmsKeyName")) object.gcePdKmsKeyName = message.gcePdKmsKeyName; + if (message.kmsKey != null && message.hasOwnProperty("kmsKey")) + object.kmsKey = message.kmsKey; return object; }; @@ -22580,6 +22644,7 @@ * @property {number} SPARK_HISTORY_SERVER=5 SPARK_HISTORY_SERVER value * @property {number} HIVESERVER2=6 HIVESERVER2 value * @property {number} HIVEMETASTORE=7 HIVEMETASTORE value + * @property {number} FLINK=8 FLINK value */ DataprocMetricConfig.MetricSource = (function() { var valuesById = {}, values = Object.create(valuesById); @@ -22591,6 +22656,7 @@ values[valuesById[5] = "SPARK_HISTORY_SERVER"] = 5; values[valuesById[6] = "HIVESERVER2"] = 6; values[valuesById[7] = "HIVEMETASTORE"] = 7; + values[valuesById[8] = "FLINK"] = 8; return values; })(); @@ -22756,6 +22822,7 @@ case 5: case 6: case 7: + case 8: break; } if (message.metricOverrides != null && 
message.hasOwnProperty("metricOverrides")) { @@ -22819,6 +22886,10 @@ case 7: message.metricSource = 7; break; + case "FLINK": + case 8: + message.metricSource = 8; + break; } if (object.metricOverrides) { if (!Array.isArray(object.metricOverrides)) @@ -33361,6 +33432,448 @@ return TrinoJob; })(); + v1.FlinkJob = (function() { + + /** + * Properties of a FlinkJob. + * @memberof google.cloud.dataproc.v1 + * @interface IFlinkJob + * @property {string|null} [mainJarFileUri] FlinkJob mainJarFileUri + * @property {string|null} [mainClass] FlinkJob mainClass + * @property {Array.|null} [args] FlinkJob args + * @property {Array.|null} [jarFileUris] FlinkJob jarFileUris + * @property {string|null} [savepointUri] FlinkJob savepointUri + * @property {Object.|null} [properties] FlinkJob properties + * @property {google.cloud.dataproc.v1.ILoggingConfig|null} [loggingConfig] FlinkJob loggingConfig + */ + + /** + * Constructs a new FlinkJob. + * @memberof google.cloud.dataproc.v1 + * @classdesc Represents a FlinkJob. + * @implements IFlinkJob + * @constructor + * @param {google.cloud.dataproc.v1.IFlinkJob=} [properties] Properties to set + */ + function FlinkJob(properties) { + this.args = []; + this.jarFileUris = []; + this.properties = {}; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * FlinkJob mainJarFileUri. + * @member {string|null|undefined} mainJarFileUri + * @memberof google.cloud.dataproc.v1.FlinkJob + * @instance + */ + FlinkJob.prototype.mainJarFileUri = null; + + /** + * FlinkJob mainClass. + * @member {string|null|undefined} mainClass + * @memberof google.cloud.dataproc.v1.FlinkJob + * @instance + */ + FlinkJob.prototype.mainClass = null; + + /** + * FlinkJob args. + * @member {Array.} args + * @memberof google.cloud.dataproc.v1.FlinkJob + * @instance + */ + FlinkJob.prototype.args = $util.emptyArray; + + /** + * FlinkJob jarFileUris. + * @member {Array.} jarFileUris + * @memberof google.cloud.dataproc.v1.FlinkJob + * @instance + */ + FlinkJob.prototype.jarFileUris = $util.emptyArray; + + /** + * FlinkJob savepointUri. + * @member {string} savepointUri + * @memberof google.cloud.dataproc.v1.FlinkJob + * @instance + */ + FlinkJob.prototype.savepointUri = ""; + + /** + * FlinkJob properties. + * @member {Object.} properties + * @memberof google.cloud.dataproc.v1.FlinkJob + * @instance + */ + FlinkJob.prototype.properties = $util.emptyObject; + + /** + * FlinkJob loggingConfig. + * @member {google.cloud.dataproc.v1.ILoggingConfig|null|undefined} loggingConfig + * @memberof google.cloud.dataproc.v1.FlinkJob + * @instance + */ + FlinkJob.prototype.loggingConfig = null; + + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * FlinkJob driver. + * @member {"mainJarFileUri"|"mainClass"|undefined} driver + * @memberof google.cloud.dataproc.v1.FlinkJob + * @instance + */ + Object.defineProperty(FlinkJob.prototype, "driver", { + get: $util.oneOfGetter($oneOfFields = ["mainJarFileUri", "mainClass"]), + set: $util.oneOfSetter($oneOfFields) + }); + + /** + * Creates a new FlinkJob instance using the specified properties. 
+ * @function create + * @memberof google.cloud.dataproc.v1.FlinkJob + * @static + * @param {google.cloud.dataproc.v1.IFlinkJob=} [properties] Properties to set + * @returns {google.cloud.dataproc.v1.FlinkJob} FlinkJob instance + */ + FlinkJob.create = function create(properties) { + return new FlinkJob(properties); + }; + + /** + * Encodes the specified FlinkJob message. Does not implicitly {@link google.cloud.dataproc.v1.FlinkJob.verify|verify} messages. + * @function encode + * @memberof google.cloud.dataproc.v1.FlinkJob + * @static + * @param {google.cloud.dataproc.v1.IFlinkJob} message FlinkJob message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FlinkJob.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.mainJarFileUri != null && Object.hasOwnProperty.call(message, "mainJarFileUri")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.mainJarFileUri); + if (message.mainClass != null && Object.hasOwnProperty.call(message, "mainClass")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.mainClass); + if (message.args != null && message.args.length) + for (var i = 0; i < message.args.length; ++i) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.args[i]); + if (message.jarFileUris != null && message.jarFileUris.length) + for (var i = 0; i < message.jarFileUris.length; ++i) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.jarFileUris[i]); + if (message.properties != null && Object.hasOwnProperty.call(message, "properties")) + for (var keys = Object.keys(message.properties), i = 0; i < keys.length; ++i) + writer.uint32(/* id 7, wireType 2 =*/58).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]).uint32(/* id 2, wireType 2 =*/18).string(message.properties[keys[i]]).ldelim(); + if (message.loggingConfig != null && Object.hasOwnProperty.call(message, "loggingConfig")) + $root.google.cloud.dataproc.v1.LoggingConfig.encode(message.loggingConfig, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + if (message.savepointUri != null && Object.hasOwnProperty.call(message, "savepointUri")) + writer.uint32(/* id 9, wireType 2 =*/74).string(message.savepointUri); + return writer; + }; + + /** + * Encodes the specified FlinkJob message, length delimited. Does not implicitly {@link google.cloud.dataproc.v1.FlinkJob.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.dataproc.v1.FlinkJob + * @static + * @param {google.cloud.dataproc.v1.IFlinkJob} message FlinkJob message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + FlinkJob.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes a FlinkJob message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.dataproc.v1.FlinkJob + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.dataproc.v1.FlinkJob} FlinkJob + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FlinkJob.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.google.cloud.dataproc.v1.FlinkJob(), key, value; + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.mainJarFileUri = reader.string(); + break; + } + case 2: { + message.mainClass = reader.string(); + break; + } + case 3: { + if (!(message.args && message.args.length)) + message.args = []; + message.args.push(reader.string()); + break; + } + case 4: { + if (!(message.jarFileUris && message.jarFileUris.length)) + message.jarFileUris = []; + message.jarFileUris.push(reader.string()); + break; + } + case 9: { + message.savepointUri = reader.string(); + break; + } + case 7: { + if (message.properties === $util.emptyObject) + message.properties = {}; + var end2 = reader.uint32() + reader.pos; + key = ""; + value = ""; + while (reader.pos < end2) { + var tag2 = reader.uint32(); + switch (tag2 >>> 3) { + case 1: + key = reader.string(); + break; + case 2: + value = reader.string(); + break; + default: + reader.skipType(tag2 & 7); + break; + } + } + message.properties[key] = value; + break; + } + case 8: { + message.loggingConfig = $root.google.cloud.dataproc.v1.LoggingConfig.decode(reader, reader.uint32()); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes a FlinkJob message from the specified reader or buffer, length delimited. + * @function decodeDelimited + * @memberof google.cloud.dataproc.v1.FlinkJob + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.dataproc.v1.FlinkJob} FlinkJob + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + FlinkJob.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies a FlinkJob message. 
+ * @function verify + * @memberof google.cloud.dataproc.v1.FlinkJob + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + FlinkJob.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + var properties = {}; + if (message.mainJarFileUri != null && message.hasOwnProperty("mainJarFileUri")) { + properties.driver = 1; + if (!$util.isString(message.mainJarFileUri)) + return "mainJarFileUri: string expected"; + } + if (message.mainClass != null && message.hasOwnProperty("mainClass")) { + if (properties.driver === 1) + return "driver: multiple values"; + properties.driver = 1; + if (!$util.isString(message.mainClass)) + return "mainClass: string expected"; + } + if (message.args != null && message.hasOwnProperty("args")) { + if (!Array.isArray(message.args)) + return "args: array expected"; + for (var i = 0; i < message.args.length; ++i) + if (!$util.isString(message.args[i])) + return "args: string[] expected"; + } + if (message.jarFileUris != null && message.hasOwnProperty("jarFileUris")) { + if (!Array.isArray(message.jarFileUris)) + return "jarFileUris: array expected"; + for (var i = 0; i < message.jarFileUris.length; ++i) + if (!$util.isString(message.jarFileUris[i])) + return "jarFileUris: string[] expected"; + } + if (message.savepointUri != null && message.hasOwnProperty("savepointUri")) + if (!$util.isString(message.savepointUri)) + return "savepointUri: string expected"; + if (message.properties != null && message.hasOwnProperty("properties")) { + if (!$util.isObject(message.properties)) + return "properties: object expected"; + var key = Object.keys(message.properties); + for (var i = 0; i < key.length; ++i) + if (!$util.isString(message.properties[key[i]])) + return "properties: string{k:string} expected"; + } + if (message.loggingConfig != null && message.hasOwnProperty("loggingConfig")) { + var error = $root.google.cloud.dataproc.v1.LoggingConfig.verify(message.loggingConfig); + if (error) + return "loggingConfig." + error; + } + return null; + }; + + /** + * Creates a FlinkJob message from a plain object. Also converts values to their respective internal types. 
+ * @function fromObject + * @memberof google.cloud.dataproc.v1.FlinkJob + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.dataproc.v1.FlinkJob} FlinkJob + */ + FlinkJob.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.dataproc.v1.FlinkJob) + return object; + var message = new $root.google.cloud.dataproc.v1.FlinkJob(); + if (object.mainJarFileUri != null) + message.mainJarFileUri = String(object.mainJarFileUri); + if (object.mainClass != null) + message.mainClass = String(object.mainClass); + if (object.args) { + if (!Array.isArray(object.args)) + throw TypeError(".google.cloud.dataproc.v1.FlinkJob.args: array expected"); + message.args = []; + for (var i = 0; i < object.args.length; ++i) + message.args[i] = String(object.args[i]); + } + if (object.jarFileUris) { + if (!Array.isArray(object.jarFileUris)) + throw TypeError(".google.cloud.dataproc.v1.FlinkJob.jarFileUris: array expected"); + message.jarFileUris = []; + for (var i = 0; i < object.jarFileUris.length; ++i) + message.jarFileUris[i] = String(object.jarFileUris[i]); + } + if (object.savepointUri != null) + message.savepointUri = String(object.savepointUri); + if (object.properties) { + if (typeof object.properties !== "object") + throw TypeError(".google.cloud.dataproc.v1.FlinkJob.properties: object expected"); + message.properties = {}; + for (var keys = Object.keys(object.properties), i = 0; i < keys.length; ++i) + message.properties[keys[i]] = String(object.properties[keys[i]]); + } + if (object.loggingConfig != null) { + if (typeof object.loggingConfig !== "object") + throw TypeError(".google.cloud.dataproc.v1.FlinkJob.loggingConfig: object expected"); + message.loggingConfig = $root.google.cloud.dataproc.v1.LoggingConfig.fromObject(object.loggingConfig); + } + return message; + }; + + /** + * Creates a plain object from a FlinkJob message. Also converts values to other types if specified. 
+ * @function toObject + * @memberof google.cloud.dataproc.v1.FlinkJob + * @static + * @param {google.cloud.dataproc.v1.FlinkJob} message FlinkJob + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + FlinkJob.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.arrays || options.defaults) { + object.args = []; + object.jarFileUris = []; + } + if (options.objects || options.defaults) + object.properties = {}; + if (options.defaults) { + object.loggingConfig = null; + object.savepointUri = ""; + } + if (message.mainJarFileUri != null && message.hasOwnProperty("mainJarFileUri")) { + object.mainJarFileUri = message.mainJarFileUri; + if (options.oneofs) + object.driver = "mainJarFileUri"; + } + if (message.mainClass != null && message.hasOwnProperty("mainClass")) { + object.mainClass = message.mainClass; + if (options.oneofs) + object.driver = "mainClass"; + } + if (message.args && message.args.length) { + object.args = []; + for (var j = 0; j < message.args.length; ++j) + object.args[j] = message.args[j]; + } + if (message.jarFileUris && message.jarFileUris.length) { + object.jarFileUris = []; + for (var j = 0; j < message.jarFileUris.length; ++j) + object.jarFileUris[j] = message.jarFileUris[j]; + } + var keys2; + if (message.properties && (keys2 = Object.keys(message.properties)).length) { + object.properties = {}; + for (var j = 0; j < keys2.length; ++j) + object.properties[keys2[j]] = message.properties[keys2[j]]; + } + if (message.loggingConfig != null && message.hasOwnProperty("loggingConfig")) + object.loggingConfig = $root.google.cloud.dataproc.v1.LoggingConfig.toObject(message.loggingConfig, options); + if (message.savepointUri != null && message.hasOwnProperty("savepointUri")) + object.savepointUri = message.savepointUri; + return object; + }; + + /** + * Converts this FlinkJob to JSON. + * @function toJSON + * @memberof google.cloud.dataproc.v1.FlinkJob + * @instance + * @returns {Object.} JSON object + */ + FlinkJob.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for FlinkJob + * @function getTypeUrl + * @memberof google.cloud.dataproc.v1.FlinkJob + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + FlinkJob.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.dataproc.v1.FlinkJob"; + }; + + return FlinkJob; + })(); + v1.JobPlacement = (function() { /** @@ -34660,6 +35173,7 @@ * @property {google.cloud.dataproc.v1.ISparkSqlJob|null} [sparkSqlJob] Job sparkSqlJob * @property {google.cloud.dataproc.v1.IPrestoJob|null} [prestoJob] Job prestoJob * @property {google.cloud.dataproc.v1.ITrinoJob|null} [trinoJob] Job trinoJob + * @property {google.cloud.dataproc.v1.IFlinkJob|null} [flinkJob] Job flinkJob * @property {google.cloud.dataproc.v1.IJobStatus|null} [status] Job status * @property {Array.|null} [statusHistory] Job statusHistory * @property {Array.|null} [yarnApplications] Job yarnApplications @@ -34778,6 +35292,14 @@ */ Job.prototype.trinoJob = null; + /** + * Job flinkJob. 
+ * @member {google.cloud.dataproc.v1.IFlinkJob|null|undefined} flinkJob + * @memberof google.cloud.dataproc.v1.Job + * @instance + */ + Job.prototype.flinkJob = null; + /** * Job status. * @member {google.cloud.dataproc.v1.IJobStatus|null|undefined} status @@ -34863,12 +35385,12 @@ /** * Job typeJob. - * @member {"hadoopJob"|"sparkJob"|"pysparkJob"|"hiveJob"|"pigJob"|"sparkRJob"|"sparkSqlJob"|"prestoJob"|"trinoJob"|undefined} typeJob + * @member {"hadoopJob"|"sparkJob"|"pysparkJob"|"hiveJob"|"pigJob"|"sparkRJob"|"sparkSqlJob"|"prestoJob"|"trinoJob"|"flinkJob"|undefined} typeJob * @memberof google.cloud.dataproc.v1.Job * @instance */ Object.defineProperty(Job.prototype, "typeJob", { - get: $util.oneOfGetter($oneOfFields = ["hadoopJob", "sparkJob", "pysparkJob", "hiveJob", "pigJob", "sparkRJob", "sparkSqlJob", "prestoJob", "trinoJob"]), + get: $util.oneOfGetter($oneOfFields = ["hadoopJob", "sparkJob", "pysparkJob", "hiveJob", "pigJob", "sparkRJob", "sparkSqlJob", "prestoJob", "trinoJob", "flinkJob"]), set: $util.oneOfSetter($oneOfFields) }); @@ -34941,6 +35463,8 @@ $root.google.cloud.dataproc.v1.DriverSchedulingConfig.encode(message.driverSchedulingConfig, writer.uint32(/* id 27, wireType 2 =*/218).fork()).ldelim(); if (message.trinoJob != null && Object.hasOwnProperty.call(message, "trinoJob")) $root.google.cloud.dataproc.v1.TrinoJob.encode(message.trinoJob, writer.uint32(/* id 28, wireType 2 =*/226).fork()).ldelim(); + if (message.flinkJob != null && Object.hasOwnProperty.call(message, "flinkJob")) + $root.google.cloud.dataproc.v1.FlinkJob.encode(message.flinkJob, writer.uint32(/* id 29, wireType 2 =*/234).fork()).ldelim(); return writer; }; @@ -35019,6 +35543,10 @@ message.trinoJob = $root.google.cloud.dataproc.v1.TrinoJob.decode(reader, reader.uint32()); break; } + case 29: { + message.flinkJob = $root.google.cloud.dataproc.v1.FlinkJob.decode(reader, reader.uint32()); + break; + } case 8: { message.status = $root.google.cloud.dataproc.v1.JobStatus.decode(reader, reader.uint32()); break; @@ -35216,6 +35744,16 @@ return "trinoJob." + error; } } + if (message.flinkJob != null && message.hasOwnProperty("flinkJob")) { + if (properties.typeJob === 1) + return "typeJob: multiple values"; + properties.typeJob = 1; + { + var error = $root.google.cloud.dataproc.v1.FlinkJob.verify(message.flinkJob); + if (error) + return "flinkJob." 
+ error; + } + } if (message.status != null && message.hasOwnProperty("status")) { var error = $root.google.cloud.dataproc.v1.JobStatus.verify(message.status); if (error) @@ -35339,6 +35877,11 @@ throw TypeError(".google.cloud.dataproc.v1.Job.trinoJob: object expected"); message.trinoJob = $root.google.cloud.dataproc.v1.TrinoJob.fromObject(object.trinoJob); } + if (object.flinkJob != null) { + if (typeof object.flinkJob !== "object") + throw TypeError(".google.cloud.dataproc.v1.Job.flinkJob: object expected"); + message.flinkJob = $root.google.cloud.dataproc.v1.FlinkJob.fromObject(object.flinkJob); + } if (object.status != null) { if (typeof object.status !== "object") throw TypeError(".google.cloud.dataproc.v1.Job.status: object expected"); @@ -35501,6 +36044,11 @@ if (options.oneofs) object.typeJob = "trinoJob"; } + if (message.flinkJob != null && message.hasOwnProperty("flinkJob")) { + object.flinkJob = $root.google.cloud.dataproc.v1.FlinkJob.toObject(message.flinkJob, options); + if (options.oneofs) + object.typeJob = "flinkJob"; + } return object; }; @@ -37494,6 +38042,7 @@ * @interface IListJobsResponse * @property {Array.|null} [jobs] ListJobsResponse jobs * @property {string|null} [nextPageToken] ListJobsResponse nextPageToken + * @property {Array.|null} [unreachable] ListJobsResponse unreachable */ /** @@ -37506,6 +38055,7 @@ */ function ListJobsResponse(properties) { this.jobs = []; + this.unreachable = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -37528,6 +38078,14 @@ */ ListJobsResponse.prototype.nextPageToken = ""; + /** + * ListJobsResponse unreachable. + * @member {Array.} unreachable + * @memberof google.cloud.dataproc.v1.ListJobsResponse + * @instance + */ + ListJobsResponse.prototype.unreachable = $util.emptyArray; + /** * Creates a new ListJobsResponse instance using the specified properties. 
* @function create @@ -37557,6 +38115,9 @@ $root.google.cloud.dataproc.v1.Job.encode(message.jobs[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); if (message.nextPageToken != null && Object.hasOwnProperty.call(message, "nextPageToken")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.nextPageToken); + if (message.unreachable != null && message.unreachable.length) + for (var i = 0; i < message.unreachable.length; ++i) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.unreachable[i]); return writer; }; @@ -37601,6 +38162,12 @@ message.nextPageToken = reader.string(); break; } + case 3: { + if (!(message.unreachable && message.unreachable.length)) + message.unreachable = []; + message.unreachable.push(reader.string()); + break; + } default: reader.skipType(tag & 7); break; @@ -37648,6 +38215,13 @@ if (message.nextPageToken != null && message.hasOwnProperty("nextPageToken")) if (!$util.isString(message.nextPageToken)) return "nextPageToken: string expected"; + if (message.unreachable != null && message.hasOwnProperty("unreachable")) { + if (!Array.isArray(message.unreachable)) + return "unreachable: array expected"; + for (var i = 0; i < message.unreachable.length; ++i) + if (!$util.isString(message.unreachable[i])) + return "unreachable: string[] expected"; + } return null; }; @@ -37675,6 +38249,13 @@ } if (object.nextPageToken != null) message.nextPageToken = String(object.nextPageToken); + if (object.unreachable) { + if (!Array.isArray(object.unreachable)) + throw TypeError(".google.cloud.dataproc.v1.ListJobsResponse.unreachable: array expected"); + message.unreachable = []; + for (var i = 0; i < object.unreachable.length; ++i) + message.unreachable[i] = String(object.unreachable[i]); + } return message; }; @@ -37691,8 +38272,10 @@ if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) + if (options.arrays || options.defaults) { object.jobs = []; + object.unreachable = []; + } if (options.defaults) object.nextPageToken = ""; if (message.jobs && message.jobs.length) { @@ -37702,6 +38285,11 @@ } if (message.nextPageToken != null && message.hasOwnProperty("nextPageToken")) object.nextPageToken = message.nextPageToken; + if (message.unreachable && message.unreachable.length) { + object.unreachable = []; + for (var j = 0; j < message.unreachable.length; ++j) + object.unreachable[j] = message.unreachable[j]; + } return object; }; @@ -44380,6 +44968,7 @@ * @property {Array.|null} [jobs] WorkflowTemplate jobs * @property {Array.|null} [parameters] WorkflowTemplate parameters * @property {google.protobuf.IDuration|null} [dagTimeout] WorkflowTemplate dagTimeout + * @property {google.cloud.dataproc.v1.WorkflowTemplate.IEncryptionConfig|null} [encryptionConfig] WorkflowTemplate encryptionConfig */ /** @@ -44480,6 +45069,14 @@ */ WorkflowTemplate.prototype.dagTimeout = null; + /** + * WorkflowTemplate encryptionConfig. + * @member {google.cloud.dataproc.v1.WorkflowTemplate.IEncryptionConfig|null|undefined} encryptionConfig + * @memberof google.cloud.dataproc.v1.WorkflowTemplate + * @instance + */ + WorkflowTemplate.prototype.encryptionConfig = null; + /** * Creates a new WorkflowTemplate instance using the specified properties. 
* @function create @@ -44527,6 +45124,8 @@ $root.google.cloud.dataproc.v1.TemplateParameter.encode(message.parameters[i], writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); if (message.dagTimeout != null && Object.hasOwnProperty.call(message, "dagTimeout")) $root.google.protobuf.Duration.encode(message.dagTimeout, writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim(); + if (message.encryptionConfig != null && Object.hasOwnProperty.call(message, "encryptionConfig")) + $root.google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig.encode(message.encryptionConfig, writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim(); return writer; }; @@ -44624,6 +45223,10 @@ message.dagTimeout = $root.google.protobuf.Duration.decode(reader, reader.uint32()); break; } + case 11: { + message.encryptionConfig = $root.google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig.decode(reader, reader.uint32()); + break; + } default: reader.skipType(tag & 7); break; @@ -44714,6 +45317,11 @@ if (error) return "dagTimeout." + error; } + if (message.encryptionConfig != null && message.hasOwnProperty("encryptionConfig")) { + var error = $root.google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig.verify(message.encryptionConfig); + if (error) + return "encryptionConfig." + error; + } return null; }; @@ -44782,6 +45390,11 @@ throw TypeError(".google.cloud.dataproc.v1.WorkflowTemplate.dagTimeout: object expected"); message.dagTimeout = $root.google.protobuf.Duration.fromObject(object.dagTimeout); } + if (object.encryptionConfig != null) { + if (typeof object.encryptionConfig !== "object") + throw TypeError(".google.cloud.dataproc.v1.WorkflowTemplate.encryptionConfig: object expected"); + message.encryptionConfig = $root.google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig.fromObject(object.encryptionConfig); + } return message; }; @@ -44812,6 +45425,7 @@ object.updateTime = null; object.placement = null; object.dagTimeout = null; + object.encryptionConfig = null; } if (message.name != null && message.hasOwnProperty("name")) object.name = message.name; @@ -44843,6 +45457,8 @@ } if (message.dagTimeout != null && message.hasOwnProperty("dagTimeout")) object.dagTimeout = $root.google.protobuf.Duration.toObject(message.dagTimeout, options); + if (message.encryptionConfig != null && message.hasOwnProperty("encryptionConfig")) + object.encryptionConfig = $root.google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig.toObject(message.encryptionConfig, options); return object; }; @@ -44872,6 +45488,209 @@ return typeUrlPrefix + "/google.cloud.dataproc.v1.WorkflowTemplate"; }; + WorkflowTemplate.EncryptionConfig = (function() { + + /** + * Properties of an EncryptionConfig. + * @memberof google.cloud.dataproc.v1.WorkflowTemplate + * @interface IEncryptionConfig + * @property {string|null} [kmsKey] EncryptionConfig kmsKey + */ + + /** + * Constructs a new EncryptionConfig. + * @memberof google.cloud.dataproc.v1.WorkflowTemplate + * @classdesc Represents an EncryptionConfig. + * @implements IEncryptionConfig + * @constructor + * @param {google.cloud.dataproc.v1.WorkflowTemplate.IEncryptionConfig=} [properties] Properties to set + */ + function EncryptionConfig(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * EncryptionConfig kmsKey. 
+ * @member {string} kmsKey + * @memberof google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig + * @instance + */ + EncryptionConfig.prototype.kmsKey = ""; + + /** + * Creates a new EncryptionConfig instance using the specified properties. + * @function create + * @memberof google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig + * @static + * @param {google.cloud.dataproc.v1.WorkflowTemplate.IEncryptionConfig=} [properties] Properties to set + * @returns {google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig} EncryptionConfig instance + */ + EncryptionConfig.create = function create(properties) { + return new EncryptionConfig(properties); + }; + + /** + * Encodes the specified EncryptionConfig message. Does not implicitly {@link google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig.verify|verify} messages. + * @function encode + * @memberof google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig + * @static + * @param {google.cloud.dataproc.v1.WorkflowTemplate.IEncryptionConfig} message EncryptionConfig message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EncryptionConfig.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.kmsKey != null && Object.hasOwnProperty.call(message, "kmsKey")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.kmsKey); + return writer; + }; + + /** + * Encodes the specified EncryptionConfig message, length delimited. Does not implicitly {@link google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig.verify|verify} messages. + * @function encodeDelimited + * @memberof google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig + * @static + * @param {google.cloud.dataproc.v1.WorkflowTemplate.IEncryptionConfig} message EncryptionConfig message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + EncryptionConfig.encodeDelimited = function encodeDelimited(message, writer) { + return this.encode(message, writer).ldelim(); + }; + + /** + * Decodes an EncryptionConfig message from the specified reader or buffer. + * @function decode + * @memberof google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig} EncryptionConfig + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EncryptionConfig.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: { + message.kmsKey = reader.string(); + break; + } + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Decodes an EncryptionConfig message from the specified reader or buffer, length delimited. 
+ * @function decodeDelimited + * @memberof google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @returns {google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig} EncryptionConfig + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + EncryptionConfig.decodeDelimited = function decodeDelimited(reader) { + if (!(reader instanceof $Reader)) + reader = new $Reader(reader); + return this.decode(reader, reader.uint32()); + }; + + /** + * Verifies an EncryptionConfig message. + * @function verify + * @memberof google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + EncryptionConfig.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.kmsKey != null && message.hasOwnProperty("kmsKey")) + if (!$util.isString(message.kmsKey)) + return "kmsKey: string expected"; + return null; + }; + + /** + * Creates an EncryptionConfig message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig + * @static + * @param {Object.} object Plain object + * @returns {google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig} EncryptionConfig + */ + EncryptionConfig.fromObject = function fromObject(object) { + if (object instanceof $root.google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig) + return object; + var message = new $root.google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig(); + if (object.kmsKey != null) + message.kmsKey = String(object.kmsKey); + return message; + }; + + /** + * Creates a plain object from an EncryptionConfig message. Also converts values to other types if specified. + * @function toObject + * @memberof google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig + * @static + * @param {google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig} message EncryptionConfig + * @param {$protobuf.IConversionOptions} [options] Conversion options + * @returns {Object.} Plain object + */ + EncryptionConfig.toObject = function toObject(message, options) { + if (!options) + options = {}; + var object = {}; + if (options.defaults) + object.kmsKey = ""; + if (message.kmsKey != null && message.hasOwnProperty("kmsKey")) + object.kmsKey = message.kmsKey; + return object; + }; + + /** + * Converts this EncryptionConfig to JSON. 
+ * @function toJSON + * @memberof google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig + * @instance + * @returns {Object.} JSON object + */ + EncryptionConfig.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + /** + * Gets the default type url for EncryptionConfig + * @function getTypeUrl + * @memberof google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig + * @static + * @param {string} [typeUrlPrefix] your custom typeUrlPrefix(default "type.googleapis.com") + * @returns {string} The default type url + */ + EncryptionConfig.getTypeUrl = function getTypeUrl(typeUrlPrefix) { + if (typeUrlPrefix === undefined) { + typeUrlPrefix = "type.googleapis.com"; + } + return typeUrlPrefix + "/google.cloud.dataproc.v1.WorkflowTemplate.EncryptionConfig"; + }; + + return EncryptionConfig; + })(); + return WorkflowTemplate; })(); @@ -45705,6 +46524,8 @@ * @property {google.cloud.dataproc.v1.ISparkRJob|null} [sparkRJob] OrderedJob sparkRJob * @property {google.cloud.dataproc.v1.ISparkSqlJob|null} [sparkSqlJob] OrderedJob sparkSqlJob * @property {google.cloud.dataproc.v1.IPrestoJob|null} [prestoJob] OrderedJob prestoJob + * @property {google.cloud.dataproc.v1.ITrinoJob|null} [trinoJob] OrderedJob trinoJob + * @property {google.cloud.dataproc.v1.IFlinkJob|null} [flinkJob] OrderedJob flinkJob * @property {Object.|null} [labels] OrderedJob labels * @property {google.cloud.dataproc.v1.IJobScheduling|null} [scheduling] OrderedJob scheduling * @property {Array.|null} [prerequisiteStepIds] OrderedJob prerequisiteStepIds @@ -45799,6 +46620,22 @@ */ OrderedJob.prototype.prestoJob = null; + /** + * OrderedJob trinoJob. + * @member {google.cloud.dataproc.v1.ITrinoJob|null|undefined} trinoJob + * @memberof google.cloud.dataproc.v1.OrderedJob + * @instance + */ + OrderedJob.prototype.trinoJob = null; + + /** + * OrderedJob flinkJob. + * @member {google.cloud.dataproc.v1.IFlinkJob|null|undefined} flinkJob + * @memberof google.cloud.dataproc.v1.OrderedJob + * @instance + */ + OrderedJob.prototype.flinkJob = null; + /** * OrderedJob labels. * @member {Object.} labels @@ -45828,12 +46665,12 @@ /** * OrderedJob jobType. 
- * @member {"hadoopJob"|"sparkJob"|"pysparkJob"|"hiveJob"|"pigJob"|"sparkRJob"|"sparkSqlJob"|"prestoJob"|undefined} jobType + * @member {"hadoopJob"|"sparkJob"|"pysparkJob"|"hiveJob"|"pigJob"|"sparkRJob"|"sparkSqlJob"|"prestoJob"|"trinoJob"|"flinkJob"|undefined} jobType * @memberof google.cloud.dataproc.v1.OrderedJob * @instance */ Object.defineProperty(OrderedJob.prototype, "jobType", { - get: $util.oneOfGetter($oneOfFields = ["hadoopJob", "sparkJob", "pysparkJob", "hiveJob", "pigJob", "sparkRJob", "sparkSqlJob", "prestoJob"]), + get: $util.oneOfGetter($oneOfFields = ["hadoopJob", "sparkJob", "pysparkJob", "hiveJob", "pigJob", "sparkRJob", "sparkSqlJob", "prestoJob", "trinoJob", "flinkJob"]), set: $util.oneOfSetter($oneOfFields) }); @@ -45887,6 +46724,10 @@ $root.google.cloud.dataproc.v1.SparkRJob.encode(message.sparkRJob, writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim(); if (message.prestoJob != null && Object.hasOwnProperty.call(message, "prestoJob")) $root.google.cloud.dataproc.v1.PrestoJob.encode(message.prestoJob, writer.uint32(/* id 12, wireType 2 =*/98).fork()).ldelim(); + if (message.trinoJob != null && Object.hasOwnProperty.call(message, "trinoJob")) + $root.google.cloud.dataproc.v1.TrinoJob.encode(message.trinoJob, writer.uint32(/* id 13, wireType 2 =*/106).fork()).ldelim(); + if (message.flinkJob != null && Object.hasOwnProperty.call(message, "flinkJob")) + $root.google.cloud.dataproc.v1.FlinkJob.encode(message.flinkJob, writer.uint32(/* id 14, wireType 2 =*/114).fork()).ldelim(); return writer; }; @@ -45957,6 +46798,14 @@ message.prestoJob = $root.google.cloud.dataproc.v1.PrestoJob.decode(reader, reader.uint32()); break; } + case 13: { + message.trinoJob = $root.google.cloud.dataproc.v1.TrinoJob.decode(reader, reader.uint32()); + break; + } + case 14: { + message.flinkJob = $root.google.cloud.dataproc.v1.FlinkJob.decode(reader, reader.uint32()); + break; + } case 8: { if (message.labels === $util.emptyObject) message.labels = {}; @@ -46107,6 +46956,26 @@ return "prestoJob." + error; } } + if (message.trinoJob != null && message.hasOwnProperty("trinoJob")) { + if (properties.jobType === 1) + return "jobType: multiple values"; + properties.jobType = 1; + { + var error = $root.google.cloud.dataproc.v1.TrinoJob.verify(message.trinoJob); + if (error) + return "trinoJob." + error; + } + } + if (message.flinkJob != null && message.hasOwnProperty("flinkJob")) { + if (properties.jobType === 1) + return "jobType: multiple values"; + properties.jobType = 1; + { + var error = $root.google.cloud.dataproc.v1.FlinkJob.verify(message.flinkJob); + if (error) + return "flinkJob." 
+ error; + } + } if (message.labels != null && message.hasOwnProperty("labels")) { if (!$util.isObject(message.labels)) return "labels: object expected"; @@ -46184,6 +47053,16 @@ throw TypeError(".google.cloud.dataproc.v1.OrderedJob.prestoJob: object expected"); message.prestoJob = $root.google.cloud.dataproc.v1.PrestoJob.fromObject(object.prestoJob); } + if (object.trinoJob != null) { + if (typeof object.trinoJob !== "object") + throw TypeError(".google.cloud.dataproc.v1.OrderedJob.trinoJob: object expected"); + message.trinoJob = $root.google.cloud.dataproc.v1.TrinoJob.fromObject(object.trinoJob); + } + if (object.flinkJob != null) { + if (typeof object.flinkJob !== "object") + throw TypeError(".google.cloud.dataproc.v1.OrderedJob.flinkJob: object expected"); + message.flinkJob = $root.google.cloud.dataproc.v1.FlinkJob.fromObject(object.flinkJob); + } if (object.labels) { if (typeof object.labels !== "object") throw TypeError(".google.cloud.dataproc.v1.OrderedJob.labels: object expected"); @@ -46282,6 +47161,16 @@ if (options.oneofs) object.jobType = "prestoJob"; } + if (message.trinoJob != null && message.hasOwnProperty("trinoJob")) { + object.trinoJob = $root.google.cloud.dataproc.v1.TrinoJob.toObject(message.trinoJob, options); + if (options.oneofs) + object.jobType = "trinoJob"; + } + if (message.flinkJob != null && message.hasOwnProperty("flinkJob")) { + object.flinkJob = $root.google.cloud.dataproc.v1.FlinkJob.toObject(message.flinkJob, options); + if (options.oneofs) + object.jobType = "flinkJob"; + } return object; }; @@ -50272,6 +51161,7 @@ * @interface IListWorkflowTemplatesResponse * @property {Array.|null} [templates] ListWorkflowTemplatesResponse templates * @property {string|null} [nextPageToken] ListWorkflowTemplatesResponse nextPageToken + * @property {Array.|null} [unreachable] ListWorkflowTemplatesResponse unreachable */ /** @@ -50284,6 +51174,7 @@ */ function ListWorkflowTemplatesResponse(properties) { this.templates = []; + this.unreachable = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -50306,6 +51197,14 @@ */ ListWorkflowTemplatesResponse.prototype.nextPageToken = ""; + /** + * ListWorkflowTemplatesResponse unreachable. + * @member {Array.} unreachable + * @memberof google.cloud.dataproc.v1.ListWorkflowTemplatesResponse + * @instance + */ + ListWorkflowTemplatesResponse.prototype.unreachable = $util.emptyArray; + /** * Creates a new ListWorkflowTemplatesResponse instance using the specified properties. 
* @function create @@ -50335,6 +51234,9 @@ $root.google.cloud.dataproc.v1.WorkflowTemplate.encode(message.templates[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); if (message.nextPageToken != null && Object.hasOwnProperty.call(message, "nextPageToken")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.nextPageToken); + if (message.unreachable != null && message.unreachable.length) + for (var i = 0; i < message.unreachable.length; ++i) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.unreachable[i]); return writer; }; @@ -50379,6 +51281,12 @@ message.nextPageToken = reader.string(); break; } + case 3: { + if (!(message.unreachable && message.unreachable.length)) + message.unreachable = []; + message.unreachable.push(reader.string()); + break; + } default: reader.skipType(tag & 7); break; @@ -50426,6 +51334,13 @@ if (message.nextPageToken != null && message.hasOwnProperty("nextPageToken")) if (!$util.isString(message.nextPageToken)) return "nextPageToken: string expected"; + if (message.unreachable != null && message.hasOwnProperty("unreachable")) { + if (!Array.isArray(message.unreachable)) + return "unreachable: array expected"; + for (var i = 0; i < message.unreachable.length; ++i) + if (!$util.isString(message.unreachable[i])) + return "unreachable: string[] expected"; + } return null; }; @@ -50453,6 +51368,13 @@ } if (object.nextPageToken != null) message.nextPageToken = String(object.nextPageToken); + if (object.unreachable) { + if (!Array.isArray(object.unreachable)) + throw TypeError(".google.cloud.dataproc.v1.ListWorkflowTemplatesResponse.unreachable: array expected"); + message.unreachable = []; + for (var i = 0; i < object.unreachable.length; ++i) + message.unreachable[i] = String(object.unreachable[i]); + } return message; }; @@ -50469,8 +51391,10 @@ if (!options) options = {}; var object = {}; - if (options.arrays || options.defaults) + if (options.arrays || options.defaults) { object.templates = []; + object.unreachable = []; + } if (options.defaults) object.nextPageToken = ""; if (message.templates && message.templates.length) { @@ -50480,6 +51404,11 @@ } if (message.nextPageToken != null && message.hasOwnProperty("nextPageToken")) object.nextPageToken = message.nextPageToken; + if (message.unreachable && message.unreachable.length) { + object.unreachable = []; + for (var j = 0; j < message.unreachable.length; ++j) + object.unreachable[j] = message.unreachable[j]; + } return object; }; diff --git a/packages/google-cloud-dataproc/protos/protos.json b/packages/google-cloud-dataproc/protos/protos.json index aa8c44c4759..4659b4f13b0 100644 --- a/packages/google-cloud-dataproc/protos/protos.json +++ b/packages/google-cloud-dataproc/protos/protos.json @@ -570,6 +570,14 @@ "nextPageToken": { "type": "string", "id": 2 + }, + "unreachable": { + "rule": "repeated", + "type": "string", + "id": 3, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } } } }, @@ -1985,6 +1993,13 @@ "options": { "(google.api.field_behavior)": "OPTIONAL" } + }, + "kmsKey": { + "type": "string", + "id": 2, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } } } }, @@ -2852,7 +2867,8 @@ "YARN": 4, "SPARK_HISTORY_SERVER": 5, "HIVESERVER2": 6, - "HIVEMETASTORE": 7 + "HIVEMETASTORE": 7, + "FLINK": 8 } }, "Metric": { @@ -4273,6 +4289,64 @@ } } }, + "FlinkJob": { + "oneofs": { + "driver": { + "oneof": [ + "mainJarFileUri", + "mainClass" + ] + } + }, + "fields": { + "mainJarFileUri": { + "type": "string", + "id": 1 + }, + "mainClass": { + "type": "string", + 
"id": 2 + }, + "args": { + "rule": "repeated", + "type": "string", + "id": 3, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "jarFileUris": { + "rule": "repeated", + "type": "string", + "id": 4, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "savepointUri": { + "type": "string", + "id": 9, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "properties": { + "keyType": "string", + "type": "string", + "id": 7, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "loggingConfig": { + "type": "LoggingConfig", + "id": 8, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } + }, "JobPlacement": { "fields": { "clusterName": { @@ -4432,7 +4506,8 @@ "sparkRJob", "sparkSqlJob", "prestoJob", - "trinoJob" + "trinoJob", + "flinkJob" ] } }, @@ -4514,6 +4589,13 @@ "(google.api.field_behavior)": "OPTIONAL" } }, + "flinkJob": { + "type": "FlinkJob", + "id": 29, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, "status": { "type": "JobStatus", "id": 8, @@ -4831,6 +4913,14 @@ "options": { "(google.api.field_behavior)": "OPTIONAL" } + }, + "unreachable": { + "rule": "repeated", + "type": "string", + "id": 3, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } } } }, @@ -6055,6 +6145,26 @@ "options": { "(google.api.field_behavior)": "OPTIONAL" } + }, + "encryptionConfig": { + "type": "EncryptionConfig", + "id": 11, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + }, + "nested": { + "EncryptionConfig": { + "fields": { + "kmsKey": { + "type": "string", + "id": 1, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + } + } } } }, @@ -6134,7 +6244,9 @@ "pigJob", "sparkRJob", "sparkSqlJob", - "prestoJob" + "prestoJob", + "trinoJob", + "flinkJob" ] } }, @@ -6202,6 +6314,20 @@ "(google.api.field_behavior)": "OPTIONAL" } }, + "trinoJob": { + "type": "TrinoJob", + "id": 13, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, + "flinkJob": { + "type": "FlinkJob", + "id": 14, + "options": { + "(google.api.field_behavior)": "OPTIONAL" + } + }, "labels": { "keyType": "string", "type": "string", @@ -6654,6 +6780,14 @@ "options": { "(google.api.field_behavior)": "OUTPUT_ONLY" } + }, + "unreachable": { + "rule": "repeated", + "type": "string", + "id": 3, + "options": { + "(google.api.field_behavior)": "OUTPUT_ONLY" + } } } },