Add Cloud Dataflow API

Thomas Coffee 2016-10-03 17:30:46 -07:00
parent 403cf9f9e9
commit 8e1162e2d7
6 changed files with 6885 additions and 1 deletion


@@ -22957,6 +22957,609 @@
"/container:v1/ListNodePoolsResponse/nodePools/node_pool": node_pool
"/container:v1/CreateNodePoolRequest": create_node_pool_request
"/container:v1/CreateNodePoolRequest/nodePool": node_pool
"/dataflow:v1b3/fields": fields
"/dataflow:v1b3/key": key
"/dataflow:v1b3/quotaUser": quota_user
"/dataflow:v1b3/dataflow.projects.workerMessages": worker_project_messages
"/dataflow:v1b3/dataflow.projects.workerMessages/projectId": project_id
"/dataflow:v1b3/dataflow.projects.jobs.create": create_project_job
"/dataflow:v1b3/dataflow.projects.jobs.create/projectId": project_id
"/dataflow:v1b3/dataflow.projects.jobs.create/view": view
"/dataflow:v1b3/dataflow.projects.jobs.create/replaceJobId": replace_job_id
"/dataflow:v1b3/dataflow.projects.jobs.get": get_project_job
"/dataflow:v1b3/dataflow.projects.jobs.get/projectId": project_id
"/dataflow:v1b3/dataflow.projects.jobs.get/jobId": job_id
"/dataflow:v1b3/dataflow.projects.jobs.get/view": view
"/dataflow:v1b3/dataflow.projects.jobs.update": update_project_job
"/dataflow:v1b3/dataflow.projects.jobs.update/projectId": project_id
"/dataflow:v1b3/dataflow.projects.jobs.update/jobId": job_id
"/dataflow:v1b3/dataflow.projects.jobs.list": list_project_jobs
"/dataflow:v1b3/dataflow.projects.jobs.list/projectId": project_id
"/dataflow:v1b3/dataflow.projects.jobs.list/filter": filter
"/dataflow:v1b3/dataflow.projects.jobs.list/view": view
"/dataflow:v1b3/dataflow.projects.jobs.list/pageSize": page_size
"/dataflow:v1b3/dataflow.projects.jobs.list/pageToken": page_token
"/dataflow:v1b3/dataflow.projects.jobs.getMetrics": get_project_job_metrics
"/dataflow:v1b3/dataflow.projects.jobs.getMetrics/projectId": project_id
"/dataflow:v1b3/dataflow.projects.jobs.getMetrics/jobId": job_id
"/dataflow:v1b3/dataflow.projects.jobs.getMetrics/startTime": start_time
"/dataflow:v1b3/dataflow.projects.jobs.debug.getConfig": get_project_job_debug_config
"/dataflow:v1b3/dataflow.projects.jobs.debug.getConfig/projectId": project_id
"/dataflow:v1b3/dataflow.projects.jobs.debug.getConfig/jobId": job_id
"/dataflow:v1b3/dataflow.projects.jobs.debug.sendCapture": send_project_job_debug_capture
"/dataflow:v1b3/dataflow.projects.jobs.debug.sendCapture/projectId": project_id
"/dataflow:v1b3/dataflow.projects.jobs.debug.sendCapture/jobId": job_id
"/dataflow:v1b3/dataflow.projects.jobs.messages.list": list_project_job_messages
"/dataflow:v1b3/dataflow.projects.jobs.messages.list/projectId": project_id
"/dataflow:v1b3/dataflow.projects.jobs.messages.list/jobId": job_id
"/dataflow:v1b3/dataflow.projects.jobs.messages.list/minimumImportance": minimum_importance
"/dataflow:v1b3/dataflow.projects.jobs.messages.list/pageSize": page_size
"/dataflow:v1b3/dataflow.projects.jobs.messages.list/pageToken": page_token
"/dataflow:v1b3/dataflow.projects.jobs.messages.list/startTime": start_time
"/dataflow:v1b3/dataflow.projects.jobs.messages.list/endTime": end_time
"/dataflow:v1b3/dataflow.projects.jobs.workItems.reportStatus": report_project_job_work_item_status
"/dataflow:v1b3/dataflow.projects.jobs.workItems.reportStatus/projectId": project_id
"/dataflow:v1b3/dataflow.projects.jobs.workItems.reportStatus/jobId": job_id
"/dataflow:v1b3/dataflow.projects.jobs.workItems.lease": lease_work_item
"/dataflow:v1b3/dataflow.projects.jobs.workItems.lease/projectId": project_id
"/dataflow:v1b3/dataflow.projects.jobs.workItems.lease/jobId": job_id
"/dataflow:v1b3/dataflow.projects.templates.create": create_job_from_template
"/dataflow:v1b3/dataflow.projects.templates.create/projectId": project_id
"/dataflow:v1b3/GetDebugConfigRequest": get_debug_config_request
"/dataflow:v1b3/GetDebugConfigRequest/workerId": worker_id
"/dataflow:v1b3/GetDebugConfigRequest/componentId": component_id
"/dataflow:v1b3/GetDebugConfigResponse": get_debug_config_response
"/dataflow:v1b3/GetDebugConfigResponse/config": config
"/dataflow:v1b3/SendDebugCaptureRequest": send_debug_capture_request
"/dataflow:v1b3/SendDebugCaptureRequest/workerId": worker_id
"/dataflow:v1b3/SendDebugCaptureRequest/componentId": component_id
"/dataflow:v1b3/SendDebugCaptureRequest/data": data
"/dataflow:v1b3/SendDebugCaptureResponse": send_debug_capture_response
"/dataflow:v1b3/Job": job
"/dataflow:v1b3/Job/id": id
"/dataflow:v1b3/Job/projectId": project_id
"/dataflow:v1b3/Job/name": name
"/dataflow:v1b3/Job/type": type
"/dataflow:v1b3/Job/environment": environment
"/dataflow:v1b3/Job/steps": steps
"/dataflow:v1b3/Job/steps/step": step
"/dataflow:v1b3/Job/currentState": current_state
"/dataflow:v1b3/Job/currentStateTime": current_state_time
"/dataflow:v1b3/Job/requestedState": requested_state
"/dataflow:v1b3/Job/executionInfo": execution_info
"/dataflow:v1b3/Job/createTime": create_time
"/dataflow:v1b3/Job/replaceJobId": replace_job_id
"/dataflow:v1b3/Job/transformNameMapping": transform_name_mapping
"/dataflow:v1b3/Job/transformNameMapping/transform_name_mapping": transform_name_mapping
"/dataflow:v1b3/Job/clientRequestId": client_request_id
"/dataflow:v1b3/Job/replacedByJobId": replaced_by_job_id
"/dataflow:v1b3/Job/tempFiles": temp_files
"/dataflow:v1b3/Job/tempFiles/temp_file": temp_file
"/dataflow:v1b3/Job/labels": labels
"/dataflow:v1b3/Job/labels/label": label
"/dataflow:v1b3/Environment": environment
"/dataflow:v1b3/Environment/tempStoragePrefix": temp_storage_prefix
"/dataflow:v1b3/Environment/clusterManagerApiService": cluster_manager_api_service
"/dataflow:v1b3/Environment/experiments": experiments
"/dataflow:v1b3/Environment/experiments/experiment": experiment
"/dataflow:v1b3/Environment/workerPools": worker_pools
"/dataflow:v1b3/Environment/workerPools/worker_pool": worker_pool
"/dataflow:v1b3/Environment/userAgent": user_agent
"/dataflow:v1b3/Environment/userAgent/user_agent": user_agent
"/dataflow:v1b3/Environment/version": version
"/dataflow:v1b3/Environment/version/version": version
"/dataflow:v1b3/Environment/dataset": dataset
"/dataflow:v1b3/Environment/sdkPipelineOptions": sdk_pipeline_options
"/dataflow:v1b3/Environment/sdkPipelineOptions/sdk_pipeline_option": sdk_pipeline_option
"/dataflow:v1b3/Environment/internalExperiments": internal_experiments
"/dataflow:v1b3/Environment/internalExperiments/internal_experiment": internal_experiment
"/dataflow:v1b3/Environment/serviceAccountEmail": service_account_email
"/dataflow:v1b3/WorkerPool": worker_pool
"/dataflow:v1b3/WorkerPool/kind": kind
"/dataflow:v1b3/WorkerPool/numWorkers": num_workers
"/dataflow:v1b3/WorkerPool/packages": packages
"/dataflow:v1b3/WorkerPool/packages/package": package
"/dataflow:v1b3/WorkerPool/defaultPackageSet": default_package_set
"/dataflow:v1b3/WorkerPool/machineType": machine_type
"/dataflow:v1b3/WorkerPool/teardownPolicy": teardown_policy
"/dataflow:v1b3/WorkerPool/diskSizeGb": disk_size_gb
"/dataflow:v1b3/WorkerPool/diskType": disk_type
"/dataflow:v1b3/WorkerPool/diskSourceImage": disk_source_image
"/dataflow:v1b3/WorkerPool/zone": zone
"/dataflow:v1b3/WorkerPool/taskrunnerSettings": taskrunner_settings
"/dataflow:v1b3/WorkerPool/onHostMaintenance": on_host_maintenance
"/dataflow:v1b3/WorkerPool/dataDisks": data_disks
"/dataflow:v1b3/WorkerPool/dataDisks/data_disk": data_disk
"/dataflow:v1b3/WorkerPool/metadata": metadata
"/dataflow:v1b3/WorkerPool/metadata/metadatum": metadatum
"/dataflow:v1b3/WorkerPool/autoscalingSettings": autoscaling_settings
"/dataflow:v1b3/WorkerPool/poolArgs": pool_args
"/dataflow:v1b3/WorkerPool/poolArgs/pool_arg": pool_arg
"/dataflow:v1b3/WorkerPool/network": network
"/dataflow:v1b3/WorkerPool/subnetwork": subnetwork
"/dataflow:v1b3/WorkerPool/workerHarnessContainerImage": worker_harness_container_image
"/dataflow:v1b3/WorkerPool/numThreadsPerWorker": num_threads_per_worker
"/dataflow:v1b3/WorkerPool/ipConfiguration": ip_configuration
"/dataflow:v1b3/Package": package
"/dataflow:v1b3/Package/name": name
"/dataflow:v1b3/Package/location": location
"/dataflow:v1b3/TaskRunnerSettings": task_runner_settings
"/dataflow:v1b3/TaskRunnerSettings/taskUser": task_user
"/dataflow:v1b3/TaskRunnerSettings/taskGroup": task_group
"/dataflow:v1b3/TaskRunnerSettings/oauthScopes": oauth_scopes
"/dataflow:v1b3/TaskRunnerSettings/oauthScopes/oauth_scope": oauth_scope
"/dataflow:v1b3/TaskRunnerSettings/baseUrl": base_url
"/dataflow:v1b3/TaskRunnerSettings/dataflowApiVersion": dataflow_api_version
"/dataflow:v1b3/TaskRunnerSettings/parallelWorkerSettings": parallel_worker_settings
"/dataflow:v1b3/TaskRunnerSettings/baseTaskDir": base_task_dir
"/dataflow:v1b3/TaskRunnerSettings/continueOnException": continue_on_exception
"/dataflow:v1b3/TaskRunnerSettings/logToSerialconsole": log_to_serialconsole
"/dataflow:v1b3/TaskRunnerSettings/alsologtostderr": alsologtostderr
"/dataflow:v1b3/TaskRunnerSettings/logUploadLocation": log_upload_location
"/dataflow:v1b3/TaskRunnerSettings/logDir": log_dir
"/dataflow:v1b3/TaskRunnerSettings/tempStoragePrefix": temp_storage_prefix
"/dataflow:v1b3/TaskRunnerSettings/harnessCommand": harness_command
"/dataflow:v1b3/TaskRunnerSettings/workflowFileName": workflow_file_name
"/dataflow:v1b3/TaskRunnerSettings/commandlinesFileName": commandlines_file_name
"/dataflow:v1b3/TaskRunnerSettings/vmId": vm_id
"/dataflow:v1b3/TaskRunnerSettings/languageHint": language_hint
"/dataflow:v1b3/TaskRunnerSettings/streamingWorkerMainClass": streaming_worker_main_class
"/dataflow:v1b3/WorkerSettings": worker_settings
"/dataflow:v1b3/WorkerSettings/baseUrl": base_url
"/dataflow:v1b3/WorkerSettings/reportingEnabled": reporting_enabled
"/dataflow:v1b3/WorkerSettings/servicePath": service_path
"/dataflow:v1b3/WorkerSettings/shuffleServicePath": shuffle_service_path
"/dataflow:v1b3/WorkerSettings/workerId": worker_id
"/dataflow:v1b3/WorkerSettings/tempStoragePrefix": temp_storage_prefix
"/dataflow:v1b3/Disk": disk
"/dataflow:v1b3/Disk/sizeGb": size_gb
"/dataflow:v1b3/Disk/diskType": disk_type
"/dataflow:v1b3/Disk/mountPoint": mount_point
"/dataflow:v1b3/AutoscalingSettings": autoscaling_settings
"/dataflow:v1b3/AutoscalingSettings/algorithm": algorithm
"/dataflow:v1b3/AutoscalingSettings/maxNumWorkers": max_num_workers
"/dataflow:v1b3/Step": step
"/dataflow:v1b3/Step/kind": kind
"/dataflow:v1b3/Step/name": name
"/dataflow:v1b3/Step/properties": properties
"/dataflow:v1b3/Step/properties/property": property
"/dataflow:v1b3/JobExecutionInfo": job_execution_info
"/dataflow:v1b3/JobExecutionInfo/stages": stages
"/dataflow:v1b3/JobExecutionInfo/stages/stage": stage
"/dataflow:v1b3/JobExecutionStageInfo": job_execution_stage_info
"/dataflow:v1b3/JobExecutionStageInfo/stepName": step_name
"/dataflow:v1b3/JobExecutionStageInfo/stepName/step_name": step_name
"/dataflow:v1b3/ListJobsResponse": list_jobs_response
"/dataflow:v1b3/ListJobsResponse/jobs": jobs
"/dataflow:v1b3/ListJobsResponse/jobs/job": job
"/dataflow:v1b3/ListJobsResponse/nextPageToken": next_page_token
"/dataflow:v1b3/ListJobMessagesResponse": list_job_messages_response
"/dataflow:v1b3/ListJobMessagesResponse/jobMessages": job_messages
"/dataflow:v1b3/ListJobMessagesResponse/jobMessages/job_message": job_message
"/dataflow:v1b3/ListJobMessagesResponse/nextPageToken": next_page_token
"/dataflow:v1b3/JobMessage": job_message
"/dataflow:v1b3/JobMessage/id": id
"/dataflow:v1b3/JobMessage/time": time
"/dataflow:v1b3/JobMessage/messageText": message_text
"/dataflow:v1b3/JobMessage/messageImportance": message_importance
"/dataflow:v1b3/JobMetrics": job_metrics
"/dataflow:v1b3/JobMetrics/metricTime": metric_time
"/dataflow:v1b3/JobMetrics/metrics": metrics
"/dataflow:v1b3/JobMetrics/metrics/metric": metric
"/dataflow:v1b3/MetricUpdate": metric_update
"/dataflow:v1b3/MetricUpdate/name": name
"/dataflow:v1b3/MetricUpdate/kind": kind
"/dataflow:v1b3/MetricUpdate/cumulative": cumulative
"/dataflow:v1b3/MetricUpdate/scalar": scalar
"/dataflow:v1b3/MetricUpdate/meanSum": mean_sum
"/dataflow:v1b3/MetricUpdate/meanCount": mean_count
"/dataflow:v1b3/MetricUpdate/set": set
"/dataflow:v1b3/MetricUpdate/internal": internal
"/dataflow:v1b3/MetricUpdate/updateTime": update_time
"/dataflow:v1b3/MetricStructuredName": metric_structured_name
"/dataflow:v1b3/MetricStructuredName/origin": origin
"/dataflow:v1b3/MetricStructuredName/name": name
"/dataflow:v1b3/MetricStructuredName/context": context
"/dataflow:v1b3/MetricStructuredName/context/context": context
"/dataflow:v1b3/CreateJobFromTemplateRequest": create_job_from_template_request
"/dataflow:v1b3/CreateJobFromTemplateRequest/gcsPath": gcs_path
"/dataflow:v1b3/CreateJobFromTemplateRequest/parameters": parameters
"/dataflow:v1b3/CreateJobFromTemplateRequest/parameters/parameter": parameter
"/dataflow:v1b3/ReportWorkItemStatusRequest": report_work_item_status_request
"/dataflow:v1b3/ReportWorkItemStatusRequest/workerId": worker_id
"/dataflow:v1b3/ReportWorkItemStatusRequest/workItemStatuses": work_item_statuses
"/dataflow:v1b3/ReportWorkItemStatusRequest/workItemStatuses/work_item_status": work_item_status
"/dataflow:v1b3/ReportWorkItemStatusRequest/currentWorkerTime": current_worker_time
"/dataflow:v1b3/WorkItemStatus": work_item_status
"/dataflow:v1b3/WorkItemStatus/workItemId": work_item_id
"/dataflow:v1b3/WorkItemStatus/reportIndex": report_index
"/dataflow:v1b3/WorkItemStatus/requestedLeaseDuration": requested_lease_duration
"/dataflow:v1b3/WorkItemStatus/completed": completed
"/dataflow:v1b3/WorkItemStatus/errors": errors
"/dataflow:v1b3/WorkItemStatus/errors/error": error
"/dataflow:v1b3/WorkItemStatus/counterUpdates": counter_updates
"/dataflow:v1b3/WorkItemStatus/counterUpdates/counter_update": counter_update
"/dataflow:v1b3/WorkItemStatus/metricUpdates": metric_updates
"/dataflow:v1b3/WorkItemStatus/metricUpdates/metric_update": metric_update
"/dataflow:v1b3/WorkItemStatus/reportedProgress": reported_progress
"/dataflow:v1b3/WorkItemStatus/stopPosition": stop_position
"/dataflow:v1b3/WorkItemStatus/dynamicSourceSplit": dynamic_source_split
"/dataflow:v1b3/WorkItemStatus/sourceOperationResponse": source_operation_response
"/dataflow:v1b3/WorkItemStatus/sourceFork": source_fork
"/dataflow:v1b3/WorkItemStatus/progress": progress
"/dataflow:v1b3/Status": status
"/dataflow:v1b3/Status/code": code
"/dataflow:v1b3/Status/message": message
"/dataflow:v1b3/Status/details": details
"/dataflow:v1b3/Status/details/detail": detail
"/dataflow:v1b3/Status/details/detail/detail": detail
"/dataflow:v1b3/CounterUpdate": counter_update
"/dataflow:v1b3/CounterUpdate/nameAndKind": name_and_kind
"/dataflow:v1b3/CounterUpdate/shortId": short_id
"/dataflow:v1b3/CounterUpdate/structuredNameAndMetadata": structured_name_and_metadata
"/dataflow:v1b3/CounterUpdate/cumulative": cumulative
"/dataflow:v1b3/CounterUpdate/integer": integer
"/dataflow:v1b3/CounterUpdate/floatingPoint": floating_point
"/dataflow:v1b3/CounterUpdate/boolean": boolean
"/dataflow:v1b3/CounterUpdate/integerMean": integer_mean
"/dataflow:v1b3/CounterUpdate/floatingPointMean": floating_point_mean
"/dataflow:v1b3/CounterUpdate/integerList": integer_list
"/dataflow:v1b3/CounterUpdate/floatingPointList": floating_point_list
"/dataflow:v1b3/CounterUpdate/stringList": string_list
"/dataflow:v1b3/CounterUpdate/internal": internal
"/dataflow:v1b3/NameAndKind": name_and_kind
"/dataflow:v1b3/NameAndKind/name": name
"/dataflow:v1b3/NameAndKind/kind": kind
"/dataflow:v1b3/CounterStructuredNameAndMetadata": counter_structured_name_and_metadata
"/dataflow:v1b3/CounterStructuredNameAndMetadata/name": name
"/dataflow:v1b3/CounterStructuredNameAndMetadata/metadata": metadata
"/dataflow:v1b3/CounterStructuredName": counter_structured_name
"/dataflow:v1b3/CounterStructuredName/name": name
"/dataflow:v1b3/CounterStructuredName/standardOrigin": standard_origin
"/dataflow:v1b3/CounterStructuredName/otherOrigin": other_origin
"/dataflow:v1b3/CounterStructuredName/originalStepName": original_step_name
"/dataflow:v1b3/CounterStructuredName/componentStepName": component_step_name
"/dataflow:v1b3/CounterStructuredName/executionStepName": execution_step_name
"/dataflow:v1b3/CounterStructuredName/workerId": worker_id
"/dataflow:v1b3/CounterStructuredName/portion": portion
"/dataflow:v1b3/CounterMetadata": counter_metadata
"/dataflow:v1b3/CounterMetadata/kind": kind
"/dataflow:v1b3/CounterMetadata/description": description
"/dataflow:v1b3/CounterMetadata/standardUnits": standard_units
"/dataflow:v1b3/CounterMetadata/otherUnits": other_units
"/dataflow:v1b3/SplitInt64": split_int64
"/dataflow:v1b3/SplitInt64/lowBits": low_bits
"/dataflow:v1b3/SplitInt64/highBits": high_bits
"/dataflow:v1b3/IntegerMean": integer_mean
"/dataflow:v1b3/IntegerMean/sum": sum
"/dataflow:v1b3/IntegerMean/count": count
"/dataflow:v1b3/FloatingPointMean": floating_point_mean
"/dataflow:v1b3/FloatingPointMean/sum": sum
"/dataflow:v1b3/FloatingPointMean/count": count
"/dataflow:v1b3/IntegerList": integer_list
"/dataflow:v1b3/IntegerList/elements": elements
"/dataflow:v1b3/IntegerList/elements/element": element
"/dataflow:v1b3/FloatingPointList": floating_point_list
"/dataflow:v1b3/FloatingPointList/elements": elements
"/dataflow:v1b3/FloatingPointList/elements/element": element
"/dataflow:v1b3/StringList": string_list
"/dataflow:v1b3/StringList/elements": elements
"/dataflow:v1b3/StringList/elements/element": element
"/dataflow:v1b3/ApproximateReportedProgress": approximate_reported_progress
"/dataflow:v1b3/ApproximateReportedProgress/position": position
"/dataflow:v1b3/ApproximateReportedProgress/fractionConsumed": fraction_consumed
"/dataflow:v1b3/ApproximateReportedProgress/remainingParallelism": remaining_parallelism
"/dataflow:v1b3/ApproximateReportedProgress/consumedParallelism": consumed_parallelism
"/dataflow:v1b3/Position": position
"/dataflow:v1b3/Position/end": end
"/dataflow:v1b3/Position/key": key
"/dataflow:v1b3/Position/byteOffset": byte_offset
"/dataflow:v1b3/Position/recordIndex": record_index
"/dataflow:v1b3/Position/shufflePosition": shuffle_position
"/dataflow:v1b3/Position/concatPosition": concat_position
"/dataflow:v1b3/ConcatPosition": concat_position
"/dataflow:v1b3/ConcatPosition/index": index
"/dataflow:v1b3/ConcatPosition/position": position
"/dataflow:v1b3/ReportedParallelism": reported_parallelism
"/dataflow:v1b3/ReportedParallelism/isInfinite": is_infinite
"/dataflow:v1b3/ReportedParallelism/value": value
"/dataflow:v1b3/DynamicSourceSplit": dynamic_source_split
"/dataflow:v1b3/DynamicSourceSplit/primary": primary
"/dataflow:v1b3/DynamicSourceSplit/residual": residual
"/dataflow:v1b3/DerivedSource": derived_source
"/dataflow:v1b3/DerivedSource/source": source
"/dataflow:v1b3/DerivedSource/derivationMode": derivation_mode
"/dataflow:v1b3/Source": source
"/dataflow:v1b3/Source/spec": spec
"/dataflow:v1b3/Source/spec/spec": spec
"/dataflow:v1b3/Source/codec": codec
"/dataflow:v1b3/Source/codec/codec": codec
"/dataflow:v1b3/Source/baseSpecs": base_specs
"/dataflow:v1b3/Source/baseSpecs/base_spec": base_spec
"/dataflow:v1b3/Source/baseSpecs/base_spec/base_spec": base_spec
"/dataflow:v1b3/Source/metadata": metadata
"/dataflow:v1b3/Source/doesNotNeedSplitting": does_not_need_splitting
"/dataflow:v1b3/SourceMetadata": source_metadata
"/dataflow:v1b3/SourceMetadata/producesSortedKeys": produces_sorted_keys
"/dataflow:v1b3/SourceMetadata/infinite": infinite
"/dataflow:v1b3/SourceMetadata/estimatedSizeBytes": estimated_size_bytes
"/dataflow:v1b3/SourceOperationResponse": source_operation_response
"/dataflow:v1b3/SourceOperationResponse/split": split
"/dataflow:v1b3/SourceOperationResponse/getMetadata": get_metadata
"/dataflow:v1b3/SourceSplitResponse": source_split_response
"/dataflow:v1b3/SourceSplitResponse/outcome": outcome
"/dataflow:v1b3/SourceSplitResponse/bundles": bundles
"/dataflow:v1b3/SourceSplitResponse/bundles/bundle": bundle
"/dataflow:v1b3/SourceSplitResponse/shards": shards
"/dataflow:v1b3/SourceSplitResponse/shards/shard": shard
"/dataflow:v1b3/SourceSplitShard": source_split_shard
"/dataflow:v1b3/SourceSplitShard/source": source
"/dataflow:v1b3/SourceSplitShard/derivationMode": derivation_mode
"/dataflow:v1b3/SourceGetMetadataResponse": source_get_metadata_response
"/dataflow:v1b3/SourceGetMetadataResponse/metadata": metadata
"/dataflow:v1b3/SourceFork": source_fork
"/dataflow:v1b3/SourceFork/primary": primary
"/dataflow:v1b3/SourceFork/residual": residual
"/dataflow:v1b3/SourceFork/primarySource": primary_source
"/dataflow:v1b3/SourceFork/residualSource": residual_source
"/dataflow:v1b3/ApproximateProgress": approximate_progress
"/dataflow:v1b3/ApproximateProgress/position": position
"/dataflow:v1b3/ApproximateProgress/percentComplete": percent_complete
"/dataflow:v1b3/ApproximateProgress/remainingTime": remaining_time
"/dataflow:v1b3/ReportWorkItemStatusResponse": report_work_item_status_response
"/dataflow:v1b3/ReportWorkItemStatusResponse/workItemServiceStates": work_item_service_states
"/dataflow:v1b3/ReportWorkItemStatusResponse/workItemServiceStates/work_item_service_state": work_item_service_state
"/dataflow:v1b3/WorkItemServiceState": work_item_service_state
"/dataflow:v1b3/WorkItemServiceState/splitRequest": split_request
"/dataflow:v1b3/WorkItemServiceState/leaseExpireTime": lease_expire_time
"/dataflow:v1b3/WorkItemServiceState/reportStatusInterval": report_status_interval
"/dataflow:v1b3/WorkItemServiceState/harnessData": harness_data
"/dataflow:v1b3/WorkItemServiceState/harnessData/harness_datum": harness_datum
"/dataflow:v1b3/WorkItemServiceState/nextReportIndex": next_report_index
"/dataflow:v1b3/WorkItemServiceState/metricShortId": metric_short_id
"/dataflow:v1b3/WorkItemServiceState/metricShortId/metric_short_id": metric_short_id
"/dataflow:v1b3/WorkItemServiceState/suggestedStopPosition": suggested_stop_position
"/dataflow:v1b3/WorkItemServiceState/suggestedStopPoint": suggested_stop_point
"/dataflow:v1b3/ApproximateSplitRequest": approximate_split_request
"/dataflow:v1b3/ApproximateSplitRequest/position": position
"/dataflow:v1b3/ApproximateSplitRequest/fractionConsumed": fraction_consumed
"/dataflow:v1b3/MetricShortId": metric_short_id
"/dataflow:v1b3/MetricShortId/metricIndex": metric_index
"/dataflow:v1b3/MetricShortId/shortId": short_id
"/dataflow:v1b3/LeaseWorkItemRequest": lease_work_item_request
"/dataflow:v1b3/LeaseWorkItemRequest/workItemTypes": work_item_types
"/dataflow:v1b3/LeaseWorkItemRequest/workItemTypes/work_item_type": work_item_type
"/dataflow:v1b3/LeaseWorkItemRequest/workerCapabilities": worker_capabilities
"/dataflow:v1b3/LeaseWorkItemRequest/workerCapabilities/worker_capability": worker_capability
"/dataflow:v1b3/LeaseWorkItemRequest/requestedLeaseDuration": requested_lease_duration
"/dataflow:v1b3/LeaseWorkItemRequest/currentWorkerTime": current_worker_time
"/dataflow:v1b3/LeaseWorkItemRequest/workerId": worker_id
"/dataflow:v1b3/LeaseWorkItemResponse": lease_work_item_response
"/dataflow:v1b3/LeaseWorkItemResponse/workItems": work_items
"/dataflow:v1b3/LeaseWorkItemResponse/workItems/work_item": work_item
"/dataflow:v1b3/WorkItem": work_item
"/dataflow:v1b3/WorkItem/id": id
"/dataflow:v1b3/WorkItem/projectId": project_id
"/dataflow:v1b3/WorkItem/jobId": job_id
"/dataflow:v1b3/WorkItem/packages": packages
"/dataflow:v1b3/WorkItem/packages/package": package
"/dataflow:v1b3/WorkItem/mapTask": map_task
"/dataflow:v1b3/WorkItem/seqMapTask": seq_map_task
"/dataflow:v1b3/WorkItem/shellTask": shell_task
"/dataflow:v1b3/WorkItem/streamingSetupTask": streaming_setup_task
"/dataflow:v1b3/WorkItem/sourceOperationTask": source_operation_task
"/dataflow:v1b3/WorkItem/streamingComputationTask": streaming_computation_task
"/dataflow:v1b3/WorkItem/streamingConfigTask": streaming_config_task
"/dataflow:v1b3/WorkItem/reportStatusInterval": report_status_interval
"/dataflow:v1b3/WorkItem/leaseExpireTime": lease_expire_time
"/dataflow:v1b3/WorkItem/configuration": configuration
"/dataflow:v1b3/WorkItem/initialReportIndex": initial_report_index
"/dataflow:v1b3/MapTask": map_task
"/dataflow:v1b3/MapTask/instructions": instructions
"/dataflow:v1b3/MapTask/instructions/instruction": instruction
"/dataflow:v1b3/MapTask/systemName": system_name
"/dataflow:v1b3/MapTask/stageName": stage_name
"/dataflow:v1b3/ParallelInstruction": parallel_instruction
"/dataflow:v1b3/ParallelInstruction/systemName": system_name
"/dataflow:v1b3/ParallelInstruction/name": name
"/dataflow:v1b3/ParallelInstruction/originalName": original_name
"/dataflow:v1b3/ParallelInstruction/read": read
"/dataflow:v1b3/ParallelInstruction/write": write
"/dataflow:v1b3/ParallelInstruction/parDo": par_do
"/dataflow:v1b3/ParallelInstruction/partialGroupByKey": partial_group_by_key
"/dataflow:v1b3/ParallelInstruction/flatten": flatten
"/dataflow:v1b3/ParallelInstruction/outputs": outputs
"/dataflow:v1b3/ParallelInstruction/outputs/output": output
"/dataflow:v1b3/ReadInstruction": read_instruction
"/dataflow:v1b3/ReadInstruction/source": source
"/dataflow:v1b3/WriteInstruction": write_instruction
"/dataflow:v1b3/WriteInstruction/input": input
"/dataflow:v1b3/WriteInstruction/sink": sink
"/dataflow:v1b3/InstructionInput": instruction_input
"/dataflow:v1b3/InstructionInput/producerInstructionIndex": producer_instruction_index
"/dataflow:v1b3/InstructionInput/outputNum": output_num
"/dataflow:v1b3/Sink": sink
"/dataflow:v1b3/Sink/spec": spec
"/dataflow:v1b3/Sink/spec/spec": spec
"/dataflow:v1b3/Sink/codec": codec
"/dataflow:v1b3/Sink/codec/codec": codec
"/dataflow:v1b3/ParDoInstruction": par_do_instruction
"/dataflow:v1b3/ParDoInstruction/input": input
"/dataflow:v1b3/ParDoInstruction/sideInputs": side_inputs
"/dataflow:v1b3/ParDoInstruction/sideInputs/side_input": side_input
"/dataflow:v1b3/ParDoInstruction/userFn": user_fn
"/dataflow:v1b3/ParDoInstruction/userFn/user_fn": user_fn
"/dataflow:v1b3/ParDoInstruction/numOutputs": num_outputs
"/dataflow:v1b3/ParDoInstruction/multiOutputInfos": multi_output_infos
"/dataflow:v1b3/ParDoInstruction/multiOutputInfos/multi_output_info": multi_output_info
"/dataflow:v1b3/SideInputInfo": side_input_info
"/dataflow:v1b3/SideInputInfo/sources": sources
"/dataflow:v1b3/SideInputInfo/sources/source": source
"/dataflow:v1b3/SideInputInfo/kind": kind
"/dataflow:v1b3/SideInputInfo/kind/kind": kind
"/dataflow:v1b3/SideInputInfo/tag": tag
"/dataflow:v1b3/MultiOutputInfo": multi_output_info
"/dataflow:v1b3/MultiOutputInfo/tag": tag
"/dataflow:v1b3/PartialGroupByKeyInstruction": partial_group_by_key_instruction
"/dataflow:v1b3/PartialGroupByKeyInstruction/input": input
"/dataflow:v1b3/PartialGroupByKeyInstruction/inputElementCodec": input_element_codec
"/dataflow:v1b3/PartialGroupByKeyInstruction/inputElementCodec/input_element_codec": input_element_codec
"/dataflow:v1b3/PartialGroupByKeyInstruction/valueCombiningFn": value_combining_fn
"/dataflow:v1b3/PartialGroupByKeyInstruction/valueCombiningFn/value_combining_fn": value_combining_fn
"/dataflow:v1b3/PartialGroupByKeyInstruction/sideInputs": side_inputs
"/dataflow:v1b3/PartialGroupByKeyInstruction/sideInputs/side_input": side_input
"/dataflow:v1b3/PartialGroupByKeyInstruction/originalCombineValuesStepName": original_combine_values_step_name
"/dataflow:v1b3/PartialGroupByKeyInstruction/originalCombineValuesInputStoreName": original_combine_values_input_store_name
"/dataflow:v1b3/FlattenInstruction": flatten_instruction
"/dataflow:v1b3/FlattenInstruction/inputs": inputs
"/dataflow:v1b3/FlattenInstruction/inputs/input": input
"/dataflow:v1b3/InstructionOutput": instruction_output
"/dataflow:v1b3/InstructionOutput/name": name
"/dataflow:v1b3/InstructionOutput/systemName": system_name
"/dataflow:v1b3/InstructionOutput/originalName": original_name
"/dataflow:v1b3/InstructionOutput/codec": codec
"/dataflow:v1b3/InstructionOutput/codec/codec": codec
"/dataflow:v1b3/InstructionOutput/onlyCountKeyBytes": only_count_key_bytes
"/dataflow:v1b3/InstructionOutput/onlyCountValueBytes": only_count_value_bytes
"/dataflow:v1b3/SeqMapTask": seq_map_task
"/dataflow:v1b3/SeqMapTask/inputs": inputs
"/dataflow:v1b3/SeqMapTask/inputs/input": input
"/dataflow:v1b3/SeqMapTask/userFn": user_fn
"/dataflow:v1b3/SeqMapTask/userFn/user_fn": user_fn
"/dataflow:v1b3/SeqMapTask/outputInfos": output_infos
"/dataflow:v1b3/SeqMapTask/outputInfos/output_info": output_info
"/dataflow:v1b3/SeqMapTask/name": name
"/dataflow:v1b3/SeqMapTask/systemName": system_name
"/dataflow:v1b3/SeqMapTask/stageName": stage_name
"/dataflow:v1b3/SeqMapTaskOutputInfo": seq_map_task_output_info
"/dataflow:v1b3/SeqMapTaskOutputInfo/tag": tag
"/dataflow:v1b3/SeqMapTaskOutputInfo/sink": sink
"/dataflow:v1b3/ShellTask": shell_task
"/dataflow:v1b3/ShellTask/command": command
"/dataflow:v1b3/ShellTask/exitCode": exit_code
"/dataflow:v1b3/StreamingSetupTask": streaming_setup_task
"/dataflow:v1b3/StreamingSetupTask/receiveWorkPort": receive_work_port
"/dataflow:v1b3/StreamingSetupTask/workerHarnessPort": worker_harness_port
"/dataflow:v1b3/StreamingSetupTask/streamingComputationTopology": streaming_computation_topology
"/dataflow:v1b3/StreamingSetupTask/drain": drain
"/dataflow:v1b3/TopologyConfig": topology_config
"/dataflow:v1b3/TopologyConfig/computations": computations
"/dataflow:v1b3/TopologyConfig/computations/computation": computation
"/dataflow:v1b3/TopologyConfig/dataDiskAssignments": data_disk_assignments
"/dataflow:v1b3/TopologyConfig/dataDiskAssignments/data_disk_assignment": data_disk_assignment
"/dataflow:v1b3/TopologyConfig/userStageToComputationNameMap": user_stage_to_computation_name_map
"/dataflow:v1b3/TopologyConfig/userStageToComputationNameMap/user_stage_to_computation_name_map": user_stage_to_computation_name_map
"/dataflow:v1b3/TopologyConfig/forwardingKeyBits": forwarding_key_bits
"/dataflow:v1b3/TopologyConfig/persistentStateVersion": persistent_state_version
"/dataflow:v1b3/ComputationTopology": computation_topology
"/dataflow:v1b3/ComputationTopology/systemStageName": system_stage_name
"/dataflow:v1b3/ComputationTopology/computationId": computation_id
"/dataflow:v1b3/ComputationTopology/userStageName": user_stage_name
"/dataflow:v1b3/ComputationTopology/keyRanges": key_ranges
"/dataflow:v1b3/ComputationTopology/keyRanges/key_range": key_range
"/dataflow:v1b3/ComputationTopology/inputs": inputs
"/dataflow:v1b3/ComputationTopology/inputs/input": input
"/dataflow:v1b3/ComputationTopology/outputs": outputs
"/dataflow:v1b3/ComputationTopology/outputs/output": output
"/dataflow:v1b3/ComputationTopology/stateFamilies": state_families
"/dataflow:v1b3/ComputationTopology/stateFamilies/state_family": state_family
"/dataflow:v1b3/KeyRangeLocation": key_range_location
"/dataflow:v1b3/KeyRangeLocation/start": start
"/dataflow:v1b3/KeyRangeLocation/end": end
"/dataflow:v1b3/KeyRangeLocation/deliveryEndpoint": delivery_endpoint
"/dataflow:v1b3/KeyRangeLocation/persistentDirectory": persistent_directory
"/dataflow:v1b3/KeyRangeLocation/dataDisk": data_disk
"/dataflow:v1b3/StreamLocation": stream_location
"/dataflow:v1b3/StreamLocation/streamingStageLocation": streaming_stage_location
"/dataflow:v1b3/StreamLocation/pubsubLocation": pubsub_location
"/dataflow:v1b3/StreamLocation/sideInputLocation": side_input_location
"/dataflow:v1b3/StreamLocation/customSourceLocation": custom_source_location
"/dataflow:v1b3/StreamingStageLocation": streaming_stage_location
"/dataflow:v1b3/StreamingStageLocation/streamId": stream_id
"/dataflow:v1b3/PubsubLocation": pubsub_location
"/dataflow:v1b3/PubsubLocation/topic": topic
"/dataflow:v1b3/PubsubLocation/subscription": subscription
"/dataflow:v1b3/PubsubLocation/timestampLabel": timestamp_label
"/dataflow:v1b3/PubsubLocation/idLabel": id_label
"/dataflow:v1b3/PubsubLocation/dropLateData": drop_late_data
"/dataflow:v1b3/PubsubLocation/trackingSubscription": tracking_subscription
"/dataflow:v1b3/StreamingSideInputLocation": streaming_side_input_location
"/dataflow:v1b3/StreamingSideInputLocation/tag": tag
"/dataflow:v1b3/StreamingSideInputLocation/stateFamily": state_family
"/dataflow:v1b3/CustomSourceLocation": custom_source_location
"/dataflow:v1b3/CustomSourceLocation/stateful": stateful
"/dataflow:v1b3/StateFamilyConfig": state_family_config
"/dataflow:v1b3/StateFamilyConfig/stateFamily": state_family
"/dataflow:v1b3/StateFamilyConfig/isRead": is_read
"/dataflow:v1b3/DataDiskAssignment": data_disk_assignment
"/dataflow:v1b3/DataDiskAssignment/vmInstance": vm_instance
"/dataflow:v1b3/DataDiskAssignment/dataDisks": data_disks
"/dataflow:v1b3/DataDiskAssignment/dataDisks/data_disk": data_disk
"/dataflow:v1b3/SourceOperationRequest": source_operation_request
"/dataflow:v1b3/SourceOperationRequest/split": split
"/dataflow:v1b3/SourceOperationRequest/getMetadata": get_metadata
"/dataflow:v1b3/SourceSplitRequest": source_split_request
"/dataflow:v1b3/SourceSplitRequest/source": source
"/dataflow:v1b3/SourceSplitRequest/options": options
"/dataflow:v1b3/SourceSplitOptions": source_split_options
"/dataflow:v1b3/SourceSplitOptions/desiredBundleSizeBytes": desired_bundle_size_bytes
"/dataflow:v1b3/SourceSplitOptions/desiredShardSizeBytes": desired_shard_size_bytes
"/dataflow:v1b3/SourceGetMetadataRequest": source_get_metadata_request
"/dataflow:v1b3/SourceGetMetadataRequest/source": source
"/dataflow:v1b3/StreamingComputationTask": streaming_computation_task
"/dataflow:v1b3/StreamingComputationTask/taskType": task_type
"/dataflow:v1b3/StreamingComputationTask/dataDisks": data_disks
"/dataflow:v1b3/StreamingComputationTask/dataDisks/data_disk": data_disk
"/dataflow:v1b3/StreamingComputationTask/computationRanges": computation_ranges
"/dataflow:v1b3/StreamingComputationTask/computationRanges/computation_range": computation_range
"/dataflow:v1b3/MountedDataDisk": mounted_data_disk
"/dataflow:v1b3/MountedDataDisk/dataDisk": data_disk
"/dataflow:v1b3/StreamingComputationRanges": streaming_computation_ranges
"/dataflow:v1b3/StreamingComputationRanges/computationId": computation_id
"/dataflow:v1b3/StreamingComputationRanges/rangeAssignments": range_assignments
"/dataflow:v1b3/StreamingComputationRanges/rangeAssignments/range_assignment": range_assignment
"/dataflow:v1b3/KeyRangeDataDiskAssignment": key_range_data_disk_assignment
"/dataflow:v1b3/KeyRangeDataDiskAssignment/start": start
"/dataflow:v1b3/KeyRangeDataDiskAssignment/end": end
"/dataflow:v1b3/KeyRangeDataDiskAssignment/dataDisk": data_disk
"/dataflow:v1b3/StreamingConfigTask": streaming_config_task
"/dataflow:v1b3/StreamingConfigTask/streamingComputationConfigs": streaming_computation_configs
"/dataflow:v1b3/StreamingConfigTask/streamingComputationConfigs/streaming_computation_config": streaming_computation_config
"/dataflow:v1b3/StreamingConfigTask/userStepToStateFamilyNameMap": user_step_to_state_family_name_map
"/dataflow:v1b3/StreamingConfigTask/userStepToStateFamilyNameMap/user_step_to_state_family_name_map": user_step_to_state_family_name_map
"/dataflow:v1b3/StreamingComputationConfig": streaming_computation_config
"/dataflow:v1b3/StreamingComputationConfig/computationId": computation_id
"/dataflow:v1b3/StreamingComputationConfig/systemName": system_name
"/dataflow:v1b3/StreamingComputationConfig/stageName": stage_name
"/dataflow:v1b3/StreamingComputationConfig/instructions": instructions
"/dataflow:v1b3/StreamingComputationConfig/instructions/instruction": instruction
"/dataflow:v1b3/SendWorkerMessagesRequest": send_worker_messages_request
"/dataflow:v1b3/SendWorkerMessagesRequest/workerMessages": worker_messages
"/dataflow:v1b3/SendWorkerMessagesRequest/workerMessages/worker_message": worker_message
"/dataflow:v1b3/WorkerMessage": worker_message
"/dataflow:v1b3/WorkerMessage/labels": labels
"/dataflow:v1b3/WorkerMessage/labels/label": label
"/dataflow:v1b3/WorkerMessage/time": time
"/dataflow:v1b3/WorkerMessage/workerHealthReport": worker_health_report
"/dataflow:v1b3/WorkerMessage/workerMessageCode": worker_message_code
"/dataflow:v1b3/WorkerHealthReport": worker_health_report
"/dataflow:v1b3/WorkerHealthReport/vmIsHealthy": vm_is_healthy
"/dataflow:v1b3/WorkerHealthReport/vmStartupTime": vm_startup_time
"/dataflow:v1b3/WorkerHealthReport/reportInterval": report_interval
"/dataflow:v1b3/WorkerHealthReport/pods": pods
"/dataflow:v1b3/WorkerHealthReport/pods/pod": pod
"/dataflow:v1b3/WorkerHealthReport/pods/pod/pod": pod
"/dataflow:v1b3/WorkerMessageCode": worker_message_code
"/dataflow:v1b3/WorkerMessageCode/code": code
"/dataflow:v1b3/WorkerMessageCode/parameters": parameters
"/dataflow:v1b3/WorkerMessageCode/parameters/parameter": parameter
"/dataflow:v1b3/SendWorkerMessagesResponse": send_worker_messages_response
"/dataflow:v1b3/SendWorkerMessagesResponse/workerMessageResponses": worker_message_responses
"/dataflow:v1b3/SendWorkerMessagesResponse/workerMessageResponses/worker_message_response": worker_message_response
"/dataflow:v1b3/WorkerMessageResponse": worker_message_response
"/dataflow:v1b3/WorkerMessageResponse/workerHealthReportResponse": worker_health_report_response
"/dataflow:v1b3/WorkerHealthReportResponse": worker_health_report_response
"/dataflow:v1b3/WorkerHealthReportResponse/reportInterval": report_interval
"/dataproc:v1/fields": fields
"/dataproc:v1/key": key
"/dataproc:v1/quotaUser": quota_user


@@ -0,0 +1,38 @@
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'google/apis/dataflow_v1b3/service.rb'
require 'google/apis/dataflow_v1b3/classes.rb'
require 'google/apis/dataflow_v1b3/representations.rb'

module Google
  module Apis
    # Google Dataflow API
    #
    # Develops and executes data processing patterns like ETL, batch computation,
    # and continuous computation.
    #
    # @see https://cloud.google.com/dataflow
    module DataflowV1b3
      VERSION = 'V1b3'
      REVISION = '20160923'

      # View and manage your data across Google Cloud Platform services
      AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform'

      # View your email address
      AUTH_USERINFO_EMAIL = 'https://www.googleapis.com/auth/userinfo.email'
    end
  end
end
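
A minimal sketch of loading the new module and authorizing a client with the cloud-platform scope declared above; it assumes the `googleauth` gem and Application Default Credentials are available in the environment:

require 'google/apis/dataflow_v1b3'
require 'googleauth'

Dataflow = Google::Apis::DataflowV1b3 # alias, as in the service docs below
service = Dataflow::DataflowService.new
# ADC picks up credentials from the environment (e.g. GOOGLE_APPLICATION_CREDENTIALS)
service.authorization = Google::Auth.get_application_default([Dataflow::AUTH_CLOUD_PLATFORM])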

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,515 @@
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'

module Google
  module Apis
    module DataflowV1b3
      # Google Dataflow API
      #
      # Develops and executes data processing patterns like ETL, batch computation,
      # and continuous computation.
      #
      # @example
      #    require 'google/apis/dataflow_v1b3'
      #
      #    Dataflow = Google::Apis::DataflowV1b3 # Alias the module
      #    service = Dataflow::DataflowService.new
      #
      # @see https://cloud.google.com/dataflow
      class DataflowService < Google::Apis::Core::BaseService
        # @return [String]
        #   API key. Your API key identifies your project and provides you with API access,
        #   quota, and reports. Required unless you provide an OAuth 2.0 token.
        attr_accessor :key

        # @return [String]
        #   Available to use for quota purposes for server-side applications. Can be any
        #   arbitrary string assigned to a user, but should not exceed 40 characters.
        attr_accessor :quota_user

        def initialize
          super('https://dataflow.googleapis.com/', '')
        end

        # Send a worker_message to the service.
        # @param [String] project_id
        #   The project to send the WorkerMessages to.
        # @param [Google::Apis::DataflowV1b3::SendWorkerMessagesRequest] send_worker_messages_request_object
        # @param [String] fields
        #   Selector specifying which fields to include in a partial response.
        # @param [String] quota_user
        #   Available to use for quota purposes for server-side applications. Can be any
        #   arbitrary string assigned to a user, but should not exceed 40 characters.
        # @param [Google::Apis::RequestOptions] options
        #   Request-specific options
        #
        # @yield [result, err] Result & error if block supplied
        # @yieldparam result [Google::Apis::DataflowV1b3::SendWorkerMessagesResponse] parsed result object
        # @yieldparam err [StandardError] error object if request failed
        #
        # @return [Google::Apis::DataflowV1b3::SendWorkerMessagesResponse]
        #
        # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
        # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
        # @raise [Google::Apis::AuthorizationError] Authorization is required
        def worker_project_messages(project_id, send_worker_messages_request_object = nil, fields: nil, quota_user: nil, options: nil, &block)
          command = make_simple_command(:post, 'v1b3/projects/{projectId}/WorkerMessages', options)
          command.request_representation = Google::Apis::DataflowV1b3::SendWorkerMessagesRequest::Representation
          command.request_object = send_worker_messages_request_object
          command.response_representation = Google::Apis::DataflowV1b3::SendWorkerMessagesResponse::Representation
          command.response_class = Google::Apis::DataflowV1b3::SendWorkerMessagesResponse
          command.params['projectId'] = project_id unless project_id.nil?
          command.query['fields'] = fields unless fields.nil?
          command.query['quotaUser'] = quota_user unless quota_user.nil?
          execute_or_queue_command(command, &block)
        end
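
This endpoint is worker-facing rather than something end users normally call. A sketch of a health-report ping, assuming the authorized `service` and the `Dataflow` alias from the earlier sketch; the project ID and message payload are placeholders:

msg = Dataflow::WorkerMessage.new(time: Time.now.utc.strftime('%Y-%m-%dT%H:%M:%SZ'))
req = Dataflow::SendWorkerMessagesRequest.new(worker_messages: [msg])
response = service.worker_project_messages('my-project', req)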
        # Creates a dataflow job.
        # @param [String] project_id
        #   The project which owns the job.
        # @param [Google::Apis::DataflowV1b3::Job] job_object
        # @param [String] view
        #   Level of information requested in response.
        # @param [String] replace_job_id
        #   DEPRECATED. This field is now on the Job message.
        # @param [String] fields
        #   Selector specifying which fields to include in a partial response.
        # @param [String] quota_user
        #   Available to use for quota purposes for server-side applications. Can be any
        #   arbitrary string assigned to a user, but should not exceed 40 characters.
        # @param [Google::Apis::RequestOptions] options
        #   Request-specific options
        #
        # @yield [result, err] Result & error if block supplied
        # @yieldparam result [Google::Apis::DataflowV1b3::Job] parsed result object
        # @yieldparam err [StandardError] error object if request failed
        #
        # @return [Google::Apis::DataflowV1b3::Job]
        #
        # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
        # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
        # @raise [Google::Apis::AuthorizationError] Authorization is required
        def create_project_job(project_id, job_object = nil, view: nil, replace_job_id: nil, fields: nil, quota_user: nil, options: nil, &block)
          command = make_simple_command(:post, 'v1b3/projects/{projectId}/jobs', options)
          command.request_representation = Google::Apis::DataflowV1b3::Job::Representation
          command.request_object = job_object
          command.response_representation = Google::Apis::DataflowV1b3::Job::Representation
          command.response_class = Google::Apis::DataflowV1b3::Job
          command.params['projectId'] = project_id unless project_id.nil?
          command.query['view'] = view unless view.nil?
          command.query['replaceJobId'] = replace_job_id unless replace_job_id.nil?
          command.query['fields'] = fields unless fields.nil?
          command.query['quotaUser'] = quota_user unless quota_user.nil?
          execute_or_queue_command(command, &block)
        end
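
A sketch of submitting a batch job, reusing the authorized `service` and `Dataflow` alias from above; the name, bucket, and enum strings are illustrative values drawn from the v1b3 surface:

job = Dataflow::Job.new(
  name: 'wordcount-example',   # placeholder job name
  type: 'JOB_TYPE_BATCH',
  environment: Dataflow::Environment.new(temp_storage_prefix: 'gs://my-bucket/tmp')
)
created = service.create_project_job('my-project', job, view: 'JOB_VIEW_SUMMARY')
puts created.id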
        # Gets the state of the specified dataflow job.
        # @param [String] project_id
        #   The project which owns the job.
        # @param [String] job_id
        #   Identifies a single job.
        # @param [String] view
        #   Level of information requested in response.
        # @param [String] fields
        #   Selector specifying which fields to include in a partial response.
        # @param [String] quota_user
        #   Available to use for quota purposes for server-side applications. Can be any
        #   arbitrary string assigned to a user, but should not exceed 40 characters.
        # @param [Google::Apis::RequestOptions] options
        #   Request-specific options
        #
        # @yield [result, err] Result & error if block supplied
        # @yieldparam result [Google::Apis::DataflowV1b3::Job] parsed result object
        # @yieldparam err [StandardError] error object if request failed
        #
        # @return [Google::Apis::DataflowV1b3::Job]
        #
        # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
        # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
        # @raise [Google::Apis::AuthorizationError] Authorization is required
        def get_project_job(project_id, job_id, view: nil, fields: nil, quota_user: nil, options: nil, &block)
          command = make_simple_command(:get, 'v1b3/projects/{projectId}/jobs/{jobId}', options)
          command.response_representation = Google::Apis::DataflowV1b3::Job::Representation
          command.response_class = Google::Apis::DataflowV1b3::Job
          command.params['projectId'] = project_id unless project_id.nil?
          command.params['jobId'] = job_id unless job_id.nil?
          command.query['view'] = view unless view.nil?
          command.query['fields'] = fields unless fields.nil?
          command.query['quotaUser'] = quota_user unless quota_user.nil?
          execute_or_queue_command(command, &block)
        end
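
Fetching a single job is a one-liner with the same placeholder IDs; JOB_VIEW_ALL is one of the v1b3 view levels:

job = service.get_project_job('my-project', 'the-job-id', view: 'JOB_VIEW_ALL')
puts "#{job.name}: #{job.current_state}"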
        # Updates the state of an existing dataflow job.
        # @param [String] project_id
        #   The project which owns the job.
        # @param [String] job_id
        #   Identifies a single job.
        # @param [Google::Apis::DataflowV1b3::Job] job_object
        # @param [String] fields
        #   Selector specifying which fields to include in a partial response.
        # @param [String] quota_user
        #   Available to use for quota purposes for server-side applications. Can be any
        #   arbitrary string assigned to a user, but should not exceed 40 characters.
        # @param [Google::Apis::RequestOptions] options
        #   Request-specific options
        #
        # @yield [result, err] Result & error if block supplied
        # @yieldparam result [Google::Apis::DataflowV1b3::Job] parsed result object
        # @yieldparam err [StandardError] error object if request failed
        #
        # @return [Google::Apis::DataflowV1b3::Job]
        #
        # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
        # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
        # @raise [Google::Apis::AuthorizationError] Authorization is required
        def update_project_job(project_id, job_id, job_object = nil, fields: nil, quota_user: nil, options: nil, &block)
          command = make_simple_command(:put, 'v1b3/projects/{projectId}/jobs/{jobId}', options)
          command.request_representation = Google::Apis::DataflowV1b3::Job::Representation
          command.request_object = job_object
          command.response_representation = Google::Apis::DataflowV1b3::Job::Representation
          command.response_class = Google::Apis::DataflowV1b3::Job
          command.params['projectId'] = project_id unless project_id.nil?
          command.params['jobId'] = job_id unless job_id.nil?
          command.query['fields'] = fields unless fields.nil?
          command.query['quotaUser'] = quota_user unless quota_user.nil?
          execute_or_queue_command(command, &block)
        end
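
The v1b3 surface has no dedicated cancel method; cancellation is requested by updating the job with the desired requested state. A sketch with placeholder IDs (JOB_STATE_CANCELLED is one of the v1b3 job states):

cancel = Dataflow::Job.new(requested_state: 'JOB_STATE_CANCELLED')
service.update_project_job('my-project', 'the-job-id', cancel)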
        # List the jobs of a project
        # @param [String] project_id
        #   The project which owns the jobs.
        # @param [String] filter
        #   The kind of filter to use.
        # @param [String] view
        #   Level of information requested in response. Default is SUMMARY.
        # @param [Fixnum] page_size
        #   If there are many jobs, limit response to at most this many. The actual number
        #   of jobs returned will be the lesser of max_responses and an unspecified server-
        #   defined limit.
        # @param [String] page_token
        #   Set this to the 'next_page_token' field of a previous response to request
        #   additional results in a long list.
        # @param [String] fields
        #   Selector specifying which fields to include in a partial response.
        # @param [String] quota_user
        #   Available to use for quota purposes for server-side applications. Can be any
        #   arbitrary string assigned to a user, but should not exceed 40 characters.
        # @param [Google::Apis::RequestOptions] options
        #   Request-specific options
        #
        # @yield [result, err] Result & error if block supplied
        # @yieldparam result [Google::Apis::DataflowV1b3::ListJobsResponse] parsed result object
        # @yieldparam err [StandardError] error object if request failed
        #
        # @return [Google::Apis::DataflowV1b3::ListJobsResponse]
        #
        # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
        # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
        # @raise [Google::Apis::AuthorizationError] Authorization is required
        def list_project_jobs(project_id, filter: nil, view: nil, page_size: nil, page_token: nil, fields: nil, quota_user: nil, options: nil, &block)
          command = make_simple_command(:get, 'v1b3/projects/{projectId}/jobs', options)
          command.response_representation = Google::Apis::DataflowV1b3::ListJobsResponse::Representation
          command.response_class = Google::Apis::DataflowV1b3::ListJobsResponse
          command.params['projectId'] = project_id unless project_id.nil?
          command.query['filter'] = filter unless filter.nil?
          command.query['view'] = view unless view.nil?
          command.query['pageSize'] = page_size unless page_size.nil?
          command.query['pageToken'] = page_token unless page_token.nil?
          command.query['fields'] = fields unless fields.nil?
          command.query['quotaUser'] = quota_user unless quota_user.nil?
          execute_or_queue_command(command, &block)
        end
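
Because the response is paginated, walking every job means feeding each next_page_token back in as page_token; a sketch with a placeholder project:

page_token = nil
loop do
  page = service.list_project_jobs('my-project', page_size: 100, page_token: page_token)
  (page.jobs || []).each { |job| puts "#{job.id} #{job.current_state}" }
  page_token = page.next_page_token
  break if page_token.nil?
end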
        # Request the job status.
        # @param [String] project_id
        #   A project id.
        # @param [String] job_id
        #   The job to get messages for.
        # @param [String] start_time
        #   Return only metric data that has changed since this time. Default is to return
        #   all information about all metrics for the job.
        # @param [String] fields
        #   Selector specifying which fields to include in a partial response.
        # @param [String] quota_user
        #   Available to use for quota purposes for server-side applications. Can be any
        #   arbitrary string assigned to a user, but should not exceed 40 characters.
        # @param [Google::Apis::RequestOptions] options
        #   Request-specific options
        #
        # @yield [result, err] Result & error if block supplied
        # @yieldparam result [Google::Apis::DataflowV1b3::JobMetrics] parsed result object
        # @yieldparam err [StandardError] error object if request failed
        #
        # @return [Google::Apis::DataflowV1b3::JobMetrics]
        #
        # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
        # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
        # @raise [Google::Apis::AuthorizationError] Authorization is required
        def get_project_job_metrics(project_id, job_id, start_time: nil, fields: nil, quota_user: nil, options: nil, &block)
          command = make_simple_command(:get, 'v1b3/projects/{projectId}/jobs/{jobId}/metrics', options)
          command.response_representation = Google::Apis::DataflowV1b3::JobMetrics::Representation
          command.response_class = Google::Apis::DataflowV1b3::JobMetrics
          command.params['projectId'] = project_id unless project_id.nil?
          command.params['jobId'] = job_id unless job_id.nil?
          command.query['startTime'] = start_time unless start_time.nil?
          command.query['fields'] = fields unless fields.nil?
          command.query['quotaUser'] = quota_user unless quota_user.nil?
          execute_or_queue_command(command, &block)
        end
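
A sketch of polling metrics incrementally by passing the previous poll's timestamp as start_time; IDs and the timestamp are placeholders:

metrics = service.get_project_job_metrics('my-project', 'the-job-id',
                                          start_time: '2016-10-03T00:00:00Z')
(metrics.metrics || []).each do |m|
  puts "#{m.name.name}: #{m.scalar}" # m.name is a MetricStructuredName
end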
        # Get encoded debug configuration for component. Not cacheable.
        # @param [String] project_id
        #   The project id.
        # @param [String] job_id
        #   The job id.
        # @param [Google::Apis::DataflowV1b3::GetDebugConfigRequest] get_debug_config_request_object
        # @param [String] fields
        #   Selector specifying which fields to include in a partial response.
        # @param [String] quota_user
        #   Available to use for quota purposes for server-side applications. Can be any
        #   arbitrary string assigned to a user, but should not exceed 40 characters.
        # @param [Google::Apis::RequestOptions] options
        #   Request-specific options
        #
        # @yield [result, err] Result & error if block supplied
        # @yieldparam result [Google::Apis::DataflowV1b3::GetDebugConfigResponse] parsed result object
        # @yieldparam err [StandardError] error object if request failed
        #
        # @return [Google::Apis::DataflowV1b3::GetDebugConfigResponse]
        #
        # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
        # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
        # @raise [Google::Apis::AuthorizationError] Authorization is required
        def get_project_job_debug_config(project_id, job_id, get_debug_config_request_object = nil, fields: nil, quota_user: nil, options: nil, &block)
          command = make_simple_command(:post, 'v1b3/projects/{projectId}/jobs/{jobId}/debug/getConfig', options)
          command.request_representation = Google::Apis::DataflowV1b3::GetDebugConfigRequest::Representation
          command.request_object = get_debug_config_request_object
          command.response_representation = Google::Apis::DataflowV1b3::GetDebugConfigResponse::Representation
          command.response_class = Google::Apis::DataflowV1b3::GetDebugConfigResponse
          command.params['projectId'] = project_id unless project_id.nil?
          command.params['jobId'] = job_id unless job_id.nil?
          command.query['fields'] = fields unless fields.nil?
          command.query['quotaUser'] = quota_user unless quota_user.nil?
          execute_or_queue_command(command, &block)
        end
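
This is likewise a worker/debug-harness endpoint. A sketch of requesting a component's debug configuration, with invented worker and component IDs:

dbg = Dataflow::GetDebugConfigRequest.new(worker_id: 'worker-0', component_id: 'harness')
config = service.get_project_job_debug_config('my-project', 'the-job-id', dbg).config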
        # Send encoded debug capture data for component.
        # @param [String] project_id
        #   The project id.
        # @param [String] job_id
        #   The job id.
        # @param [Google::Apis::DataflowV1b3::SendDebugCaptureRequest] send_debug_capture_request_object
        # @param [String] fields
        #   Selector specifying which fields to include in a partial response.
        # @param [String] quota_user
        #   Available to use for quota purposes for server-side applications. Can be any
        #   arbitrary string assigned to a user, but should not exceed 40 characters.
        # @param [Google::Apis::RequestOptions] options
        #   Request-specific options
        #
        # @yield [result, err] Result & error if block supplied
        # @yieldparam result [Google::Apis::DataflowV1b3::SendDebugCaptureResponse] parsed result object
        # @yieldparam err [StandardError] error object if request failed
        #
        # @return [Google::Apis::DataflowV1b3::SendDebugCaptureResponse]
        #
        # @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
        # @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
        # @raise [Google::Apis::AuthorizationError] Authorization is required
        def send_project_job_debug_capture(project_id, job_id, send_debug_capture_request_object = nil, fields: nil, quota_user: nil, options: nil, &block)
          command = make_simple_command(:post, 'v1b3/projects/{projectId}/jobs/{jobId}/debug/sendCapture', options)
          command.request_representation = Google::Apis::DataflowV1b3::SendDebugCaptureRequest::Representation
          command.request_object = send_debug_capture_request_object
          command.response_representation = Google::Apis::DataflowV1b3::SendDebugCaptureResponse::Representation
          command.response_class = Google::Apis::DataflowV1b3::SendDebugCaptureResponse
          command.params['projectId'] = project_id unless project_id.nil?
          command.params['jobId'] = job_id unless job_id.nil?
          command.query['fields'] = fields unless fields.nil?
          command.query['quotaUser'] = quota_user unless quota_user.nil?
          execute_or_queue_command(command, &block)
        end

# Request the job status.
# @param [String] project_id
# A project id.
# @param [String] job_id
# The job to get messages about.
# @param [String] minimum_importance
# Filter to only get messages with importance >= level
# @param [Fixnum] page_size
# If specified, determines the maximum number of messages to return. If
# unspecified, the service may choose an appropriate default, or may return an
# arbitrarily large number of results.
# @param [String] page_token
# If supplied, this should be the value of next_page_token returned by an
# earlier call. This will cause the next page of results to be returned.
# @param [String] start_time
# If specified, return only messages with timestamps >= start_time. The default
# is the job creation time (i.e. beginning of messages).
# @param [String] end_time
# Return only messages with timestamps < end_time. The default is now (i.e.
# return up to the latest messages available).
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::DataflowV1b3::ListJobMessagesResponse] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::DataflowV1b3::ListJobMessagesResponse]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def list_project_job_messages(project_id, job_id, minimum_importance: nil, page_size: nil, page_token: nil, start_time: nil, end_time: nil, fields: nil, quota_user: nil, options: nil, &block)
command = make_simple_command(:get, 'v1b3/projects/{projectId}/jobs/{jobId}/messages', options)
command.response_representation = Google::Apis::DataflowV1b3::ListJobMessagesResponse::Representation
command.response_class = Google::Apis::DataflowV1b3::ListJobMessagesResponse
command.params['projectId'] = project_id unless project_id.nil?
command.params['jobId'] = job_id unless job_id.nil?
command.query['minimumImportance'] = minimum_importance unless minimum_importance.nil?
command.query['pageSize'] = page_size unless page_size.nil?
command.query['pageToken'] = page_token unless page_token.nil?
command.query['startTime'] = start_time unless start_time.nil?
command.query['endTime'] = end_time unless end_time.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
execute_or_queue_command(command, &block)
end
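
# Sketch: paging through job messages with an authorized `dataflow` service
# (see the first sketch). The `job_messages` and `next_page_token` accessors
# are assumptions from the ListJobMessagesResponse schema.
#
#   token = nil
#   loop do
#     page = dataflow.list_project_job_messages(
#       'my-project', 'my-job-id',
#       minimum_importance: 'JOB_MESSAGE_WARNING', page_token: token)
#     (page.job_messages || []).each { |m| puts "#{m.time} #{m.message_text}" }
#     token = page.next_page_token
#     break if token.nil?
#   end
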
# Reports the status of Dataflow WorkItems leased by a worker.
# @param [String] project_id
# The project which owns the WorkItem's job.
# @param [String] job_id
# The job which the WorkItem is part of.
# @param [Google::Apis::DataflowV1b3::ReportWorkItemStatusRequest] report_work_item_status_request_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::DataflowV1b3::ReportWorkItemStatusResponse] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::DataflowV1b3::ReportWorkItemStatusResponse]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def report_project_job_work_item_status(project_id, job_id, report_work_item_status_request_object = nil, fields: nil, quota_user: nil, options: nil, &block)
command = make_simple_command(:post, 'v1b3/projects/{projectId}/jobs/{jobId}/workItems:reportStatus', options)
command.request_representation = Google::Apis::DataflowV1b3::ReportWorkItemStatusRequest::Representation
command.request_object = report_work_item_status_request_object
command.response_representation = Google::Apis::DataflowV1b3::ReportWorkItemStatusResponse::Representation
command.response_class = Google::Apis::DataflowV1b3::ReportWorkItemStatusResponse
command.params['projectId'] = project_id unless project_id.nil?
command.params['jobId'] = job_id unless job_id.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
execute_or_queue_command(command, &block)
end
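
# Sketch: how a worker harness might report progress. This endpoint is
# normally called by the Dataflow worker itself rather than by client code;
# the field names (worker_id, work_item_statuses, work_item_id, report_index,
# completed) are assumptions from the v1b3 schemas.
#
#   status = Google::Apis::DataflowV1b3::WorkItemStatus.new(
#     work_item_id: '12345', report_index: 0, completed: false)
#   req = Google::Apis::DataflowV1b3::ReportWorkItemStatusRequest.new(
#     worker_id: 'worker-0', work_item_statuses: [status])
#   dataflow.report_project_job_work_item_status('my-project', 'my-job-id', req)
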
# Leases a Dataflow WorkItem to run.
# @param [String] project_id
# Identifies the project this worker belongs to.
# @param [String] job_id
# Identifies the workflow job this worker belongs to.
# @param [Google::Apis::DataflowV1b3::LeaseWorkItemRequest] lease_work_item_request_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::DataflowV1b3::LeaseWorkItemResponse] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::DataflowV1b3::LeaseWorkItemResponse]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def lease_work_item(project_id, job_id, lease_work_item_request_object = nil, fields: nil, quota_user: nil, options: nil, &block)
command = make_simple_command(:post, 'v1b3/projects/{projectId}/jobs/{jobId}/workItems:lease', options)
command.request_representation = Google::Apis::DataflowV1b3::LeaseWorkItemRequest::Representation
command.request_object = lease_work_item_request_object
command.response_representation = Google::Apis::DataflowV1b3::LeaseWorkItemResponse::Representation
command.response_class = Google::Apis::DataflowV1b3::LeaseWorkItemResponse
command.params['projectId'] = project_id unless project_id.nil?
command.params['jobId'] = job_id unless job_id.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
execute_or_queue_command(command, &block)
end
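
# Sketch: leasing work, again a worker-harness operation. The field names
# (worker_id, work_item_types, requested_lease_duration) and the duration
# string format are assumptions from the v1b3 LeaseWorkItemRequest schema.
#
#   req = Google::Apis::DataflowV1b3::LeaseWorkItemRequest.new(
#     worker_id: 'worker-0',
#     work_item_types: ['map_task'],
#     requested_lease_duration: '300s')
#   lease = dataflow.lease_work_item('my-project', 'my-job-id', req)
#   (lease.work_items || []).each { |wi| puts wi.id }
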
# Creates a Dataflow job from a template.
# @param [String] project_id
# The project which owns the job.
# @param [Google::Apis::DataflowV1b3::CreateJobFromTemplateRequest] create_job_from_template_request_object
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
# Available to use for quota purposes for server-side applications. Can be any
# arbitrary string assigned to a user, but should not exceed 40 characters.
# @param [Google::Apis::RequestOptions] options
# Request-specific options
#
# @yield [result, err] Result & error if block supplied
# @yieldparam result [Google::Apis::DataflowV1b3::Job] parsed result object
# @yieldparam err [StandardError] error object if request failed
#
# @return [Google::Apis::DataflowV1b3::Job]
#
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def create_job_from_template(project_id, create_job_from_template_request_object = nil, fields: nil, quota_user: nil, options: nil, &block)
command = make_simple_command(:post, 'v1b3/projects/{projectId}/templates', options)
command.request_representation = Google::Apis::DataflowV1b3::CreateJobFromTemplateRequest::Representation
command.request_object = create_job_from_template_request_object
command.response_representation = Google::Apis::DataflowV1b3::Job::Representation
command.response_class = Google::Apis::DataflowV1b3::Job
command.params['projectId'] = project_id unless project_id.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
execute_or_queue_command(command, &block)
end
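
# Sketch: launching a job from a template staged in Cloud Storage, with an
# authorized `dataflow` service as in the first sketch. The gcs_path and
# parameters fields are assumptions from the v1b3 CreateJobFromTemplateRequest
# schema; bucket and parameter names are placeholders.
#
#   req = Google::Apis::DataflowV1b3::CreateJobFromTemplateRequest.new(
#     gcs_path: 'gs://my-bucket/templates/my-template',
#     parameters: { 'inputFile' => 'gs://my-bucket/input.txt' })
#   job = dataflow.create_job_from_template('my-project', req)
#   puts job.id
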
protected
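# Sets the service-level defaults (API key and quotaUser) on every outgoing command.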
def apply_command_defaults(command)
command.query['key'] = key unless key.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
end
end
end
end
end

View File

@@ -40,7 +40,8 @@ API_IDS=(adexchangebuyer:v1.4 \
content:v2 \
coordinate:v1 \
customsearch:v1 \
dataflow:v1b3 \
dataproc:v1 \
datastore:v1 \
deploymentmanager:v2 \
dfareporting:v2.6 \