Autogenerated update (2017-09-06)
Update:
- androidmanagement_v1
- appsactivity_v1
- appstate_v1
- bigquerydatatransfer_v1
- calendar_v3
- doubleclicksearch_v2
- fitness_v1
- logging_v2
- logging_v2beta1
- oauth2_v1
- oauth2_v2
- plus_domains_v1
- storage_v1beta1
parent 7ffbcd94e2
commit f92bd0c533
@@ -5801,6 +5801,8 @@
"/androidmanagement:v1/Policy/systemUpdate": system_update
"/androidmanagement:v1/Policy/unmuteMicrophoneDisabled": unmute_microphone_disabled
"/androidmanagement:v1/Policy/version": version
"/androidmanagement:v1/Policy/wifiConfigDisabled": wifi_config_disabled
"/androidmanagement:v1/Policy/wifiConfigsLockdownEnabled": wifi_configs_lockdown_enabled
"/androidmanagement:v1/PowerManagementEvent": power_management_event
"/androidmanagement:v1/PowerManagementEvent/batteryLevel": battery_level
"/androidmanagement:v1/PowerManagementEvent/createTime": create_time
@@ -8978,6 +8980,7 @@
"/bigquerydatatransfer:v1/DataSource/displayName": display_name
"/bigquerydatatransfer:v1/DataSource/helpUrl": help_url
"/bigquerydatatransfer:v1/DataSource/manualRunsDisabled": manual_runs_disabled
"/bigquerydatatransfer:v1/DataSource/minimumScheduleInterval": minimum_schedule_interval
"/bigquerydatatransfer:v1/DataSource/name": name
"/bigquerydatatransfer:v1/DataSource/parameters": parameters
"/bigquerydatatransfer:v1/DataSource/parameters/parameter": parameter
@@ -8987,6 +8990,7 @@
"/bigquerydatatransfer:v1/DataSource/supportsCustomSchedule": supports_custom_schedule
"/bigquerydatatransfer:v1/DataSource/supportsMultipleTransfers": supports_multiple_transfers
"/bigquerydatatransfer:v1/DataSource/transferType": transfer_type
"/bigquerydatatransfer:v1/DataSource/updateDeadlineSeconds": update_deadline_seconds
"/bigquerydatatransfer:v1/DataSourceParameter": data_source_parameter
"/bigquerydatatransfer:v1/DataSourceParameter/allowedValues": allowed_values
"/bigquerydatatransfer:v1/DataSourceParameter/allowedValues/allowed_value": allowed_value
@@ -9056,6 +9060,7 @@
"/bigquerydatatransfer:v1/TransferConfig/params": params
"/bigquerydatatransfer:v1/TransferConfig/params/param": param
"/bigquerydatatransfer:v1/TransferConfig/schedule": schedule
"/bigquerydatatransfer:v1/TransferConfig/state": state
"/bigquerydatatransfer:v1/TransferConfig/status": status
"/bigquerydatatransfer:v1/TransferConfig/updateTime": update_time
"/bigquerydatatransfer:v1/TransferConfig/userId": user_id
@@ -9075,6 +9080,7 @@
"/bigquerydatatransfer:v1/TransferRun/schedule": schedule
"/bigquerydatatransfer:v1/TransferRun/scheduleTime": schedule_time
"/bigquerydatatransfer:v1/TransferRun/startTime": start_time
"/bigquerydatatransfer:v1/TransferRun/state": state
"/bigquerydatatransfer:v1/TransferRun/status": status
"/bigquerydatatransfer:v1/TransferRun/updateTime": update_time
"/bigquerydatatransfer:v1/TransferRun/userId": user_id
@@ -9132,6 +9138,7 @@
"/bigquerydatatransfer:v1/bigquerydatatransfer.projects.locations.transferConfigs.runs.list/pageToken": page_token
"/bigquerydatatransfer:v1/bigquerydatatransfer.projects.locations.transferConfigs.runs.list/parent": parent
"/bigquerydatatransfer:v1/bigquerydatatransfer.projects.locations.transferConfigs.runs.list/runAttempt": run_attempt
"/bigquerydatatransfer:v1/bigquerydatatransfer.projects.locations.transferConfigs.runs.list/states": states
"/bigquerydatatransfer:v1/bigquerydatatransfer.projects.locations.transferConfigs.runs.list/statuses": statuses
"/bigquerydatatransfer:v1/bigquerydatatransfer.projects.locations.transferConfigs.runs.transferLogs.list": list_project_location_transfer_config_run_transfer_logs
"/bigquerydatatransfer:v1/bigquerydatatransfer.projects.locations.transferConfigs.runs.transferLogs.list/messageTypes": message_types
@@ -9167,6 +9174,7 @@
"/bigquerydatatransfer:v1/bigquerydatatransfer.projects.transferConfigs.runs.list/pageToken": page_token
"/bigquerydatatransfer:v1/bigquerydatatransfer.projects.transferConfigs.runs.list/parent": parent
"/bigquerydatatransfer:v1/bigquerydatatransfer.projects.transferConfigs.runs.list/runAttempt": run_attempt
"/bigquerydatatransfer:v1/bigquerydatatransfer.projects.transferConfigs.runs.list/states": states
"/bigquerydatatransfer:v1/bigquerydatatransfer.projects.transferConfigs.runs.list/statuses": statuses
"/bigquerydatatransfer:v1/bigquerydatatransfer.projects.transferConfigs.runs.transferLogs.list": list_project_transfer_config_run_transfer_logs
"/bigquerydatatransfer:v1/bigquerydatatransfer.projects.transferConfigs.runs.transferLogs.list/messageTypes": message_types
@@ -39048,6 +39056,13 @@
"/fitness:v1/Device/type": type
"/fitness:v1/Device/uid": uid
"/fitness:v1/Device/version": version
"/fitness:v1/ListDataPointChangesResponse": list_data_point_changes_response
"/fitness:v1/ListDataPointChangesResponse/dataSourceId": data_source_id
"/fitness:v1/ListDataPointChangesResponse/deletedDataPoint": deleted_data_point
"/fitness:v1/ListDataPointChangesResponse/deletedDataPoint/deleted_data_point": deleted_data_point
"/fitness:v1/ListDataPointChangesResponse/insertedDataPoint": inserted_data_point
"/fitness:v1/ListDataPointChangesResponse/insertedDataPoint/inserted_data_point": inserted_data_point
"/fitness:v1/ListDataPointChangesResponse/nextPageToken": next_page_token
"/fitness:v1/ListDataSourcesResponse": list_data_sources_response
"/fitness:v1/ListDataSourcesResponse/dataSource": data_source
"/fitness:v1/ListDataSourcesResponse/dataSource/data_source": data_source
@@ -39082,6 +39097,11 @@
"/fitness:v1/fields": fields
"/fitness:v1/fitness.users.dataSources.create": create_user_data_source
"/fitness:v1/fitness.users.dataSources.create/userId": user_id
"/fitness:v1/fitness.users.dataSources.dataPointChanges.list": list_user_data_source_data_point_changes
"/fitness:v1/fitness.users.dataSources.dataPointChanges.list/dataSourceId": data_source_id
"/fitness:v1/fitness.users.dataSources.dataPointChanges.list/limit": limit
"/fitness:v1/fitness.users.dataSources.dataPointChanges.list/pageToken": page_token
"/fitness:v1/fitness.users.dataSources.dataPointChanges.list/userId": user_id
"/fitness:v1/fitness.users.dataSources.datasets.delete": delete_user_data_source_dataset
"/fitness:v1/fitness.users.dataSources.datasets.delete/currentTimeMillis": current_time_millis
"/fitness:v1/fitness.users.dataSources.datasets.delete/dataSourceId": data_source_id
@@ -43162,9 +43182,11 @@
"/logging:v2/logging.billingAccounts.sinks.patch": patch_billing_account_sink
"/logging:v2/logging.billingAccounts.sinks.patch/sinkName": sink_name
"/logging:v2/logging.billingAccounts.sinks.patch/uniqueWriterIdentity": unique_writer_identity
"/logging:v2/logging.billingAccounts.sinks.patch/updateMask": update_mask
"/logging:v2/logging.billingAccounts.sinks.update": update_billing_account_sink
"/logging:v2/logging.billingAccounts.sinks.update/sinkName": sink_name
"/logging:v2/logging.billingAccounts.sinks.update/uniqueWriterIdentity": unique_writer_identity
"/logging:v2/logging.billingAccounts.sinks.update/updateMask": update_mask
"/logging:v2/logging.entries.list": list_entry_log_entries
"/logging:v2/logging.entries.write": write_entry_log_entries
"/logging:v2/logging.folders.exclusions.create": create_folder_exclusion
@@ -43200,9 +43222,11 @@
"/logging:v2/logging.folders.sinks.patch": patch_folder_sink
"/logging:v2/logging.folders.sinks.patch/sinkName": sink_name
"/logging:v2/logging.folders.sinks.patch/uniqueWriterIdentity": unique_writer_identity
"/logging:v2/logging.folders.sinks.patch/updateMask": update_mask
"/logging:v2/logging.folders.sinks.update": update_folder_sink
"/logging:v2/logging.folders.sinks.update/sinkName": sink_name
"/logging:v2/logging.folders.sinks.update/uniqueWriterIdentity": unique_writer_identity
"/logging:v2/logging.folders.sinks.update/updateMask": update_mask
"/logging:v2/logging.monitoredResourceDescriptors.list": list_monitored_resource_descriptors
"/logging:v2/logging.monitoredResourceDescriptors.list/pageSize": page_size
"/logging:v2/logging.monitoredResourceDescriptors.list/pageToken": page_token
@@ -43239,9 +43263,11 @@
"/logging:v2/logging.organizations.sinks.patch": patch_organization_sink
"/logging:v2/logging.organizations.sinks.patch/sinkName": sink_name
"/logging:v2/logging.organizations.sinks.patch/uniqueWriterIdentity": unique_writer_identity
"/logging:v2/logging.organizations.sinks.patch/updateMask": update_mask
"/logging:v2/logging.organizations.sinks.update": update_organization_sink
"/logging:v2/logging.organizations.sinks.update/sinkName": sink_name
"/logging:v2/logging.organizations.sinks.update/uniqueWriterIdentity": unique_writer_identity
"/logging:v2/logging.organizations.sinks.update/updateMask": update_mask
"/logging:v2/logging.projects.exclusions.create": create_project_exclusion
"/logging:v2/logging.projects.exclusions.create/parent": parent
"/logging:v2/logging.projects.exclusions.delete": delete_project_exclusion
@@ -43287,9 +43313,11 @@
"/logging:v2/logging.projects.sinks.patch": patch_project_sink
"/logging:v2/logging.projects.sinks.patch/sinkName": sink_name
"/logging:v2/logging.projects.sinks.patch/uniqueWriterIdentity": unique_writer_identity
"/logging:v2/logging.projects.sinks.patch/updateMask": update_mask
"/logging:v2/logging.projects.sinks.update": update_project_sink
"/logging:v2/logging.projects.sinks.update/sinkName": sink_name
"/logging:v2/logging.projects.sinks.update/uniqueWriterIdentity": unique_writer_identity
"/logging:v2/logging.projects.sinks.update/updateMask": update_mask
"/logging:v2/quotaUser": quota_user
"/logging:v2beta1/BucketOptions": bucket_options
"/logging:v2beta1/BucketOptions/explicitBuckets": explicit_buckets
@@ -43543,6 +43571,7 @@
"/logging:v2beta1/logging.projects.sinks.update": update_project_sink
"/logging:v2beta1/logging.projects.sinks.update/sinkName": sink_name
"/logging:v2beta1/logging.projects.sinks.update/uniqueWriterIdentity": unique_writer_identity
"/logging:v2beta1/logging.projects.sinks.update/updateMask": update_mask
"/logging:v2beta1/quotaUser": quota_user
"/manager:v1beta2/DeploymentsListResponse": list_deployments_response
"/manager:v1beta2/TemplatesListResponse": list_templates_response
@@ -26,7 +26,7 @@ module Google
# @see https://developers.google.com/android/management
module AndroidmanagementV1
VERSION = 'V1'
REVISION = '20170807'
REVISION = '20170828'

# Manage Android devices and apps for your customers
AUTH_ANDROIDMANAGEMENT = 'https://www.googleapis.com/auth/androidmanagement'
@ -338,12 +338,6 @@ module Google
|
|||
# @return [Array<Google::Apis::AndroidmanagementV1::HardwareStatus>]
|
||||
attr_accessor :hardware_status_samples
|
||||
|
||||
# The last time the device went offline or came online. This field is only set
|
||||
# when offline device monitoring is enabled in the device's policy.
|
||||
# Corresponds to the JSON property `lastOfflineStateChangeTime`
|
||||
# @return [String]
|
||||
attr_accessor :last_offline_state_change_time
|
||||
|
||||
# The last time the device sent a policy compliance report.
|
||||
# Corresponds to the JSON property `lastPolicyComplianceReportTime`
|
||||
# @return [String]
|
||||
|
@ -455,7 +449,6 @@ module Google
|
|||
@enrollment_token_name = args[:enrollment_token_name] if args.key?(:enrollment_token_name)
|
||||
@hardware_info = args[:hardware_info] if args.key?(:hardware_info)
|
||||
@hardware_status_samples = args[:hardware_status_samples] if args.key?(:hardware_status_samples)
|
||||
@last_offline_state_change_time = args[:last_offline_state_change_time] if args.key?(:last_offline_state_change_time)
|
||||
@last_policy_compliance_report_time = args[:last_policy_compliance_report_time] if args.key?(:last_policy_compliance_report_time)
|
||||
@last_policy_sync_time = args[:last_policy_sync_time] if args.key?(:last_policy_sync_time)
|
||||
@last_status_report_time = args[:last_status_report_time] if args.key?(:last_status_report_time)
|
||||
|
@@ -1658,6 +1651,19 @@ module Google
# @return [Fixnum]
attr_accessor :version

# Whether configuring WiFi access points is disabled.
# Corresponds to the JSON property `wifiConfigDisabled`
# @return [Boolean]
attr_accessor :wifi_config_disabled
alias_method :wifi_config_disabled?, :wifi_config_disabled

# Whether WiFi networks defined in Open Network Configuration are locked so they
# cannot be edited by the user.
# Corresponds to the JSON property `wifiConfigsLockdownEnabled`
# @return [Boolean]
attr_accessor :wifi_configs_lockdown_enabled
alias_method :wifi_configs_lockdown_enabled?, :wifi_configs_lockdown_enabled

def initialize(**args)
update!(**args)
end
@@ -1693,6 +1699,8 @@ module Google
@system_update = args[:system_update] if args.key?(:system_update)
@unmute_microphone_disabled = args[:unmute_microphone_disabled] if args.key?(:unmute_microphone_disabled)
@version = args[:version] if args.key?(:version)
@wifi_config_disabled = args[:wifi_config_disabled] if args.key?(:wifi_config_disabled)
@wifi_configs_lockdown_enabled = args[:wifi_configs_lockdown_enabled] if args.key?(:wifi_configs_lockdown_enabled)
end
end
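A minimal usage sketch of the two new Policy fields (the require path and the values below are illustrative; only the class, accessor, and predicate names come from this diff):

    require 'google/apis/androidmanagement_v1'

    # Hypothetical policy: allow Wi-Fi configuration but lock ONC-defined networks.
    policy = Google::Apis::AndroidmanagementV1::Policy.new(
      wifi_config_disabled: false,
      wifi_configs_lockdown_enabled: true
    )
    policy.wifi_configs_lockdown_enabled?  # => true, via the alias_method added above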
@ -325,7 +325,6 @@ module Google
|
|||
|
||||
collection :hardware_status_samples, as: 'hardwareStatusSamples', class: Google::Apis::AndroidmanagementV1::HardwareStatus, decorator: Google::Apis::AndroidmanagementV1::HardwareStatus::Representation
|
||||
|
||||
property :last_offline_state_change_time, as: 'lastOfflineStateChangeTime'
|
||||
property :last_policy_compliance_report_time, as: 'lastPolicyComplianceReportTime'
|
||||
property :last_policy_sync_time, as: 'lastPolicySyncTime'
|
||||
property :last_status_report_time, as: 'lastStatusReportTime'
|
||||
|
@ -618,6 +617,8 @@ module Google
|
|||
|
||||
property :unmute_microphone_disabled, as: 'unmuteMicrophoneDisabled'
|
||||
property :version, :numeric_string => true, as: 'version'
|
||||
property :wifi_config_disabled, as: 'wifiConfigDisabled'
|
||||
property :wifi_configs_lockdown_enabled, as: 'wifiConfigsLockdownEnabled'
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@@ -25,7 +25,7 @@ module Google
# @see https://developers.google.com/google-apps/activity/
module AppsactivityV1
VERSION = 'V1'
REVISION = '20170215'
REVISION = '20170619'

# View the activity history of your Google apps
AUTH_ACTIVITY = 'https://www.googleapis.com/auth/activity'
@@ -25,7 +25,7 @@ module Google
# @see https://developers.google.com/games/services/web/api/states
module AppstateV1
VERSION = 'V1'
REVISION = '20170420'
REVISION = '20170831'

# View and manage your data for this application
AUTH_APPSTATE = 'https://www.googleapis.com/auth/appstate'
@@ -26,7 +26,7 @@ module Google
# @see https://cloud.google.com/bigquery/
module BigquerydatatransferV1
VERSION = 'V1'
REVISION = '20170806'
REVISION = '20170904'

# View and manage your data in Google BigQuery
AUTH_BIGQUERY = 'https://www.googleapis.com/auth/bigquery'
@ -126,6 +126,11 @@ module Google
|
|||
attr_accessor :manual_runs_disabled
|
||||
alias_method :manual_runs_disabled?, :manual_runs_disabled
|
||||
|
||||
# The minimum interval between two consecutive scheduled runs.
|
||||
# Corresponds to the JSON property `minimumScheduleInterval`
|
||||
# @return [String]
|
||||
attr_accessor :minimum_schedule_interval
|
||||
|
||||
# Data source resource name.
|
||||
# Corresponds to the JSON property `name`
|
||||
# @return [String]
|
||||
|
@ -144,12 +149,6 @@ module Google
|
|||
# @return [Array<String>]
|
||||
attr_accessor :scopes
|
||||
|
||||
# The number of seconds to wait for a status update from the data source
|
||||
# before BigQuery marks the transfer as failed.
|
||||
# Corresponds to the JSON property `statusUpdateDeadlineSeconds`
|
||||
# @return [Fixnum]
|
||||
attr_accessor :status_update_deadline_seconds
|
||||
|
||||
# Specifies whether the data source supports a user defined schedule, or
|
||||
# operates on the default schedule.
|
||||
# When set to `true`, user can override default schedule.
|
||||
|
@ -172,6 +171,12 @@ module Google
|
|||
# @return [String]
|
||||
attr_accessor :transfer_type
|
||||
|
||||
# The number of seconds to wait for an update from the data source
|
||||
# before BigQuery marks the transfer as failed.
|
||||
# Corresponds to the JSON property `updateDeadlineSeconds`
|
||||
# @return [Fixnum]
|
||||
attr_accessor :update_deadline_seconds
|
||||
|
||||
def initialize(**args)
|
||||
update!(**args)
|
||||
end
|
||||
|
@ -188,13 +193,14 @@ module Google
|
|||
@display_name = args[:display_name] if args.key?(:display_name)
|
||||
@help_url = args[:help_url] if args.key?(:help_url)
|
||||
@manual_runs_disabled = args[:manual_runs_disabled] if args.key?(:manual_runs_disabled)
|
||||
@minimum_schedule_interval = args[:minimum_schedule_interval] if args.key?(:minimum_schedule_interval)
|
||||
@name = args[:name] if args.key?(:name)
|
||||
@parameters = args[:parameters] if args.key?(:parameters)
|
||||
@scopes = args[:scopes] if args.key?(:scopes)
|
||||
@status_update_deadline_seconds = args[:status_update_deadline_seconds] if args.key?(:status_update_deadline_seconds)
|
||||
@supports_custom_schedule = args[:supports_custom_schedule] if args.key?(:supports_custom_schedule)
|
||||
@supports_multiple_transfers = args[:supports_multiple_transfers] if args.key?(:supports_multiple_transfers)
|
||||
@transfer_type = args[:transfer_type] if args.key?(:transfer_type)
|
||||
@update_deadline_seconds = args[:update_deadline_seconds] if args.key?(:update_deadline_seconds)
|
||||
end
|
||||
end
|
||||
|
||||
|
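A small sketch of the renamed deadline field on DataSource (construction is illustrative; only the class and accessor names come from this diff, and the values are made up):

    ds = Google::Apis::BigquerydatatransferV1::DataSource.new(
      update_deadline_seconds: 3600,        # replaces status_update_deadline_seconds
      minimum_schedule_interval: '3600s'    # hypothetical duration value
    )
    ds.update_deadline_seconds  # => 3600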
@ -426,13 +432,13 @@ module Google
|
|||
# this token can be used as the
|
||||
# `ListTransferConfigsRequest.page_token`
|
||||
# to request the next page of list results.
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `nextPageToken`
|
||||
# @return [String]
|
||||
attr_accessor :next_page_token
|
||||
|
||||
# The stored pipeline transfer configurations.
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `transferConfigs`
|
||||
# @return [Array<Google::Apis::BigquerydatatransferV1::TransferConfig>]
|
||||
attr_accessor :transfer_configs
|
||||
|
@ -456,13 +462,13 @@ module Google
|
|||
# this token can be used as the
|
||||
# `GetTransferRunLogRequest.page_token`
|
||||
# to request the next page of list results.
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `nextPageToken`
|
||||
# @return [String]
|
||||
attr_accessor :next_page_token
|
||||
|
||||
# The stored pipeline transfer messages.
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `transferMessages`
|
||||
# @return [Array<Google::Apis::BigquerydatatransferV1::TransferMessage>]
|
||||
attr_accessor :transfer_messages
|
||||
|
@ -486,13 +492,13 @@ module Google
|
|||
# this token can be used as the
|
||||
# `ListTransferRunsRequest.page_token`
|
||||
# to request the next page of list results.
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `nextPageToken`
|
||||
# @return [String]
|
||||
attr_accessor :next_page_token
|
||||
|
||||
# The stored pipeline transfer runs.
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `transferRuns`
|
||||
# @return [Array<Google::Apis::BigquerydatatransferV1::TransferRun>]
|
||||
attr_accessor :transfer_runs
|
||||
|
@ -638,7 +644,7 @@ module Google
|
|||
|
||||
# Region in which BigQuery dataset is located. Currently possible values are:
|
||||
# "US" and "EU".
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `datasetRegion`
|
||||
# @return [String]
|
||||
attr_accessor :dataset_region
|
||||
|
@ -669,9 +675,8 @@ module Google
|
|||
# @return [String]
|
||||
attr_accessor :name
|
||||
|
||||
# Next time when data transfer will run. Output only. Applicable
|
||||
# only for batch data transfers.
|
||||
# @OutputOnly
|
||||
# Next time when data transfer will run.
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `nextRunTime`
|
||||
# @return [String]
|
||||
attr_accessor :next_run_time
|
||||
|
@ -698,14 +703,14 @@ module Google
|
|||
# @return [String]
|
||||
attr_accessor :schedule
|
||||
|
||||
# Status of the most recently updated transfer run.
|
||||
# @OutputOnly
|
||||
# Corresponds to the JSON property `status`
|
||||
# State of the most recently updated transfer run.
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `state`
|
||||
# @return [String]
|
||||
attr_accessor :status
|
||||
attr_accessor :state
|
||||
|
||||
# Data transfer modification time. Ignored by server on input.
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `updateTime`
|
||||
# @return [String]
|
||||
attr_accessor :update_time
|
||||
|
@ -713,7 +718,7 @@ module Google
|
|||
# GaiaID of the user on whose behalf transfer is done. Applicable only
|
||||
# to data sources that do not support service accounts. When set to 0,
|
||||
# the data source service account credentials are used.
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `userId`
|
||||
# @return [Fixnum]
|
||||
attr_accessor :user_id
|
||||
|
@ -734,7 +739,7 @@ module Google
|
|||
@next_run_time = args[:next_run_time] if args.key?(:next_run_time)
|
||||
@params = args[:params] if args.key?(:params)
|
||||
@schedule = args[:schedule] if args.key?(:schedule)
|
||||
@status = args[:status] if args.key?(:status)
|
||||
@state = args[:state] if args.key?(:state)
|
||||
@update_time = args[:update_time] if args.key?(:update_time)
|
||||
@user_id = args[:user_id] if args.key?(:user_id)
|
||||
end
|
||||
|
@ -776,14 +781,14 @@ module Google
|
|||
include Google::Apis::Core::Hashable
|
||||
|
||||
# Data source id.
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `dataSourceId`
|
||||
# @return [String]
|
||||
attr_accessor :data_source_id
|
||||
|
||||
# Region in which BigQuery dataset is located. Currently possible values are:
|
||||
# "US" and "EU".
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `datasetRegion`
|
||||
# @return [String]
|
||||
attr_accessor :dataset_region
|
||||
|
@ -795,7 +800,7 @@ module Google
|
|||
|
||||
# Time when transfer run ended. Parameter ignored by server for input
|
||||
# requests.
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `endTime`
|
||||
# @return [String]
|
||||
attr_accessor :end_time
|
||||
|
@ -825,7 +830,7 @@ module Google
|
|||
# this is empty.
|
||||
# NOTE: the system might choose to delay the schedule depending on the
|
||||
# current load, so `schedule_time` doesn't always matches this.
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `schedule`
|
||||
# @return [String]
|
||||
attr_accessor :schedule
|
||||
|
@ -837,25 +842,25 @@ module Google
|
|||
|
||||
# Time when transfer run was started. Parameter ignored by server for input
|
||||
# requests.
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `startTime`
|
||||
# @return [String]
|
||||
attr_accessor :start_time
|
||||
|
||||
# Data transfer run status. Ignored for input requests.
|
||||
# @OutputOnly
|
||||
# Corresponds to the JSON property `status`
|
||||
# Data transfer run state. Ignored for input requests.
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `state`
|
||||
# @return [String]
|
||||
attr_accessor :status
|
||||
attr_accessor :state
|
||||
|
||||
# Last time the data transfer run status was updated.
|
||||
# @OutputOnly
|
||||
# Last time the data transfer run state was updated.
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `updateTime`
|
||||
# @return [String]
|
||||
attr_accessor :update_time
|
||||
|
||||
# The user id for this transfer run.
|
||||
# @OutputOnly
|
||||
# Output only.
|
||||
# Corresponds to the JSON property `userId`
|
||||
# @return [Fixnum]
|
||||
attr_accessor :user_id
|
||||
|
@ -876,7 +881,7 @@ module Google
|
|||
@schedule = args[:schedule] if args.key?(:schedule)
|
||||
@schedule_time = args[:schedule_time] if args.key?(:schedule_time)
|
||||
@start_time = args[:start_time] if args.key?(:start_time)
|
||||
@status = args[:status] if args.key?(:status)
|
||||
@state = args[:state] if args.key?(:state)
|
||||
@update_time = args[:update_time] if args.key?(:update_time)
|
||||
@user_id = args[:user_id] if args.key?(:user_id)
|
||||
end
|
||||
|
|
|
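After this rename, TransferConfig and TransferRun expose #state instead of #status; a sketch (object construction is illustrative only):

    run = Google::Apis::BigquerydatatransferV1::TransferRun.new(state: 'SUCCEEDED')
    run.state   # => "SUCCEEDED"
    # run.status is gone; the serialized JSON key is now 'state' as well.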
@ -162,14 +162,15 @@ module Google
|
|||
property :display_name, as: 'displayName'
|
||||
property :help_url, as: 'helpUrl'
|
||||
property :manual_runs_disabled, as: 'manualRunsDisabled'
|
||||
property :minimum_schedule_interval, as: 'minimumScheduleInterval'
|
||||
property :name, as: 'name'
|
||||
collection :parameters, as: 'parameters', class: Google::Apis::BigquerydatatransferV1::DataSourceParameter, decorator: Google::Apis::BigquerydatatransferV1::DataSourceParameter::Representation
|
||||
|
||||
collection :scopes, as: 'scopes'
|
||||
property :status_update_deadline_seconds, as: 'statusUpdateDeadlineSeconds'
|
||||
property :supports_custom_schedule, as: 'supportsCustomSchedule'
|
||||
property :supports_multiple_transfers, as: 'supportsMultipleTransfers'
|
||||
property :transfer_type, as: 'transferType'
|
||||
property :update_deadline_seconds, as: 'updateDeadlineSeconds'
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -305,7 +306,7 @@ module Google
|
|||
property :next_run_time, as: 'nextRunTime'
|
||||
hash :params, as: 'params'
|
||||
property :schedule, as: 'schedule'
|
||||
property :status, as: 'status'
|
||||
property :state, as: 'state'
|
||||
property :update_time, as: 'updateTime'
|
||||
property :user_id, :numeric_string => true, as: 'userId'
|
||||
end
|
||||
|
@ -333,7 +334,7 @@ module Google
|
|||
property :schedule, as: 'schedule'
|
||||
property :schedule_time, as: 'scheduleTime'
|
||||
property :start_time, as: 'startTime'
|
||||
property :status, as: 'status'
|
||||
property :state, as: 'state'
|
||||
property :update_time, as: 'updateTime'
|
||||
property :user_id, :numeric_string => true, as: 'userId'
|
||||
end
|
||||
|
|
|
@ -813,8 +813,8 @@ module Google
|
|||
# `page_token` value to request the next page of list results.
|
||||
# @param [String] run_attempt
|
||||
# Indicates how run attempts are to be pulled.
|
||||
# @param [Array<String>, String] statuses
|
||||
# When specified, only transfer runs with requested statuses are returned.
|
||||
# @param [Array<String>, String] states
|
||||
# When specified, only transfer runs with requested states are returned.
|
||||
# @param [String] fields
|
||||
# Selector specifying which fields to include in a partial response.
|
||||
# @param [String] quota_user
|
||||
|
@ -832,7 +832,7 @@ module Google
|
|||
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
|
||||
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
|
||||
# @raise [Google::Apis::AuthorizationError] Authorization is required
|
||||
def list_project_location_transfer_config_runs(parent, page_size: nil, page_token: nil, run_attempt: nil, statuses: nil, fields: nil, quota_user: nil, options: nil, &block)
|
||||
def list_project_location_transfer_config_runs(parent, page_size: nil, page_token: nil, run_attempt: nil, states: nil, fields: nil, quota_user: nil, options: nil, &block)
|
||||
command = make_simple_command(:get, 'v1/{+parent}/runs', options)
|
||||
command.response_representation = Google::Apis::BigquerydatatransferV1::ListTransferRunsResponse::Representation
|
||||
command.response_class = Google::Apis::BigquerydatatransferV1::ListTransferRunsResponse
|
||||
|
@ -840,7 +840,7 @@ module Google
|
|||
command.query['pageSize'] = page_size unless page_size.nil?
|
||||
command.query['pageToken'] = page_token unless page_token.nil?
|
||||
command.query['runAttempt'] = run_attempt unless run_attempt.nil?
|
||||
command.query['statuses'] = statuses unless statuses.nil?
|
||||
command.query['states'] = states unless states.nil?
|
||||
command.query['fields'] = fields unless fields.nil?
|
||||
command.query['quotaUser'] = quota_user unless quota_user.nil?
|
||||
execute_or_queue_command(command, &block)
|
||||
|
@ -1227,8 +1227,8 @@ module Google
|
|||
# `page_token` value to request the next page of list results.
|
||||
# @param [String] run_attempt
|
||||
# Indicates how run attempts are to be pulled.
|
||||
# @param [Array<String>, String] statuses
|
||||
# When specified, only transfer runs with requested statuses are returned.
|
||||
# @param [Array<String>, String] states
|
||||
# When specified, only transfer runs with requested states are returned.
|
||||
# @param [String] fields
|
||||
# Selector specifying which fields to include in a partial response.
|
||||
# @param [String] quota_user
|
||||
|
@ -1246,7 +1246,7 @@ module Google
|
|||
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
|
||||
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
|
||||
# @raise [Google::Apis::AuthorizationError] Authorization is required
|
||||
def list_project_transfer_config_runs(parent, page_size: nil, page_token: nil, run_attempt: nil, statuses: nil, fields: nil, quota_user: nil, options: nil, &block)
|
||||
def list_project_transfer_config_runs(parent, page_size: nil, page_token: nil, run_attempt: nil, states: nil, fields: nil, quota_user: nil, options: nil, &block)
|
||||
command = make_simple_command(:get, 'v1/{+parent}/runs', options)
|
||||
command.response_representation = Google::Apis::BigquerydatatransferV1::ListTransferRunsResponse::Representation
|
||||
command.response_class = Google::Apis::BigquerydatatransferV1::ListTransferRunsResponse
|
||||
|
@ -1254,7 +1254,7 @@ module Google
|
|||
command.query['pageSize'] = page_size unless page_size.nil?
|
||||
command.query['pageToken'] = page_token unless page_token.nil?
|
||||
command.query['runAttempt'] = run_attempt unless run_attempt.nil?
|
||||
command.query['statuses'] = statuses unless statuses.nil?
|
||||
command.query['states'] = states unless states.nil?
|
||||
command.query['fields'] = fields unless fields.nil?
|
||||
command.query['quotaUser'] = quota_user unless quota_user.nil?
|
||||
execute_or_queue_command(command, &block)
|
||||
|
|
|
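The runs.list helpers now take a states: keyword instead of statuses:; a rough usage sketch (the service class name, authorization setup, and parent value are assumed/hypothetical; the method name and response accessors come from this diff):

    service = Google::Apis::BigquerydatatransferV1::BigQueryDataTransferService.new
    runs = service.list_project_transfer_config_runs(
      'projects/my-project/transferConfigs/my-config',   # hypothetical parent
      states: ['SUCCEEDED', 'FAILED']
    )
    runs.transfer_runs.to_a.each { |run| puts run.state }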
@@ -25,7 +25,7 @@ module Google
# @see https://developers.google.com/google-apps/calendar/firstapp
module CalendarV3
VERSION = 'V3'
REVISION = '20170716'
REVISION = '20170829'

# Manage your calendars
AUTH_CALENDAR = 'https://www.googleapis.com/auth/calendar'
@@ -50,7 +50,7 @@ module Google

def initialize
super('https://www.googleapis.com/', 'calendar/v3/')
@batch_path = 'batch'
@batch_path = 'batch/calendar/v3'
end

# Deletes an access control rule.
@@ -26,7 +26,7 @@ module Google
# @see https://developers.google.com/doubleclick-search/
module DoubleclicksearchV2
VERSION = 'V2'
REVISION = '20170419'
REVISION = '20170815'

# View and manage your advertising data in DoubleClick Search
AUTH_DOUBLECLICKSEARCH = 'https://www.googleapis.com/auth/doubleclicksearch'
@@ -25,7 +25,7 @@ module Google
# @see https://developers.google.com/fit/rest/
module FitnessV1
VERSION = 'V1'
REVISION = '20170731'
REVISION = '20170830'

# View your activity information in Google Fit
AUTH_FITNESS_ACTIVITY_READ = 'https://www.googleapis.com/auth/fitness.activity.read'
@ -716,6 +716,45 @@ module Google
|
|||
end
|
||||
end
|
||||
|
||||
#
|
||||
class ListDataPointChangesResponse
|
||||
include Google::Apis::Core::Hashable
|
||||
|
||||
# The data stream ID of the data source with data point changes.
|
||||
# Corresponds to the JSON property `dataSourceId`
|
||||
# @return [String]
|
||||
attr_accessor :data_source_id
|
||||
|
||||
# Data points that have been removed and will not be included in any other
|
||||
# request for dataset contents.
|
||||
# Corresponds to the JSON property `deletedDataPoint`
|
||||
# @return [Array<Google::Apis::FitnessV1::DataPoint>]
|
||||
attr_accessor :deleted_data_point
|
||||
|
||||
# Data points listed.
|
||||
# Corresponds to the JSON property `insertedDataPoint`
|
||||
# @return [Array<Google::Apis::FitnessV1::DataPoint>]
|
||||
attr_accessor :inserted_data_point
|
||||
|
||||
# The continuation token, which is used to page through large result sets.
|
||||
# Provide this value in a subsequent request to return the next page of results.
|
||||
# Corresponds to the JSON property `nextPageToken`
|
||||
# @return [String]
|
||||
attr_accessor :next_page_token
|
||||
|
||||
def initialize(**args)
|
||||
update!(**args)
|
||||
end
|
||||
|
||||
# Update properties of this object
|
||||
def update!(**args)
|
||||
@data_source_id = args[:data_source_id] if args.key?(:data_source_id)
|
||||
@deleted_data_point = args[:deleted_data_point] if args.key?(:deleted_data_point)
|
||||
@inserted_data_point = args[:inserted_data_point] if args.key?(:inserted_data_point)
|
||||
@next_page_token = args[:next_page_token] if args.key?(:next_page_token)
|
||||
end
|
||||
end
|
||||
|
||||
#
|
||||
class ListDataSourcesResponse
|
||||
include Google::Apis::Core::Hashable
|
||||
|
|
|
@ -112,6 +112,12 @@ module Google
|
|||
include Google::Apis::Core::JsonObjectSupport
|
||||
end
|
||||
|
||||
class ListDataPointChangesResponse
|
||||
class Representation < Google::Apis::Core::JsonRepresentation; end
|
||||
|
||||
include Google::Apis::Core::JsonObjectSupport
|
||||
end
|
||||
|
||||
class ListDataSourcesResponse
|
||||
class Representation < Google::Apis::Core::JsonRepresentation; end
|
||||
|
||||
|
@ -313,6 +319,18 @@ module Google
|
|||
end
|
||||
end
|
||||
|
||||
class ListDataPointChangesResponse
|
||||
# @private
|
||||
class Representation < Google::Apis::Core::JsonRepresentation
|
||||
property :data_source_id, as: 'dataSourceId'
|
||||
collection :deleted_data_point, as: 'deletedDataPoint', class: Google::Apis::FitnessV1::DataPoint, decorator: Google::Apis::FitnessV1::DataPoint::Representation
|
||||
|
||||
collection :inserted_data_point, as: 'insertedDataPoint', class: Google::Apis::FitnessV1::DataPoint, decorator: Google::Apis::FitnessV1::DataPoint::Representation
|
||||
|
||||
property :next_page_token, as: 'nextPageToken'
|
||||
end
|
||||
end
|
||||
|
||||
class ListDataSourcesResponse
|
||||
# @private
|
||||
class Representation < Google::Apis::Core::JsonRepresentation
|
||||
|
|
|
@ -311,6 +311,54 @@ module Google
|
|||
execute_or_queue_command(command, &block)
|
||||
end
|
||||
|
||||
# results ordered by descending end_time
|
||||
# @param [String] user_id
|
||||
# List data points for the person identified. Use "me" to indicate the
|
||||
# authenticated user. Only "me" is supported at this time.
|
||||
# @param [String] data_source_id
|
||||
# The data stream ID of the data source that created the dataset.
|
||||
# @param [Fixnum] limit
|
||||
# If specified, no more than this many data point changes will be included in
|
||||
# the response. The default is 500 data point changes.
|
||||
# @param [String] page_token
|
||||
# The continuation token, which is used to page through large result sets. To
|
||||
# get the next page of results, set this parameter to the value of nextPageToken
|
||||
# from the previous response.
|
||||
# @param [String] fields
|
||||
# Selector specifying which fields to include in a partial response.
|
||||
# @param [String] quota_user
|
||||
# Available to use for quota purposes for server-side applications. Can be any
|
||||
# arbitrary string assigned to a user, but should not exceed 40 characters.
|
||||
# Overrides userIp if both are provided.
|
||||
# @param [String] user_ip
|
||||
# IP address of the site where the request originates. Use this if you want to
|
||||
# enforce per-user limits.
|
||||
# @param [Google::Apis::RequestOptions] options
|
||||
# Request-specific options
|
||||
#
|
||||
# @yield [result, err] Result & error if block supplied
|
||||
# @yieldparam result [Google::Apis::FitnessV1::ListDataPointChangesResponse] parsed result object
|
||||
# @yieldparam err [StandardError] error object if request failed
|
||||
#
|
||||
# @return [Google::Apis::FitnessV1::ListDataPointChangesResponse]
|
||||
#
|
||||
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
|
||||
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
|
||||
# @raise [Google::Apis::AuthorizationError] Authorization is required
|
||||
def list_user_data_source_data_point_changes(user_id, data_source_id, limit: nil, page_token: nil, fields: nil, quota_user: nil, user_ip: nil, options: nil, &block)
|
||||
command = make_simple_command(:get, '{userId}/dataSources/{dataSourceId}/dataPointChanges', options)
|
||||
command.response_representation = Google::Apis::FitnessV1::ListDataPointChangesResponse::Representation
|
||||
command.response_class = Google::Apis::FitnessV1::ListDataPointChangesResponse
|
||||
command.params['userId'] = user_id unless user_id.nil?
|
||||
command.params['dataSourceId'] = data_source_id unless data_source_id.nil?
|
||||
command.query['limit'] = limit unless limit.nil?
|
||||
command.query['pageToken'] = page_token unless page_token.nil?
|
||||
command.query['fields'] = fields unless fields.nil?
|
||||
command.query['quotaUser'] = quota_user unless quota_user.nil?
|
||||
command.query['userIp'] = user_ip unless user_ip.nil?
|
||||
execute_or_queue_command(command, &block)
|
||||
end
|
||||
|
||||
# Performs an inclusive delete of all data points whose start and end times have
|
||||
# any overlap with the time range specified by the dataset ID. For most data
|
||||
# types, the entire data point will be deleted. For data types where the time
|
||||
|
|
|
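A minimal sketch of the new dataPointChanges.list call added above (the FitnessService class name, authorization setup, and data stream id are assumed/hypothetical; the method name and response accessors come from this diff):

    fitness = Google::Apis::FitnessV1::FitnessService.new
    # fitness.authorization = ...   # credentials assumed to be configured

    changes = fitness.list_user_data_source_data_point_changes(
      'me',                                             # only "me" is supported
      'derived:com.google.step_count.delta:example',    # hypothetical stream id
      limit: 100
    )
    changes.inserted_data_point.to_a.each { |point| p point }
    next_token = changes.next_page_token   # pass back as page_token for the next page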
@@ -25,7 +25,7 @@ module Google
# @see https://cloud.google.com/logging/docs/
module LoggingV2
VERSION = 'V2'
REVISION = '20170828'
REVISION = '20170905'

# View and manage your data across Google Cloud Platform services
AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform'
@ -1028,18 +1028,14 @@ module Google
|
|||
# @return [String]
|
||||
attr_accessor :destination
|
||||
|
||||
# Optional. The time at which this sink will stop exporting log entries. Log
|
||||
# entries are exported only if their timestamp is earlier than the end time. If
|
||||
# this field is not supplied, there is no end time. If both a start time and an
|
||||
# end time are provided, then the end time must be later than the start time.
|
||||
# Deprecated. This field is ignored when creating or updating sinks.
|
||||
# Corresponds to the JSON property `endTime`
|
||||
# @return [String]
|
||||
attr_accessor :end_time
|
||||
|
||||
# Optional. An advanced logs filter. The only exported log entries are those
|
||||
# that are in the resource owning the sink and that match the filter. The filter
|
||||
# must use the log entry format specified by the output_version_format parameter.
|
||||
# For example, in the v2 format:
|
||||
# that are in the resource owning the sink and that match the filter. For
|
||||
# example:
|
||||
# logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR
|
||||
# Corresponds to the JSON property `filter`
|
||||
# @return [String]
|
||||
|
@ -1076,10 +1072,7 @@ module Google
|
|||
# @return [String]
|
||||
attr_accessor :output_version_format
|
||||
|
||||
# Optional. The time at which this sink will begin exporting log entries. Log
|
||||
# entries are exported only if their timestamp is not earlier than the start
|
||||
# time. The default value of this field is the time the sink is created or
|
||||
# updated.
|
||||
# Deprecated. This field is ignored when creating or updating sinks.
|
||||
# Corresponds to the JSON property `startTime`
|
||||
# @return [String]
|
||||
attr_accessor :start_time
|
||||
|
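With start_time and end_time now ignored, a sink is effectively defined by its destination and filter; a sketch (the name and destination values are hypothetical, the filter example is the one from the comment above):

    sink = Google::Apis::LoggingV2::LogSink.new(
      name: 'my-error-sink',                               # hypothetical
      destination: 'storage.googleapis.com/my-log-bucket', # hypothetical bucket
      filter: 'logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR'
    )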
@ -1639,13 +1632,13 @@ module Google
|
|||
# missing in log entries, then this method supplies the current time or a unique
|
||||
# identifier, respectively. The supplied values are chosen so that, among the
|
||||
# log entries that did not supply their own values, the entries earlier in the
|
||||
# list will sort before the entries later in the list. See entries.list.Log
|
||||
# entries with timestamps that are more than the logs retention period in the
|
||||
# past or more than 24 hours in the future might be discarded. Discarding does
|
||||
# not return an error.To improve throughput and to avoid exceeding the quota
|
||||
# limit for calls to entries.write, you should try to include several log
|
||||
# entries in this list, rather than calling this method for each individual log
|
||||
# entry.
|
||||
# list will sort before the entries later in the list. See the entries.list
|
||||
# method.Log entries with timestamps that are more than the logs retention
|
||||
# period in the past or more than 24 hours in the future might be discarded.
|
||||
# Discarding does not return an error.To improve throughput and to avoid
|
||||
# exceeding the quota limit for calls to entries.write, you should try to
|
||||
# include several log entries in this list, rather than calling this method for
|
||||
# each individual log entry.
|
||||
# Corresponds to the JSON property `entries`
|
||||
# @return [Array<Google::Apis::LoggingV2::LogEntry>]
|
||||
attr_accessor :entries
|
||||
|
|
|
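The comment above recommends writing several entries per call rather than one call per entry; a rough sketch of that pattern (the LoggingService, LogEntry, WriteLogEntriesRequest, and MonitoredResource names are assumed from the wider gem rather than shown in this hunk; values are made up):

    logging = Google::Apis::LoggingV2::LoggingService.new
    entries = ['job started', 'job finished'].map do |msg|
      Google::Apis::LoggingV2::LogEntry.new(text_payload: msg)
    end
    request = Google::Apis::LoggingV2::WriteLogEntriesRequest.new(
      log_name: 'projects/my-project/logs/my-log',                        # hypothetical
      resource: Google::Apis::LoggingV2::MonitoredResource.new(type: 'global'),
      entries: entries
    )
    logging.write_entry_log_entries(request)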
@ -334,10 +334,9 @@ module Google
|
|||
end
|
||||
|
||||
# Creates a sink that exports specified log entries to a destination. The export
|
||||
# of newly-ingested log entries begins immediately, unless the current time is
|
||||
# outside the sink's start and end times or the sink's writer_identity is not
|
||||
# permitted to write to the destination. A sink can export log entries only from
|
||||
# the resource owning the sink.
|
||||
# of newly-ingested log entries begins immediately, unless the sink's
|
||||
# writer_identity is not permitted to write to the destination. A sink can
|
||||
# export log entries only from the resource owning the sink.
|
||||
# @param [String] parent
|
||||
# Required. The resource in which to create the sink:
|
||||
# "projects/[PROJECT_ID]"
|
||||
|
@ -504,9 +503,8 @@ module Google
|
|||
end
|
||||
|
||||
# Updates a sink. This method replaces the following fields in the existing sink
|
||||
# with values from the new sink: destination, filter, output_version_format,
|
||||
# start_time, and end_time. The updated sink might also have a new
|
||||
# writer_identity; see the unique_writer_identity field.
|
||||
# with values from the new sink: destination, and filter. The updated sink might
|
||||
# also have a new writer_identity; see the unique_writer_identity field.
|
||||
# @param [String] sink_name
|
||||
# Required. The full resource name of the sink to update, including the parent
|
||||
# resource and the sink identifier:
|
||||
|
@ -526,6 +524,15 @@ module Google
|
|||
# changed to a unique service account.
|
||||
# It is an error if the old value is true and the new value is set to false or
|
||||
# defaulted to false.
|
||||
# @param [String] update_mask
|
||||
# Optional. Field mask that specifies the fields in sink that need an update. A
|
||||
# sink field will be overwritten if, and only if, it is in the update mask. name
|
||||
# and output only fields cannot be updated.An empty updateMask is temporarily
|
||||
# treated as using the following mask for backwards compatibility purposes:
|
||||
# destination,filter,includeChildren At some point in the future, behavior will
|
||||
# be removed and specifying an empty updateMask will be an error.For a detailed
|
||||
# FieldMask definition, see https://developers.google.com/protocol-buffers/docs/
|
||||
# reference/google.protobuf#fieldmaskExample: updateMask=filter.
|
||||
# @param [String] fields
|
||||
# Selector specifying which fields to include in a partial response.
|
||||
# @param [String] quota_user
|
||||
|
@ -543,7 +550,7 @@ module Google
|
|||
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
|
||||
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
|
||||
# @raise [Google::Apis::AuthorizationError] Authorization is required
|
||||
def patch_billing_account_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, fields: nil, quota_user: nil, options: nil, &block)
|
||||
def patch_billing_account_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block)
|
||||
command = make_simple_command(:patch, 'v2/{+sinkName}', options)
|
||||
command.request_representation = Google::Apis::LoggingV2::LogSink::Representation
|
||||
command.request_object = log_sink_object
|
||||
|
@ -551,15 +558,15 @@ module Google
|
|||
command.response_class = Google::Apis::LoggingV2::LogSink
|
||||
command.params['sinkName'] = sink_name unless sink_name.nil?
|
||||
command.query['uniqueWriterIdentity'] = unique_writer_identity unless unique_writer_identity.nil?
|
||||
command.query['updateMask'] = update_mask unless update_mask.nil?
|
||||
command.query['fields'] = fields unless fields.nil?
|
||||
command.query['quotaUser'] = quota_user unless quota_user.nil?
|
||||
execute_or_queue_command(command, &block)
|
||||
end
|
||||
|
||||
# Updates a sink. This method replaces the following fields in the existing sink
|
||||
# with values from the new sink: destination, filter, output_version_format,
|
||||
# start_time, and end_time. The updated sink might also have a new
|
||||
# writer_identity; see the unique_writer_identity field.
|
||||
# with values from the new sink: destination, and filter. The updated sink might
|
||||
# also have a new writer_identity; see the unique_writer_identity field.
|
||||
# @param [String] sink_name
|
||||
# Required. The full resource name of the sink to update, including the parent
|
||||
# resource and the sink identifier:
|
||||
|
@ -579,6 +586,15 @@ module Google
|
|||
# changed to a unique service account.
|
||||
# It is an error if the old value is true and the new value is set to false or
|
||||
# defaulted to false.
|
||||
# @param [String] update_mask
|
||||
# Optional. Field mask that specifies the fields in sink that need an update. A
|
||||
# sink field will be overwritten if, and only if, it is in the update mask. name
|
||||
# and output only fields cannot be updated.An empty updateMask is temporarily
|
||||
# treated as using the following mask for backwards compatibility purposes:
|
||||
# destination,filter,includeChildren At some point in the future, behavior will
|
||||
# be removed and specifying an empty updateMask will be an error.For a detailed
|
||||
# FieldMask definition, see https://developers.google.com/protocol-buffers/docs/
|
||||
# reference/google.protobuf#fieldmaskExample: updateMask=filter.
|
||||
# @param [String] fields
|
||||
# Selector specifying which fields to include in a partial response.
|
||||
# @param [String] quota_user
|
||||
|
@ -596,7 +612,7 @@ module Google
|
|||
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
|
||||
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
|
||||
# @raise [Google::Apis::AuthorizationError] Authorization is required
|
||||
def update_billing_account_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, fields: nil, quota_user: nil, options: nil, &block)
|
||||
def update_billing_account_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block)
|
||||
command = make_simple_command(:put, 'v2/{+sinkName}', options)
|
||||
command.request_representation = Google::Apis::LoggingV2::LogSink::Representation
|
||||
command.request_object = log_sink_object
|
||||
|
@ -604,6 +620,7 @@ module Google
|
|||
command.response_class = Google::Apis::LoggingV2::LogSink
|
||||
command.params['sinkName'] = sink_name unless sink_name.nil?
|
||||
command.query['uniqueWriterIdentity'] = unique_writer_identity unless unique_writer_identity.nil?
|
||||
command.query['updateMask'] = update_mask unless update_mask.nil?
|
||||
command.query['fields'] = fields unless fields.nil?
|
||||
command.query['quotaUser'] = quota_user unless quota_user.nil?
|
||||
execute_or_queue_command(command, &block)
|
||||
|
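A sketch of sinks.patch with the new updateMask parameter, using the billing-account variant defined above (the LoggingService class name and the resource names are assumed/hypothetical; the method name and update_mask keyword come from this diff):

    logging = Google::Apis::LoggingV2::LoggingService.new
    updated = Google::Apis::LoggingV2::LogSink.new(filter: 'severity>=WARNING')
    logging.patch_billing_account_sink(
      'billingAccounts/012345-ABCDEF-678901/sinks/my-sink',   # hypothetical sinkName
      updated,
      update_mask: 'filter'    # only the filter field is overwritten
    )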
@ -960,10 +977,9 @@ module Google
|
|||
end
|
||||
|
||||
# Creates a sink that exports specified log entries to a destination. The export
|
||||
# of newly-ingested log entries begins immediately, unless the current time is
|
||||
# outside the sink's start and end times or the sink's writer_identity is not
|
||||
# permitted to write to the destination. A sink can export log entries only from
|
||||
# the resource owning the sink.
|
||||
# of newly-ingested log entries begins immediately, unless the sink's
|
||||
# writer_identity is not permitted to write to the destination. A sink can
|
||||
# export log entries only from the resource owning the sink.
|
||||
# @param [String] parent
|
||||
# Required. The resource in which to create the sink:
|
||||
# "projects/[PROJECT_ID]"
|
||||
|
@ -1130,9 +1146,8 @@ module Google
|
|||
end
|
||||
|
||||
# Updates a sink. This method replaces the following fields in the existing sink
|
||||
# with values from the new sink: destination, filter, output_version_format,
|
||||
# start_time, and end_time. The updated sink might also have a new
|
||||
# writer_identity; see the unique_writer_identity field.
|
||||
# with values from the new sink: destination, and filter. The updated sink might
|
||||
# also have a new writer_identity; see the unique_writer_identity field.
|
||||
# @param [String] sink_name
|
||||
# Required. The full resource name of the sink to update, including the parent
|
||||
# resource and the sink identifier:
|
||||
|
@ -1152,6 +1167,15 @@ module Google
|
|||
# changed to a unique service account.
|
||||
# It is an error if the old value is true and the new value is set to false or
|
||||
# defaulted to false.
|
||||
# @param [String] update_mask
|
||||
# Optional. Field mask that specifies the fields in sink that need an update. A
|
||||
# sink field will be overwritten if, and only if, it is in the update mask. name
|
||||
# and output only fields cannot be updated.An empty updateMask is temporarily
|
||||
# treated as using the following mask for backwards compatibility purposes:
|
||||
# destination,filter,includeChildren At some point in the future, behavior will
|
||||
# be removed and specifying an empty updateMask will be an error.For a detailed
|
||||
# FieldMask definition, see https://developers.google.com/protocol-buffers/docs/
|
||||
# reference/google.protobuf#fieldmaskExample: updateMask=filter.
|
||||
# @param [String] fields
|
||||
# Selector specifying which fields to include in a partial response.
|
||||
# @param [String] quota_user
|
||||
|
@ -1169,7 +1193,7 @@ module Google
|
|||
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
|
||||
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
|
||||
# @raise [Google::Apis::AuthorizationError] Authorization is required
|
||||
def patch_folder_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, fields: nil, quota_user: nil, options: nil, &block)
|
||||
def patch_folder_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block)
command = make_simple_command(:patch, 'v2/{+sinkName}', options)
command.request_representation = Google::Apis::LoggingV2::LogSink::Representation
command.request_object = log_sink_object
@ -1177,15 +1201,15 @@ module Google
command.response_class = Google::Apis::LoggingV2::LogSink
command.params['sinkName'] = sink_name unless sink_name.nil?
command.query['uniqueWriterIdentity'] = unique_writer_identity unless unique_writer_identity.nil?
command.query['updateMask'] = update_mask unless update_mask.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
execute_or_queue_command(command, &block)
end

# Updates a sink. This method replaces the following fields in the existing sink
# with values from the new sink: destination, filter, output_version_format,
# start_time, and end_time. The updated sink might also have a new
# writer_identity; see the unique_writer_identity field.
# with values from the new sink: destination, and filter. The updated sink might
# also have a new writer_identity; see the unique_writer_identity field.
# @param [String] sink_name
# Required. The full resource name of the sink to update, including the parent
# resource and the sink identifier:
@ -1205,6 +1229,15 @@ module Google
# changed to a unique service account.
# It is an error if the old value is true and the new value is set to false or
# defaulted to false.
# @param [String] update_mask
# Optional. Field mask that specifies the fields in sink that need an update. A
# sink field will be overwritten if, and only if, it is in the update mask. name
# and output only fields cannot be updated.An empty updateMask is temporarily
# treated as using the following mask for backwards compatibility purposes:
# destination,filter,includeChildren At some point in the future, behavior will
# be removed and specifying an empty updateMask will be an error.For a detailed
# FieldMask definition, see https://developers.google.com/protocol-buffers/docs/
# reference/google.protobuf#fieldmaskExample: updateMask=filter.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
@ -1222,7 +1255,7 @@ module Google
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def update_folder_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, fields: nil, quota_user: nil, options: nil, &block)
def update_folder_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block)
command = make_simple_command(:put, 'v2/{+sinkName}', options)
command.request_representation = Google::Apis::LoggingV2::LogSink::Representation
command.request_object = log_sink_object
@ -1230,6 +1263,7 @@ module Google
command.response_class = Google::Apis::LoggingV2::LogSink
command.params['sinkName'] = sink_name unless sink_name.nil?
command.query['uniqueWriterIdentity'] = unique_writer_identity unless unique_writer_identity.nil?
command.query['updateMask'] = update_mask unless update_mask.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
execute_or_queue_command(command, &block)
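For callers, the practical effect of the new updateMask query parameter is that a patch can touch a single sink field. A minimal sketch, assuming application-default credentials and a placeholder folder and sink name (only the patch_folder_sink signature above comes from this change):

require 'googleauth'
require 'google/apis/logging_v2'

logging = Google::Apis::LoggingV2::LoggingService.new
logging.authorization = Google::Auth.get_application_default(
  ['https://www.googleapis.com/auth/cloud-platform'])

# Only the fields named in update_mask are overwritten; everything else is kept.
new_filter = Google::Apis::LoggingV2::LogSink.new(filter: 'severity>=ERROR')
logging.patch_folder_sink('folders/1234567890/sinks/my-sink', new_filter,
                          update_mask: 'filter')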
@ -1560,10 +1594,9 @@ module Google
end

# Creates a sink that exports specified log entries to a destination. The export
# of newly-ingested log entries begins immediately, unless the current time is
# outside the sink's start and end times or the sink's writer_identity is not
# permitted to write to the destination. A sink can export log entries only from
# the resource owning the sink.
# of newly-ingested log entries begins immediately, unless the sink's
# writer_identity is not permitted to write to the destination. A sink can
# export log entries only from the resource owning the sink.
# @param [String] parent
# Required. The resource in which to create the sink:
# "projects/[PROJECT_ID]"
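A hedged sketch of the create side described above; the create_folder_sink method name and the Cloud Storage destination are assumptions for illustration, not part of this diff:

require 'googleauth'
require 'google/apis/logging_v2'

logging = Google::Apis::LoggingV2::LoggingService.new
logging.authorization = Google::Auth.get_application_default(
  ['https://www.googleapis.com/auth/cloud-platform'])

# Export of newly-ingested entries starts immediately, provided the sink's
# writer_identity is allowed to write to the destination.
sink = Google::Apis::LoggingV2::LogSink.new(
  name: 'error-export',
  destination: 'storage.googleapis.com/my-example-bucket',  # placeholder bucket
  filter: 'severity>=ERROR')
logging.create_folder_sink('folders/1234567890', sink, unique_writer_identity: true)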
@ -1730,9 +1763,8 @@ module Google
end

# Updates a sink. This method replaces the following fields in the existing sink
# with values from the new sink: destination, filter, output_version_format,
# start_time, and end_time. The updated sink might also have a new
# writer_identity; see the unique_writer_identity field.
# with values from the new sink: destination, and filter. The updated sink might
# also have a new writer_identity; see the unique_writer_identity field.
# @param [String] sink_name
# Required. The full resource name of the sink to update, including the parent
# resource and the sink identifier:
@ -1752,6 +1784,15 @@ module Google
# changed to a unique service account.
# It is an error if the old value is true and the new value is set to false or
# defaulted to false.
# @param [String] update_mask
# Optional. Field mask that specifies the fields in sink that need an update. A
# sink field will be overwritten if, and only if, it is in the update mask. name
# and output only fields cannot be updated.An empty updateMask is temporarily
# treated as using the following mask for backwards compatibility purposes:
# destination,filter,includeChildren At some point in the future, behavior will
# be removed and specifying an empty updateMask will be an error.For a detailed
# FieldMask definition, see https://developers.google.com/protocol-buffers/docs/
# reference/google.protobuf#fieldmaskExample: updateMask=filter.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
@ -1769,7 +1810,7 @@ module Google
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def patch_organization_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, fields: nil, quota_user: nil, options: nil, &block)
def patch_organization_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block)
command = make_simple_command(:patch, 'v2/{+sinkName}', options)
command.request_representation = Google::Apis::LoggingV2::LogSink::Representation
command.request_object = log_sink_object
@ -1777,15 +1818,15 @@ module Google
command.response_class = Google::Apis::LoggingV2::LogSink
command.params['sinkName'] = sink_name unless sink_name.nil?
command.query['uniqueWriterIdentity'] = unique_writer_identity unless unique_writer_identity.nil?
command.query['updateMask'] = update_mask unless update_mask.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
execute_or_queue_command(command, &block)
end

# Updates a sink. This method replaces the following fields in the existing sink
# with values from the new sink: destination, filter, output_version_format,
# start_time, and end_time. The updated sink might also have a new
# writer_identity; see the unique_writer_identity field.
# with values from the new sink: destination, and filter. The updated sink might
# also have a new writer_identity; see the unique_writer_identity field.
# @param [String] sink_name
# Required. The full resource name of the sink to update, including the parent
# resource and the sink identifier:
@ -1805,6 +1846,15 @@ module Google
# changed to a unique service account.
# It is an error if the old value is true and the new value is set to false or
# defaulted to false.
# @param [String] update_mask
# Optional. Field mask that specifies the fields in sink that need an update. A
# sink field will be overwritten if, and only if, it is in the update mask. name
# and output only fields cannot be updated.An empty updateMask is temporarily
# treated as using the following mask for backwards compatibility purposes:
# destination,filter,includeChildren At some point in the future, behavior will
# be removed and specifying an empty updateMask will be an error.For a detailed
# FieldMask definition, see https://developers.google.com/protocol-buffers/docs/
# reference/google.protobuf#fieldmaskExample: updateMask=filter.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
@ -1822,7 +1872,7 @@ module Google
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def update_organization_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, fields: nil, quota_user: nil, options: nil, &block)
def update_organization_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block)
command = make_simple_command(:put, 'v2/{+sinkName}', options)
command.request_representation = Google::Apis::LoggingV2::LogSink::Representation
command.request_object = log_sink_object
@ -1830,6 +1880,7 @@ module Google
command.response_class = Google::Apis::LoggingV2::LogSink
command.params['sinkName'] = sink_name unless sink_name.nil?
command.query['uniqueWriterIdentity'] = unique_writer_identity unless unique_writer_identity.nil?
command.query['updateMask'] = update_mask unless update_mask.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
execute_or_queue_command(command, &block)
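By contrast with patch, the update_* variants issue a PUT that replaces destination and filter wholesale, so both are supplied even when only one changes. A rough sketch with placeholder organization, sink, and bucket values:

require 'googleauth'
require 'google/apis/logging_v2'

logging = Google::Apis::LoggingV2::LoggingService.new
logging.authorization = Google::Auth.get_application_default(
  ['https://www.googleapis.com/auth/cloud-platform'])

# unique_writer_identity may be turned on here, but per the comment above it is
# an error to turn it back off once the sink has a unique service account.
sink = Google::Apis::LoggingV2::LogSink.new(
  destination: 'storage.googleapis.com/my-example-bucket',  # placeholder bucket
  filter: 'logName="organizations/1234567890/logs/syslog"')
logging.update_organization_sink('organizations/1234567890/sinks/my-sink', sink,
                                 unique_writer_identity: true)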
@ -2298,10 +2349,9 @@ module Google
end

# Creates a sink that exports specified log entries to a destination. The export
# of newly-ingested log entries begins immediately, unless the current time is
# outside the sink's start and end times or the sink's writer_identity is not
# permitted to write to the destination. A sink can export log entries only from
# the resource owning the sink.
# of newly-ingested log entries begins immediately, unless the sink's
# writer_identity is not permitted to write to the destination. A sink can
# export log entries only from the resource owning the sink.
# @param [String] parent
# Required. The resource in which to create the sink:
# "projects/[PROJECT_ID]"
@ -2468,9 +2518,8 @@ module Google
end

# Updates a sink. This method replaces the following fields in the existing sink
# with values from the new sink: destination, filter, output_version_format,
# start_time, and end_time. The updated sink might also have a new
# writer_identity; see the unique_writer_identity field.
# with values from the new sink: destination, and filter. The updated sink might
# also have a new writer_identity; see the unique_writer_identity field.
# @param [String] sink_name
# Required. The full resource name of the sink to update, including the parent
# resource and the sink identifier:
@ -2490,6 +2539,15 @@ module Google
# changed to a unique service account.
# It is an error if the old value is true and the new value is set to false or
# defaulted to false.
# @param [String] update_mask
# Optional. Field mask that specifies the fields in sink that need an update. A
# sink field will be overwritten if, and only if, it is in the update mask. name
# and output only fields cannot be updated.An empty updateMask is temporarily
# treated as using the following mask for backwards compatibility purposes:
# destination,filter,includeChildren At some point in the future, behavior will
# be removed and specifying an empty updateMask will be an error.For a detailed
# FieldMask definition, see https://developers.google.com/protocol-buffers/docs/
# reference/google.protobuf#fieldmaskExample: updateMask=filter.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
@ -2507,7 +2565,7 @@ module Google
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def patch_project_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, fields: nil, quota_user: nil, options: nil, &block)
def patch_project_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block)
command = make_simple_command(:patch, 'v2/{+sinkName}', options)
command.request_representation = Google::Apis::LoggingV2::LogSink::Representation
command.request_object = log_sink_object
@ -2515,15 +2573,15 @@ module Google
command.response_class = Google::Apis::LoggingV2::LogSink
command.params['sinkName'] = sink_name unless sink_name.nil?
command.query['uniqueWriterIdentity'] = unique_writer_identity unless unique_writer_identity.nil?
command.query['updateMask'] = update_mask unless update_mask.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
execute_or_queue_command(command, &block)
end

# Updates a sink. This method replaces the following fields in the existing sink
# with values from the new sink: destination, filter, output_version_format,
# start_time, and end_time. The updated sink might also have a new
# writer_identity; see the unique_writer_identity field.
# with values from the new sink: destination, and filter. The updated sink might
# also have a new writer_identity; see the unique_writer_identity field.
# @param [String] sink_name
# Required. The full resource name of the sink to update, including the parent
# resource and the sink identifier:
@ -2543,6 +2601,15 @@ module Google
# changed to a unique service account.
# It is an error if the old value is true and the new value is set to false or
# defaulted to false.
# @param [String] update_mask
# Optional. Field mask that specifies the fields in sink that need an update. A
# sink field will be overwritten if, and only if, it is in the update mask. name
# and output only fields cannot be updated.An empty updateMask is temporarily
# treated as using the following mask for backwards compatibility purposes:
# destination,filter,includeChildren At some point in the future, behavior will
# be removed and specifying an empty updateMask will be an error.For a detailed
# FieldMask definition, see https://developers.google.com/protocol-buffers/docs/
# reference/google.protobuf#fieldmaskExample: updateMask=filter.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
@ -2560,7 +2627,7 @@ module Google
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def update_project_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, fields: nil, quota_user: nil, options: nil, &block)
def update_project_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block)
command = make_simple_command(:put, 'v2/{+sinkName}', options)
command.request_representation = Google::Apis::LoggingV2::LogSink::Representation
command.request_object = log_sink_object
@ -2568,6 +2635,7 @@ module Google
command.response_class = Google::Apis::LoggingV2::LogSink
command.params['sinkName'] = sink_name unless sink_name.nil?
command.query['uniqueWriterIdentity'] = unique_writer_identity unless unique_writer_identity.nil?
command.query['updateMask'] = update_mask unless update_mask.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
execute_or_queue_command(command, &block)
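The project-level methods behave identically; one extra detail worth showing is the block form, which hands back the errors listed in the @raise tags instead of raising them. A short sketch with placeholder project and sink names:

require 'googleauth'
require 'google/apis/logging_v2'

logging = Google::Apis::LoggingV2::LoggingService.new
logging.authorization = Google::Auth.get_application_default(
  ['https://www.googleapis.com/auth/cloud-platform'])

sink = Google::Apis::LoggingV2::LogSink.new(filter: 'severity>=WARNING')
logging.patch_project_sink('projects/my-project/sinks/my-sink', sink,
                           update_mask: 'filter') do |updated_sink, err|
  # When a block is given, failures arrive here rather than as exceptions.
  if err
    warn "sink update failed: #{err.message}"
  else
    puts "writer identity: #{updated_sink.writer_identity}"
  end
end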
@ -25,7 +25,7 @@ module Google
# @see https://cloud.google.com/logging/docs/
module LoggingV2beta1
VERSION = 'V2beta1'
REVISION = '20170828'
REVISION = '20170905'

# View and manage your data across Google Cloud Platform services
AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform'

@ -951,18 +951,14 @@ module Google
# @return [String]
attr_accessor :destination

# Optional. The time at which this sink will stop exporting log entries. Log
# entries are exported only if their timestamp is earlier than the end time. If
# this field is not supplied, there is no end time. If both a start time and an
# end time are provided, then the end time must be later than the start time.
# Deprecated. This field is ignored when creating or updating sinks.
# Corresponds to the JSON property `endTime`
# @return [String]
attr_accessor :end_time

# Optional. An advanced logs filter. The only exported log entries are those
# that are in the resource owning the sink and that match the filter. The filter
# must use the log entry format specified by the output_version_format parameter.
# For example, in the v2 format:
# that are in the resource owning the sink and that match the filter. For
# example:
# logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR
# Corresponds to the JSON property `filter`
# @return [String]
@ -999,10 +995,7 @@ module Google
# @return [String]
attr_accessor :output_version_format

# Optional. The time at which this sink will begin exporting log entries. Log
# entries are exported only if their timestamp is not earlier than the start
# time. The default value of this field is the time the sink is created or
# updated.
# Deprecated. This field is ignored when creating or updating sinks.
# Corresponds to the JSON property `startTime`
# @return [String]
attr_accessor :start_time
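With startTime and endTime now documented as ignored, a v2beta1 LogSink effectively reduces to a name, a destination, and the advanced logs filter shown above. A minimal sketch (the bucket name is a placeholder):

require 'google/apis/logging_v2beta1'

# start_time and end_time are deprecated and ignored on create/update, so only
# name, destination, and filter are set here; the filter follows the documented
# format.
sink = Google::Apis::LoggingV2beta1::LogSink.new(
  name: 'error-export',
  destination: 'storage.googleapis.com/my-example-bucket',  # placeholder bucket
  filter: 'logName="projects/my-project/logs/syslog" AND severity>=ERROR')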
@ -1562,13 +1555,13 @@ module Google
# missing in log entries, then this method supplies the current time or a unique
# identifier, respectively. The supplied values are chosen so that, among the
# log entries that did not supply their own values, the entries earlier in the
# list will sort before the entries later in the list. See entries.list.Log
# entries with timestamps that are more than the logs retention period in the
# past or more than 24 hours in the future might be discarded. Discarding does
# not return an error.To improve throughput and to avoid exceeding the quota
# limit for calls to entries.write, you should try to include several log
# entries in this list, rather than calling this method for each individual log
# entry.
# list will sort before the entries later in the list. See the entries.list
# method.Log entries with timestamps that are more than the logs retention
# period in the past or more than 24 hours in the future might be discarded.
# Discarding does not return an error.To improve throughput and to avoid
# exceeding the quota limit for calls to entries.write, you should try to
# include several log entries in this list, rather than calling this method for
# each individual log entry.
# Corresponds to the JSON property `entries`
# @return [Array<Google::Apis::LoggingV2beta1::LogEntry>]
attr_accessor :entries
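To act on the batching advice above, several entries can go into a single entries.write call. A hedged sketch, assuming the generated write_entry_log_entries method name, a placeholder project, and the 'global' monitored resource type:

require 'googleauth'
require 'google/apis/logging_v2beta1'

logging = Google::Apis::LoggingV2beta1::LoggingService.new
logging.authorization = Google::Auth.get_application_default(
  ['https://www.googleapis.com/auth/cloud-platform'])

# Batching several entries per request improves throughput and helps stay within
# the entries.write quota; timestamps are omitted so the service supplies them,
# preserving the list order described above.
entries = (1..3).map do |i|
  Google::Apis::LoggingV2beta1::LogEntry.new(text_payload: "batched entry #{i}")
end
request = Google::Apis::LoggingV2beta1::WriteLogEntriesRequest.new(
  log_name: 'projects/my-project/logs/example',  # placeholder log name
  resource: Google::Apis::LoggingV2beta1::MonitoredResource.new(type: 'global'),
  entries: entries)
logging.write_entry_log_entries(request)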
@ -581,10 +581,9 @@ module Google
end

# Creates a sink that exports specified log entries to a destination. The export
# of newly-ingested log entries begins immediately, unless the current time is
# outside the sink's start and end times or the sink's writer_identity is not
# permitted to write to the destination. A sink can export log entries only from
# the resource owning the sink.
# of newly-ingested log entries begins immediately, unless the sink's
# writer_identity is not permitted to write to the destination. A sink can
# export log entries only from the resource owning the sink.
# @param [String] parent
# Required. The resource in which to create the sink:
# "projects/[PROJECT_ID]"
@ -751,9 +750,8 @@ module Google
end

# Updates a sink. This method replaces the following fields in the existing sink
# with values from the new sink: destination, filter, output_version_format,
# start_time, and end_time. The updated sink might also have a new
# writer_identity; see the unique_writer_identity field.
# with values from the new sink: destination, and filter. The updated sink might
# also have a new writer_identity; see the unique_writer_identity field.
# @param [String] sink_name
# Required. The full resource name of the sink to update, including the parent
# resource and the sink identifier:
@ -773,6 +771,15 @@ module Google
# changed to a unique service account.
# It is an error if the old value is true and the new value is set to false or
# defaulted to false.
# @param [String] update_mask
# Optional. Field mask that specifies the fields in sink that need an update. A
# sink field will be overwritten if, and only if, it is in the update mask. name
# and output only fields cannot be updated.An empty updateMask is temporarily
# treated as using the following mask for backwards compatibility purposes:
# destination,filter,includeChildren At some point in the future, behavior will
# be removed and specifying an empty updateMask will be an error.For a detailed
# FieldMask definition, see https://developers.google.com/protocol-buffers/docs/
# reference/google.protobuf#fieldmaskExample: updateMask=filter.
# @param [String] fields
# Selector specifying which fields to include in a partial response.
# @param [String] quota_user
@ -790,7 +797,7 @@ module Google
# @raise [Google::Apis::ServerError] An error occurred on the server and the request can be retried
# @raise [Google::Apis::ClientError] The request is invalid and should not be retried without modification
# @raise [Google::Apis::AuthorizationError] Authorization is required
def update_project_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, fields: nil, quota_user: nil, options: nil, &block)
def update_project_sink(sink_name, log_sink_object = nil, unique_writer_identity: nil, update_mask: nil, fields: nil, quota_user: nil, options: nil, &block)
command = make_simple_command(:put, 'v2beta1/{+sinkName}', options)
command.request_representation = Google::Apis::LoggingV2beta1::LogSink::Representation
command.request_object = log_sink_object
@ -798,6 +805,7 @@ module Google
command.response_class = Google::Apis::LoggingV2beta1::LogSink
command.params['sinkName'] = sink_name unless sink_name.nil?
command.query['uniqueWriterIdentity'] = unique_writer_identity unless unique_writer_identity.nil?
command.query['updateMask'] = update_mask unless update_mask.nil?
command.query['fields'] = fields unless fields.nil?
command.query['quotaUser'] = quota_user unless quota_user.nil?
execute_or_queue_command(command, &block)

@ -25,7 +25,7 @@ module Google
# @see https://developers.google.com/accounts/docs/OAuth2
module Oauth2V1
VERSION = 'V1'
REVISION = '20161103'
REVISION = '20170807'

# Know the list of people in your circles, your age range, and language
AUTH_PLUS_LOGIN = 'https://www.googleapis.com/auth/plus.login'

@ -25,7 +25,7 @@ module Google
# @see https://developers.google.com/accounts/docs/OAuth2
module Oauth2V2
VERSION = 'V2'
REVISION = '20161103'
REVISION = '20170807'

# Know the list of people in your circles, your age range, and language
AUTH_PLUS_LOGIN = 'https://www.googleapis.com/auth/plus.login'

@ -25,7 +25,7 @@ module Google
# @see https://developers.google.com/+/domains/
module PlusDomainsV1
VERSION = 'V1'
REVISION = '20170410'
REVISION = '20170828'

# View your circles and the people and pages in them
AUTH_PLUS_CIRCLES_READ = 'https://www.googleapis.com/auth/plus.circles.read'

@ -52,7 +52,7 @@ module Google

def initialize
super('https://www.googleapis.com/', 'replicapoolupdater/v1beta1/projects/')
@batch_path = 'batch'
@batch_path = 'batch/replicapoolupdater/v1beta1'
end

# Cancels an update. The update must be PAUSED before it can be cancelled. This

@ -50,7 +50,7 @@ module Google

def initialize
super('https://www.googleapis.com/', 'siteVerification/v1/')
@batch_path = 'batch/siteVerification/v1'
@batch_path = 'batch'
end

# Relinquish ownership of a website or domain.

@ -25,7 +25,7 @@ module Google
# @see https://developers.google.com/storage/docs/json_api/
module StorageV1beta1
VERSION = 'V1beta1'
REVISION = '20170329'
REVISION = '20170816'

# Manage your data and permissions in Google Cloud Storage
AUTH_DEVSTORAGE_FULL_CONTROL = 'https://www.googleapis.com/auth/devstorage.full_control'