feat: Automated regeneration of ml v1 client

This commit is contained in:
Yoshi Automation Bot 2020-10-21 11:46:25 -07:00 committed by GitHub
parent 653ee5914b
commit 546b6f2c42
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 263 additions and 79 deletions

View File

@ -114291,6 +114291,9 @@
"/ml:v1/GoogleCloudMlV1__AddTrialMeasurementRequest": google_cloud_ml_v1__add_trial_measurement_request
"/ml:v1/GoogleCloudMlV1__AddTrialMeasurementRequest/measurement": measurement
"/ml:v1/GoogleCloudMlV1__AutoScaling": google_cloud_ml_v1__auto_scaling
"/ml:v1/GoogleCloudMlV1__AutoScaling/maxNodes": max_nodes
"/ml:v1/GoogleCloudMlV1__AutoScaling/metrics": metrics
"/ml:v1/GoogleCloudMlV1__AutoScaling/metrics/metric": metric
"/ml:v1/GoogleCloudMlV1__AutoScaling/minNodes": min_nodes
"/ml:v1/GoogleCloudMlV1__AutomatedStoppingConfig": google_cloud_ml_v1__automated_stopping_config
"/ml:v1/GoogleCloudMlV1__AutomatedStoppingConfig/decayCurveStoppingConfig": decay_curve_stopping_config
@ -114423,6 +114426,9 @@
"/ml:v1/GoogleCloudMlV1__Measurement/metrics": metrics
"/ml:v1/GoogleCloudMlV1__Measurement/metrics/metric": metric
"/ml:v1/GoogleCloudMlV1__Measurement/stepCount": step_count
"/ml:v1/GoogleCloudMlV1__MetricSpec": google_cloud_ml_v1__metric_spec
"/ml:v1/GoogleCloudMlV1__MetricSpec/name": name
"/ml:v1/GoogleCloudMlV1__MetricSpec/target": target
"/ml:v1/GoogleCloudMlV1__Model": google_cloud_ml_v1__model
"/ml:v1/GoogleCloudMlV1__Model/defaultVersion": default_version
"/ml:v1/GoogleCloudMlV1__Model/description": description

View File

@ -25,7 +25,7 @@ module Google
# @see https://cloud.google.com/ml/
module MlV1
VERSION = 'V1'
REVISION = '20200905'
REVISION = '20201016'
# View and manage your data across Google Cloud Platform services
AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform'

View File

@ -514,6 +514,18 @@ module Google
class GoogleCloudMlV1AutoScaling
include Google::Apis::Core::Hashable
# The maximum number of nodes to scale this model under load. The actual value
# will depend on resource quota and availability.
# Corresponds to the JSON property `maxNodes`
# @return [Fixnum]
attr_accessor :max_nodes
# MetricSpec contains the specifications to use to calculate the desired nodes
# count.
# Corresponds to the JSON property `metrics`
# @return [Array<Google::Apis::MlV1::GoogleCloudMlV1MetricSpec>]
attr_accessor :metrics
# Optional. The minimum number of nodes to allocate for this model. These nodes
# are always up, starting from the time the model is deployed. Therefore, the
# cost of operating this model will be at least `rate` * `min_nodes` * number of
@ -548,6 +560,8 @@ module Google
# Update properties of this object
def update!(**args)
@max_nodes = args[:max_nodes] if args.key?(:max_nodes)
@metrics = args[:metrics] if args.key?(:metrics)
@min_nodes = args[:min_nodes] if args.key?(:min_nodes)
end
end
@ -790,12 +804,14 @@ module Google
end
end
# ContainerPort represents a network port in a single container.
# Represents a network port in a single container. This message is a subset of
# the [Kubernetes ContainerPort v1 core specification](https://kubernetes.io/
# docs/reference/generated/kubernetes-api/v1.18/#containerport-v1-core).
class GoogleCloudMlV1ContainerPort
include Google::Apis::Core::Hashable
# Number of port to expose on the pod's IP address. This must be a valid port
# number, 0 < x < 65536.
# Number of the port to expose on the container. This must be a valid port
# number: 0 < PORT_NUMBER < 65536.
# Corresponds to the JSON property `containerPort`
# @return [Fixnum]
attr_accessor :container_port
@ -810,53 +826,123 @@ module Google
end
end
# Specify a custom container to deploy. Our ContainerSpec is a subset of the
# Kubernetes Container specification. https://kubernetes.io/docs/reference/
# generated/kubernetes-api/v1.10/#container-v1-core
# Specification of a custom container for serving predictions. This message is a
# subset of the [Kubernetes Container v1 core specification](https://kubernetes.
# io/docs/reference/generated/kubernetes-api/v1.18/#container-v1-core).
class GoogleCloudMlV1ContainerSpec
include Google::Apis::Core::Hashable
# Immutable. Arguments to the entrypoint. The docker image's CMD is used if this
# is not provided. Variable references $(VAR_NAME) are expanded using the
# container's environment. If a variable cannot be resolved, the reference in
# the input string will be unchanged. The $(VAR_NAME) syntax can be escaped with
# a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded,
# regardless of whether the variable exists or not. More info: https://
# kubernetes.io/docs/tasks/inject-data-application/define-command-argument-
# container/#running-a-command-in-a-shell
# Immutable. Specifies arguments for the command that runs when the container
# starts. This overrides the container's [`CMD`](https://docs.docker.com/engine/
# reference/builder/#cmd). Specify this field as an array of executable and
# arguments, similar to a Docker `CMD`'s "default parameters" form. If you don't
# specify this field but do specify the command field, then the command from the
# `command` field runs without any additional arguments. See the [Kubernetes
# documentation about how the `command` and `args` fields interact with a
# container's `ENTRYPOINT` and `CMD`](https://kubernetes.io/docs/tasks/inject-
# data-application/define-command-argument-container/#notes). If you don't
# specify this field and don't specify the `command` field, then the container'
# s [`ENTRYPOINT`](https://docs.docker.com/engine/reference/builder/#cmd) and `
# CMD` determine what runs based on their default behavior. See the [Docker
# documentation about how `CMD` and `ENTRYPOINT` interact](https://docs.docker.
# com/engine/reference/builder/#understand-how-cmd-and-entrypoint-interact). In
# this field, you can reference [environment variables set by AI Platform
# Prediction](/ai-platform/prediction/docs/custom-container-requirements#aip-
# variables) and environment variables set in the env field. You cannot
# reference environment variables set in the Docker image. In order for
# environment variables to be expanded, reference them by using the following
# syntax: $( VARIABLE_NAME) Note that this differs from Bash variable expansion,
# which does not use parentheses. If a variable cannot be resolved, the
# reference in the input string is used unchanged. To avoid variable expansion,
# you can escape this syntax with `$$`; for example: $$(VARIABLE_NAME) This
# field corresponds to the `args` field of the [Kubernetes Containers v1 core
# API](https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.18/#
# container-v1-core).
# Corresponds to the JSON property `args`
# @return [Array<String>]
attr_accessor :args
# Immutable. Entrypoint array. Not executed within a shell. The docker image's
# ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME)
# are expanded using the container's environment. If a variable cannot be
# resolved, the reference in the input string will be unchanged. The $(VAR_NAME)
# syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references
# will never be expanded, regardless of whether the variable exists or not. More
# info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-
# argument-container/#running-a-command-in-a-shell
# Immutable. Specifies the command that runs when the container starts. This
# overrides the container's [`ENTRYPOINT`](https://docs.docker.com/engine/
# reference/builder/#entrypoint). Specify this field as an array of executable
# and arguments, similar to a Docker `ENTRYPOINT`'s "exec" form, not its "shell"
# form. If you do not specify this field, then the container's `ENTRYPOINT` runs,
# in conjunction with the args field or the container's [`CMD`](https://docs.
# docker.com/engine/reference/builder/#cmd), if either exists. If this field is
# not specified and the container does not have an `ENTRYPOINT`, then refer to
# the [Docker documentation about how `CMD` and `ENTRYPOINT` interact](https://
# docs.docker.com/engine/reference/builder/#understand-how-cmd-and-entrypoint-
# interact). If you specify this field, then you can also specify the `args`
# field to provide additional arguments for this command. However, if you
# specify this field, then the container's `CMD` is ignored. See the [Kubernetes
# documentation about how the `command` and `args` fields interact with a
# container's `ENTRYPOINT` and `CMD`](https://kubernetes.io/docs/tasks/inject-
# data-application/define-command-argument-container/#notes). In this field, you
# can reference [environment variables set by AI Platform Prediction](/ai-
# platform/prediction/docs/custom-container-requirements#aip-variables) and
# environment variables set in the env field. You cannot reference environment
# variables set in the Docker image. In order for environment variables to be
# expanded, reference them by using the following syntax: $( VARIABLE_NAME) Note
# that this differs from Bash variable expansion, which does not use parentheses.
# If a variable cannot be resolved, the reference in the input string is used
# unchanged. To avoid variable expansion, you can escape this syntax with `$$`;
# for example: $$(VARIABLE_NAME) This field corresponds to the `command` field
# of the [Kubernetes Containers v1 core API](https://kubernetes.io/docs/
# reference/generated/kubernetes-api/v1.18/#container-v1-core).
# Corresponds to the JSON property `command`
# @return [Array<String>]
attr_accessor :command
# Immutable. List of environment variables to set in the container.
# Immutable. List of environment variables to set in the container. After the
# container starts running, code running in the container can read these
# environment variables. Additionally, the command and args fields can reference
# these variables. Later entries in this list can also reference earlier entries.
# For example, the following example sets the variable `VAR_2` to have the
# value `foo bar`: ```json [ ` "name": "VAR_1", "value": "foo" `, ` "name": "
# VAR_2", "value": "$(VAR_1) bar" ` ] ``` If you switch the order of the
# variables in the example, then the expansion does not occur. This field
# corresponds to the `env` field of the [Kubernetes Containers v1 core API](
# https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.18/#container-
# v1-core).
# Corresponds to the JSON property `env`
# @return [Array<Google::Apis::MlV1::GoogleCloudMlV1EnvVar>]
attr_accessor :env
# Docker image name. More info: https://kubernetes.io/docs/concepts/containers/
# images
# URI of the Docker image to be used as the custom container for serving
# predictions. This URI must identify [an image in Artifact Registry](/artifact-
# registry/docs/overview) and begin with the hostname ``REGION`-docker.pkg.dev`,
# where ``REGION`` is replaced by the region that matches AI Platform Prediction
# [regional endpoint](/ai-platform/prediction/docs/regional-endpoints) that you
# are using. For example, if you are using the `us-central1-ml.googleapis.com`
# endpoint, then this URI must begin with `us-central1-docker.pkg.dev`. To use a
# custom container, the [AI Platform Google-managed service account](/ai-
# platform/prediction/docs/custom-service-account#default) must have permission
# to pull (read) the Docker image at this URI. The AI Platform Google-managed
# service account has the following format: `service-`PROJECT_NUMBER`@cloud-ml.
# google.com.iam.gserviceaccount.com` `PROJECT_NUMBER` is replaced by your
# Google Cloud project number. By default, this service account has necessary
# permissions to pull an Artifact Registry image in the same Google Cloud
# project where you are using AI Platform Prediction. In this case, no
# configuration is necessary. If you want to use an image from a different
# Google Cloud project, learn how to [grant the Artifact Registry Reader (roles/
# artifactregistry.reader) role for a repository](/artifact-registry/docs/access-
# control#grant-repo) to your project's AI Platform Google-managed service
# account. To learn about the requirements for the Docker image itself, read [
# Custom container requirements](/ai-platform/prediction/docs/custom-container-
# requirements).
# Corresponds to the JSON property `image`
# @return [String]
attr_accessor :image
# Immutable. List of ports to expose from the container. Exposing a port here
# gives the system additional information about the network connections a
# container uses, but is primarily informational. Not specifying a port here
# DOES NOT prevent that port from being exposed. Any port which is listening on
# the default "0.0.0.0" address inside a container will be accessible from the
# network.
# Immutable. List of ports to expose from the container. AI Platform Prediction
# sends any prediction requests that it receives to the first port on this list.
# AI Platform Prediction also sends [liveness and health checks](/ai-platform/
# prediction/docs/custom-container-requirements#health) to this port. If you do
# not specify this field, it defaults to the following value: ```json [ ` "
# containerPort": 8080 ` ] ``` AI Platform Prediction does not use ports other
# than the first one listed. This field corresponds to the `ports` field of the [
# Kubernetes Containers v1 core API](https://kubernetes.io/docs/reference/
# generated/kubernetes-api/v1.18/#container-v1-core).
# Corresponds to the JSON property `ports`
# @return [Array<Google::Apis::MlV1::GoogleCloudMlV1ContainerPort>]
attr_accessor :ports
@ -898,21 +984,30 @@ module Google
end
end
# EnvVar represents an environment variable present in a Container.
# Represents an environment variable to be made available in a container. This
# message is a subset of the [Kubernetes EnvVar v1 core specification](https://
# kubernetes.io/docs/reference/generated/kubernetes-api/v1.18/#envvar-v1-core).
class GoogleCloudMlV1EnvVar
include Google::Apis::Core::Hashable
# Name of the environment variable. Must be a C_IDENTIFIER.
# Name of the environment variable. Must be a [valid C identifier](https://
# github.com/kubernetes/kubernetes/blob/v1.18.8/staging/src/k8s.io/apimachinery/
# pkg/util/validation/validation.go#L258) and must not begin with the prefix `
# AIP_`.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# Variable references $(VAR_NAME) are expanded using the previous defined
# environment variables in the container and any service environment variables.
# If a variable cannot be resolved, the reference in the input string will be
# unchanged. The $(VAR_NAME) syntax can be escaped with a double $$, ie: $$(
# VAR_NAME). Escaped references will never be expanded, regardless of whether
# the variable exists or not. Defaults to "".
# Value of the environment variable. Defaults to an empty string. In this field,
# you can reference [environment variables set by AI Platform Prediction](/ai-
# platform/prediction/docs/custom-container-requirements#aip-variables) and
# environment variables set earlier in the same env field as where this message
# occurs. You cannot reference environment variables set in the Docker image. In
# order for environment variables to be expanded, reference them by using the
# following syntax: $(VARIABLE_NAME) Note that this differs from Bash variable
# expansion, which does not use parentheses. If a variable cannot be resolved,
# the reference in the input string is used unchanged. To avoid variable
# expansion, you can escape this syntax with `$$`; for example: $$(VARIABLE_NAME)
# Corresponds to the JSON property `value`
# @return [String]
attr_accessor :value
@ -1537,6 +1632,33 @@ module Google
end
end
# MetricSpec contains the specifications to use to calculate the desired nodes
# count when autoscaling is enabled.
class GoogleCloudMlV1MetricSpec
  include Google::Apis::Core::Hashable

  # metric name.
  # Corresponds to the JSON property `name`
  # @return [String]
  attr_accessor :name

  # Target specifies the target value for the given metric; once real metric
  # deviates from the threshold by a certain percentage, the node count changes.
  # Corresponds to the JSON property `target`
  # @return [Fixnum]
  attr_accessor :target

  def initialize(**args)
    update!(**args)
  end

  # Update properties of this object
  def update!(**args)
    # Copy each known property only when the caller actually supplied it,
    # so absent keys leave existing attribute values untouched.
    %i[name target].each do |prop|
      instance_variable_set(:"@#{prop}", args[prop]) if args.key?(prop)
    end
  end
end
# Represents a machine learning solution. A model can have multiple versions,
# each of which is a deployed, trained model ready to receive prediction
# requests. The model itself is just a container.
@ -1583,23 +1705,22 @@ module Google
attr_accessor :name
# Optional. If true, online prediction nodes send `stderr` and `stdout` streams
# to Stackdriver Logging. These can be more verbose than the standard access
# logs (see `onlinePredictionLogging`) and can incur higher cost. However, they
# are helpful for debugging. Note that [Stackdriver logs may incur a cost](/
# stackdriver/pricing), especially if your project receives prediction requests
# at a high QPS. Estimate your costs before enabling this option. Default is
# false.
# to Cloud Logging. These can be more verbose than the standard access logs (see
# `onlinePredictionLogging`) and can incur higher cost. However, they are
# helpful for debugging. Note that [logs may incur a cost](/stackdriver/pricing),
# especially if your project receives prediction requests at a high QPS.
# Estimate your costs before enabling this option. Default is false.
# Corresponds to the JSON property `onlinePredictionConsoleLogging`
# @return [Boolean]
attr_accessor :online_prediction_console_logging
alias_method :online_prediction_console_logging?, :online_prediction_console_logging
# Optional. If true, online prediction access logs are sent to StackDriver
# Logging. These logs are like standard server access logs, containing
# information like timestamp and latency for each request. Note that [
# Stackdriver logs may incur a cost](/stackdriver/pricing), especially if your
# project receives prediction requests at a high queries per second rate (QPS).
# Estimate your costs before enabling this option. Default is false.
# Optional. If true, online prediction access logs are sent to Cloud Logging.
# These logs are like standard server access logs, containing information like
# timestamp and latency for each request. Note that [logs may incur a cost](/
# stackdriver/pricing), especially if your project receives prediction requests
# at a high queries per second rate (QPS). Estimate your costs before enabling
# this option. Default is false.
# Corresponds to the JSON property `onlinePredictionLogging`
# @return [Boolean]
attr_accessor :online_prediction_logging
@ -2057,18 +2178,49 @@ module Google
end
end
# RouteMap is used to override HTTP paths sent to a Custom Container. If
# specified, the HTTP server implemented in the ContainerSpec must support the
# route. If unspecified, standard HTTP paths will be used.
# Specifies HTTP paths served by a custom container. AI Platform Prediction
# sends requests to these paths on the container; the custom container must run
# an HTTP server that responds to these requests with appropriate responses.
# Read [Custom container requirements](/ai-platform/prediction/docs/custom-
# container-requirements) for details on how to create your container image to
# meet these requirements.
class GoogleCloudMlV1RouteMap
include Google::Apis::Core::Hashable
# HTTP path to send health check requests.
# HTTP path on the container to send health checks to. AI Platform Prediction
# intermittently sends GET requests to this path on the container's IP address
# and port to check that the container is healthy. Read more about [health
# checks](/ai-platform/prediction/docs/custom-container-requirements#checks).
# For example, if you set this field to `/bar`, then AI Platform Prediction
# intermittently sends a GET request to the following URL on the container:
# localhost:PORT/bar PORT refers to the first value of Version.container.ports.
# If you don't specify this field, it defaults to the following value: /v1/
# models/MODEL/versions/VERSION The placeholders in this value are replaced as
# follows: * MODEL: The name of the parent Model. This does not include the "
# projects/PROJECT_ID/models/" prefix that the API returns in output; it is the
# bare model name, as provided to projects.models.create. * VERSION: The name of
# the model version. This does not include the "projects/PROJECT_ID/models/MODEL/
# versions/" prefix that the API returns in output; it is the bare version name,
# as provided to projects.models.versions.create.
# Corresponds to the JSON property `health`
# @return [String]
attr_accessor :health
# HTTP path to send prediction requests.
# HTTP path on the container to send prediction requests to. AI Platform
# Prediction forwards requests sent using projects.predict to this path on the
# container's IP address and port. AI Platform Prediction then returns the
# container's response in the API response. For example, if you set this field
# to `/foo`, then when AI Platform Prediction receives a prediction request, it
# forwards the request body in a POST request to the following URL on the
# container: localhost:PORT/foo PORT refers to the first value of Version.
# container.ports. If you don't specify this field, it defaults to the following
# value: /v1/models/MODEL/versions/VERSION:predict The placeholders in this
# value are replaced as follows: * MODEL: The name of the parent Model. This
# does not include the "projects/PROJECT_ID/models/" prefix that the API returns
# in output; it is the bare model name, as provided to projects.models.create. *
# VERSION: The name of the model version. This does not include the "projects/
# PROJECT_ID/models/MODEL/versions/" prefix that the API returns in output; it
# is the bare version name, as provided to projects.models.versions.create.
# Corresponds to the JSON property `predict`
# @return [String]
attr_accessor :predict
@ -2785,9 +2937,9 @@ module Google
# @return [Google::Apis::MlV1::GoogleCloudMlV1AutoScaling]
attr_accessor :auto_scaling
# Specify a custom container to deploy. Our ContainerSpec is a subset of the
# Kubernetes Container specification. https://kubernetes.io/docs/reference/
# generated/kubernetes-api/v1.10/#container-v1-core
# Specification of a custom container for serving predictions. This message is a
# subset of the [Kubernetes Container v1 core specification](https://kubernetes.
# io/docs/reference/generated/kubernetes-api/v1.18/#container-v1-core).
# Corresponds to the JSON property `container`
# @return [Google::Apis::MlV1::GoogleCloudMlV1ContainerSpec]
attr_accessor :container
@ -2797,13 +2949,18 @@ module Google
# @return [String]
attr_accessor :create_time
# Required. The Cloud Storage location of the trained model used to create the
# version. See the [guide to model deployment](/ml-engine/docs/tensorflow/
# deploying-models) for more information. When passing Version to projects.
# models.versions.create the model service uses the specified location as the
# source of the model. Once deployed, the model version is hosted by the
# prediction service, so this location is useful only as a historical record.
# The total number of model files can't exceed 1000.
# The Cloud Storage URI of a directory containing trained model artifacts to be
# used to create the model version. See the [guide to deploying models](/ai-
# platform/prediction/docs/deploying-models) for more information. The total
# number of files under this directory must not exceed 1000. During projects.
# models.versions.create, AI Platform Prediction copies all files from the
# specified directory to a location managed by the service. From then on, AI
# Platform Prediction uses these copies of the model artifacts to serve
# predictions, not the original files in Cloud Storage, so this location is
# useful only as a historical record. If you specify container, then this field
# is optional. Otherwise, it is required. Learn [how to use this field with a
# custom container](/ai-platform/prediction/docs/custom-container-requirements#
# artifacts).
# Corresponds to the JSON property `deploymentUri`
# @return [String]
attr_accessor :deployment_uri
@ -2844,10 +3001,8 @@ module Google
# deployment_uri to determine a framework. If you choose `SCIKIT_LEARN` or `
# XGBOOST`, you must also set the runtime version of the model to 1.4 or greater.
# Do **not** specify a framework if you're deploying a [custom prediction
# routine](/ml-engine/docs/tensorflow/custom-prediction-routines). If you
# specify a [Compute Engine (N1) machine type](/ml-engine/docs/machine-types-
# online-prediction) in the `machineType` field, you must specify `TENSORFLOW`
# for the framework.
# routine](/ai-platform/prediction/docs/custom-prediction-routines) or if you're
# using a [custom container](/ai-platform/prediction/docs/use-custom-container).
# Corresponds to the JSON property `framework`
# @return [String]
attr_accessor :framework
@ -2880,9 +3035,9 @@ module Google
# standard-8` * `n1-standard-16` * `n1-standard-32` * `n1-highmem-2` * `n1-
# highmem-4` * `n1-highmem-8` * `n1-highmem-16` * `n1-highmem-32` * `n1-highcpu-
# 2` * `n1-highcpu-4` * `n1-highcpu-8` * `n1-highcpu-16` * `n1-highcpu-32` `mls1-
# c1-m2` is generally available. All other machine types are available in beta.
# Learn more about the [differences between machine types](/ml-engine/docs/
# machine-types-online-prediction).
# c4-m2` is in beta. All other machine types are generally available. Learn more
# about the [differences between machine types](/ml-engine/docs/machine-types-
# online-prediction).
# Corresponds to the JSON property `machineType`
# @return [String]
attr_accessor :machine_type
@ -2964,9 +3119,12 @@ module Google
# @return [Google::Apis::MlV1::GoogleCloudMlV1RequestLoggingConfig]
attr_accessor :request_logging_config
# RouteMap is used to override HTTP paths sent to a Custom Container. If
# specified, the HTTP server implemented in the ContainerSpec must support the
# route. If unspecified, standard HTTP paths will be used.
# Specifies HTTP paths served by a custom container. AI Platform Prediction
# sends requests to these paths on the container; the custom container must run
# an HTTP server that responds to these requests with appropriate responses.
# Read [Custom container requirements](/ai-platform/prediction/docs/custom-
# container-requirements) for details on how to create your container image to
# meet these requirements.
# Corresponds to the JSON property `routes`
# @return [Google::Apis::MlV1::GoogleCloudMlV1RouteMap]
attr_accessor :routes
@ -2978,7 +3136,10 @@ module Google
# @return [String]
attr_accessor :runtime_version
# Optional. Specifies the service account for resource access control.
# Optional. Specifies the service account for resource access control. If you
# specify this field, then you must also specify either the `containerSpec` or
# the `predictionClass` field. Learn more about [using a custom service account](
# /ai-platform/prediction/docs/custom-service-account).
# Corresponds to the JSON property `serviceAccount`
# @return [String]
attr_accessor :service_account

View File

@ -304,6 +304,12 @@ module Google
include Google::Apis::Core::JsonObjectSupport
end
# Forward declaration for GoogleCloudMlV1MetricSpec's JSON representation;
# the concrete property mapping is defined later in this file.
class GoogleCloudMlV1MetricSpec
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class GoogleCloudMlV1Model
class Representation < Google::Apis::Core::JsonRepresentation; end
@ -666,6 +672,9 @@ module Google
# JSON (de)serialization mapping for GoogleCloudMlV1AutoScaling: binds the
# snake_case Ruby attributes to their camelCase JSON property names.
class GoogleCloudMlV1AutoScaling
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :max_nodes, as: 'maxNodes'
# `metrics` is a repeated message field, so it is declared as a collection
# decorated with the GoogleCloudMlV1MetricSpec representation.
collection :metrics, as: 'metrics', class: Google::Apis::MlV1::GoogleCloudMlV1MetricSpec, decorator: Google::Apis::MlV1::GoogleCloudMlV1MetricSpec::Representation
property :min_nodes, as: 'minNodes'
end
end
@ -951,6 +960,14 @@ module Google
end
end
# JSON (de)serialization mapping for GoogleCloudMlV1MetricSpec: both fields
# keep the same name in Ruby and JSON.
class GoogleCloudMlV1MetricSpec
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :name, as: 'name'
property :target, as: 'target'
end
end
class GoogleCloudMlV1Model
# @private
class Representation < Google::Apis::Core::JsonRepresentation

View File

@ -4,7 +4,7 @@
"git": {
"name": ".",
"remote": "https://github.com/googleapis/google-api-ruby-client.git",
"sha": "c98c719bbab68d0890524d53f8b629d7858af9c2"
"sha": "cb0c5bf94e2b1c915107eec83041d4409c900155"
}
}
]